launchpad-reviewers team mailing list archive
Message #22093
[Merge] lp:~cjwatson/launchpad/deprecated-assert-methods-1 into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/deprecated-assert-methods-1 into lp:launchpad.
Commit message:
Use modern equivalents of deprecated TestCase.fail{If,Unless}* methods.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/deprecated-assert-methods-1/+merge/335627
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/deprecated-assert-methods-1 into lp:launchpad.
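The deprecated TestCase aliases replaced below are plain synonyms for the modern assertion methods in the standard unittest module, so each substitution in the diff is mechanical. As a minimal illustrative sketch of the mapping (standard unittest on Python 2.7+, not code from this branch):

    import unittest

    class AliasMappingExample(unittest.TestCase):
        """Each deprecated alias maps onto a modern assertion method."""

        def test_modern_equivalents(self):
            value = 42
            self.assertTrue(value > 0)                # was: failUnless(value > 0)
            self.assertFalse(value < 0)               # was: failIf(value < 0)
            self.assertEqual(value, 42)               # was: failUnlessEqual / assertEquals
            self.assertNotEqual(value, 0)             # was: failIfEqual
            self.assertRaises(ValueError, int, 'x')   # was: failUnlessRaises
            self.assertIsNone(None)                   # preferred over assertEqual(None, ...)

    if __name__ == '__main__':
        unittest.main()

Where the old call carried an explicit expected/actual comparison (failUnless(a == b, msg)), the diff switches to assertEqual(a, b, msg) so that failures report both values.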
=== modified file 'bzrplugins/lpserve/test_lpserve.py'
--- bzrplugins/lpserve/test_lpserve.py 2017-09-27 02:12:20 +0000
+++ bzrplugins/lpserve/test_lpserve.py 2018-01-02 14:40:48 +0000
@@ -740,10 +740,10 @@
# The service should be up and responsive.
response = self.send_message_to_service('hello\n')
self.assertEqual('ok\nyep, still alive\n', response)
- self.failUnless(os.path.isfile(self.service_pid_filename))
+ self.assertTrue(os.path.isfile(self.service_pid_filename))
with open(self.service_pid_filename, 'rb') as f:
content = f.read()
self.assertEqualDiff('%d\n' % (self.service_process,), content)
# We're done. Shut it down.
self.stop_service()
- self.failIf(os.path.isfile(self.service_pid_filename))
+ self.assertFalse(os.path.isfile(self.service_pid_filename))
=== modified file 'lib/launchpad_loggerhead/tests.py'
--- lib/launchpad_loggerhead/tests.py 2012-01-01 03:03:28 +0000
+++ lib/launchpad_loggerhead/tests.py 2018-01-02 14:40:48 +0000
@@ -109,7 +109,7 @@
self.browser.open(
config.codehosting.secure_codebrowse_root + 'favicon.ico')
self.assertEqual(self.session['user'], 'bob')
- self.failUnless(self.browser.cookies.get(self.cookie_name))
+ self.assertTrue(self.browser.cookies.get(self.cookie_name))
# When we visit +logout, our session is gone.
self.browser.open(
=== modified file 'lib/lp/answers/tests/test_question_workflow.py'
--- lib/lp/answers/tests/test_question_workflow.py 2017-10-25 10:02:12 +0000
+++ lib/lp/answers/tests/test_question_workflow.py 2018-01-02 14:40:48 +0000
@@ -138,8 +138,8 @@
self.setQuestionStatus(self.question, status)
expected = status.name in statuses_expected_true
allowed = getattr(self.question, guard_name)
- self.failUnless(
- expected == allowed, "%s != %s when status = %s" % (
+ self.assertEqual(
+ expected, allowed, "%s != %s when status = %s" % (
guard_name, expected, status.name))
def _testValidTransition(self, statuses, transition_method,
@@ -224,7 +224,7 @@
transition_method(*args, **kwargs)
except InvalidQuestionStateError:
exceptionRaised = True
- self.failUnless(exceptionRaised,
+ self.assertTrue(exceptionRaised,
"%s() when status = %s should raise an error" % (
transition_method.__name__, status.name))
@@ -240,7 +240,7 @@
It also verifies that the question status, datelastquery (or
datelastresponse) were updated to reflect the time of the message.
"""
- self.failUnless(verifyObject(IQuestionMessage, message))
+ self.assertTrue(verifyObject(IQuestionMessage, message))
self.assertEquals("Re: Help!", message.subject)
self.assertEquals(expected_owner, message.owner)
@@ -268,42 +268,42 @@
def failure_msg(msg):
return "From status %s: %s" % (status_name, msg)
- self.failUnless(
+ self.assertTrue(
len(self.collected_events) >= 1,
failure_msg('failed to trigger an IObjectCreatedEvent'))
created_event = self.collected_events[0]
created_event_user = IPerson(created_event.user)
- self.failUnless(
+ self.assertTrue(
IObjectCreatedEvent.providedBy(created_event),
failure_msg(
"%s doesn't provide IObjectCreatedEvent" % created_event))
- self.failUnless(
+ self.assertTrue(
created_event.object == message,
failure_msg("IObjectCreatedEvent contains wrong message"))
- self.failUnless(
+ self.assertTrue(
created_event_user == message.owner,
failure_msg("%s != %s" % (
created_event_user.displayname, message.owner.displayname)))
- self.failUnless(
+ self.assertTrue(
len(self.collected_events) == 2,
failure_msg('failed to trigger an IObjectModifiedEvent'))
modified_event = self.collected_events[1]
modified_event_user = IPerson(modified_event.user)
- self.failUnless(
+ self.assertTrue(
IObjectModifiedEvent.providedBy(modified_event),
failure_msg(
"%s doesn't provide IObjectModifiedEvent"
% modified_event))
- self.failUnless(
+ self.assertTrue(
modified_event.object == self.question,
failure_msg("IObjectModifiedEvent contains wrong question"))
- self.failUnless(
+ self.assertTrue(
modified_event_user == message.owner,
failure_msg("%s != %s" % (
modified_event_user.displayname, message.owner.displayname)))
if edited_fields:
- self.failUnless(
+ self.assertTrue(
set(modified_event.edited_fields) == set(edited_fields),
failure_msg("%s != %s" % (
set(modified_event.edited_fields), set(edited_fields))))
@@ -704,7 +704,7 @@
question2.confirmAnswer('That worked!', answer=question1_answer)
except AssertionError:
answerRefused = True
- self.failUnless(
+ self.assertTrue(
answerRefused, 'confirmAnswer accepted a message from a different'
'question')
=== modified file 'lib/lp/answers/tests/test_questiontarget.py'
--- lib/lp/answers/tests/test_questiontarget.py 2017-10-25 10:02:12 +0000
+++ lib/lp/answers/tests/test_questiontarget.py 2018-01-02 14:40:48 +0000
@@ -105,11 +105,11 @@
# answer_contacts_with_languages is not part of its public API.
answer_contacts = removeSecurityProxy(
product).answer_contacts_with_languages
- self.failUnlessEqual(answer_contacts, [answer_contact])
+ self.assertEqual(answer_contacts, [answer_contact])
langs = [
lang.englishname for lang in answer_contact.getLanguagesCache()]
# The languages cache has been filled in the correct order.
- self.failUnlessEqual(langs, ['English', 'Portuguese (Brazil)'])
+ self.assertEqual(langs, ['English', 'Portuguese (Brazil)'])
def test_SourcePackage_implementation_should_prefill_cache(self):
# Remove the answer contact's security proxy because we need to call
@@ -128,11 +128,11 @@
# answer_contacts_with_languages is not part of its public API.
answer_contacts = removeSecurityProxy(
source_package).answer_contacts_with_languages
- self.failUnlessEqual(answer_contacts, [answer_contact])
+ self.assertEqual(answer_contacts, [answer_contact])
langs = [
lang.englishname for lang in answer_contact.getLanguagesCache()]
# The languages cache has been filled in the correct order.
- self.failUnlessEqual(langs, ['English', 'Portuguese (Brazil)'])
+ self.assertEqual(langs, ['English', 'Portuguese (Brazil)'])
class TestQuestionTargetCreateQuestionFromBug(TestCaseWithFactory):
=== modified file 'lib/lp/app/browser/tests/test_launchpadroot.py'
--- lib/lp/app/browser/tests/test_launchpadroot.py 2017-11-10 12:13:39 +0000
+++ lib/lp/app/browser/tests/test_launchpadroot.py 2018-01-02 14:40:48 +0000
@@ -63,22 +63,22 @@
login_person(self.expert)
def test_anonymous_cannot_edit(self):
- self.failIf(check_permission('launchpad.Edit', self.root),
+ self.assertFalse(check_permission('launchpad.Edit', self.root),
"Anonymous user shouldn't have launchpad.Edit on ILaunchpadRoot")
def test_regular_user_cannot_edit(self):
login_person(self.factory.makePerson())
- self.failIf(check_permission('launchpad.Edit', self.root),
+ self.assertFalse(check_permission('launchpad.Edit', self.root),
"Regular users shouldn't have launchpad.Edit on ILaunchpadRoot")
def test_registry_expert_can_edit(self):
self.setUpRegistryExpert()
- self.failUnless(check_permission('launchpad.Edit', self.root),
+ self.assertTrue(check_permission('launchpad.Edit', self.root),
"Registry experts should have launchpad.Edit on ILaunchpadRoot")
def test_admins_can_edit(self):
login_person(self.admin)
- self.failUnless(check_permission('launchpad.Edit', self.root),
+ self.assertTrue(check_permission('launchpad.Edit', self.root),
"Admins should have launchpad.Edit on ILaunchpadRoot")
def test_featured_projects_view_requires_edit(self):
@@ -94,7 +94,7 @@
# urlfetch.
view.getRecentBlogPosts = lambda: []
content = BeautifulSoup(view(), parseOnlyThese=SoupStrainer('a'))
- self.failUnless(
+ self.assertTrue(
content.find('a', href='+featuredprojects'),
"Cannot find the +featuredprojects link on the first page")
=== modified file 'lib/lp/app/tests/test_help.py'
--- lib/lp/app/tests/test_help.py 2011-12-30 09:16:36 +0000
+++ lib/lp/app/tests/test_help.py 2018-01-02 14:40:48 +0000
@@ -27,10 +27,10 @@
def assertHasHelpFolderView(self, name, expected_folder_path):
"""Assert that the named help folder has the right path."""
view = create_view(getUtility(ILaunchpadApplication), name)
- self.failUnless(
+ self.assertTrue(
isinstance(view, ExportedFolder),
'View should be an instance of ExportedFolder: %s' % view)
- self.failUnless(
+ self.assertTrue(
os.path.samefile(view.folder, expected_folder_path),
"Expected help folder %s, got %s" % (
expected_folder_path, view.folder))
=== modified file 'lib/lp/app/tests/test_tales.py'
--- lib/lp/app/tests/test_tales.py 2014-07-09 02:42:47 +0000
+++ lib/lp/app/tests/test_tales.py 2018-01-02 14:40:48 +0000
@@ -363,7 +363,7 @@
# Traversal of invalid names raises an exception.
adapter = getAdapter(None, IPathAdapter, 'fmt')
traverse = getattr(adapter, 'traverse', None)
- self.failUnlessRaises(TraversalError, traverse, "foo", [])
+ self.assertRaises(TraversalError, traverse, "foo", [])
def test_shorten_traversal(self):
# Traversal of 'shorten' works as expected.
=== modified file 'lib/lp/archivepublisher/tests/test_debversion.py'
--- lib/lp/archivepublisher/tests/test_debversion.py 2012-08-17 11:14:22 +0000
+++ lib/lp/archivepublisher/tests/test_debversion.py 2018-01-02 14:40:48 +0000
@@ -123,7 +123,7 @@
def testComparisons(self):
"""Sample Version comparisons should pass."""
for x, y in self.COMPARISONS:
- self.failUnless(Version(x) < Version(y))
+ self.assertTrue(Version(x) < Version(y))
def testNullEpochIsZero(self):
"""Version should treat an omitted epoch as a zero one."""
@@ -133,4 +133,4 @@
"""Version should treat an omitted revision as being equal to zero.
"""
self.assertEquals(Version("1.0"), Version("1.0-0"))
- self.failUnless(Version("1.0") == Version("1.0-0"))
+ self.assertTrue(Version("1.0") == Version("1.0-0"))
=== modified file 'lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py'
--- lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py 2012-09-17 16:13:40 +0000
+++ lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py 2018-01-02 14:40:48 +0000
@@ -251,7 +251,7 @@
self.assertEqual([], self.view.conditions)
def test_not_filters_everything_normally(self):
- self.failIf(self.view.filters_everything)
+ self.assertFalse(self.view.filters_everything)
def test_conditions_for_COMMENTS_events(self):
# If we are subscribed to comments, that is all-inclusive: no
=== modified file 'lib/lp/bugs/browser/tests/test_bugtask.py'
--- lib/lp/bugs/browser/tests/test_bugtask.py 2017-10-21 18:14:14 +0000
+++ lib/lp/bugs/browser/tests/test_bugtask.py 2018-01-02 14:40:48 +0000
@@ -375,8 +375,7 @@
self.view = BugTasksNominationsView(self.bug, LaunchpadTestRequest())
def test_current_user_affected_status(self):
- self.failUnlessEqual(
- None, self.view.current_user_affected_status)
+ self.assertIsNone(self.view.current_user_affected_status)
self.bug.markUserAffected(self.view.user, True)
self.refresh()
self.assertTrue(self.view.current_user_affected_status)
@@ -385,54 +384,43 @@
self.assertFalse(self.view.current_user_affected_status)
def test_current_user_affected_js_status(self):
- self.failUnlessEqual(
- 'null', self.view.current_user_affected_js_status)
+ self.assertEqual('null', self.view.current_user_affected_js_status)
self.bug.markUserAffected(self.view.user, True)
self.refresh()
- self.failUnlessEqual(
- 'true', self.view.current_user_affected_js_status)
+ self.assertEqual('true', self.view.current_user_affected_js_status)
self.bug.markUserAffected(self.view.user, False)
self.refresh()
- self.failUnlessEqual(
- 'false', self.view.current_user_affected_js_status)
+ self.assertEqual('false', self.view.current_user_affected_js_status)
def test_other_users_affected_count(self):
# The number of other users affected does not change when the
# logged-in user marked themselves as affected or not.
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
+ self.assertEqual(1, self.view.other_users_affected_count)
self.bug.markUserAffected(self.view.user, True)
self.refresh()
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
+ self.assertEqual(1, self.view.other_users_affected_count)
self.bug.markUserAffected(self.view.user, False)
self.refresh()
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
+ self.assertEqual(1, self.view.other_users_affected_count)
def test_other_users_affected_count_other_users(self):
# The number of other users affected only changes when other
# users mark themselves as affected.
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
+ self.assertEqual(1, self.view.other_users_affected_count)
other_user_1 = self.factory.makePerson()
self.bug.markUserAffected(other_user_1, True)
self.refresh()
- self.failUnlessEqual(
- 2, self.view.other_users_affected_count)
+ self.assertEqual(2, self.view.other_users_affected_count)
other_user_2 = self.factory.makePerson()
self.bug.markUserAffected(other_user_2, True)
self.refresh()
- self.failUnlessEqual(
- 3, self.view.other_users_affected_count)
+ self.assertEqual(3, self.view.other_users_affected_count)
self.bug.markUserAffected(other_user_1, False)
self.refresh()
- self.failUnlessEqual(
- 2, self.view.other_users_affected_count)
+ self.assertEqual(2, self.view.other_users_affected_count)
self.bug.markUserAffected(self.view.user, True)
self.refresh()
- self.failUnlessEqual(
- 2, self.view.other_users_affected_count)
+ self.assertEqual(2, self.view.other_users_affected_count)
def makeDuplicate(self):
user2 = self.factory.makePerson()
@@ -519,51 +507,45 @@
def test_affected_statement_no_one_affected(self):
self.bug.markUserAffected(self.bug.owner, False)
- self.failUnlessEqual(
- 0, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(0, self.view.other_users_affected_count)
+ self.assertEqual(
"Does this bug affect you?", self.view.affected_statement)
def test_affected_statement_only_you(self):
self.view.context.markUserAffected(self.view.user, True)
- self.failUnless(self.bug.isUserAffected(self.view.user))
+ self.assertTrue(self.bug.isUserAffected(self.view.user))
self.view.context.markUserAffected(self.bug.owner, False)
- self.failUnlessEqual(
- 0, self.view.other_users_affected_count)
- self.failUnlessEqual(
- "This bug affects you", self.view.affected_statement)
+ self.assertEqual(0, self.view.other_users_affected_count)
+ self.assertEqual("This bug affects you", self.view.affected_statement)
def test_affected_statement_only_not_you(self):
self.view.context.markUserAffected(self.view.user, False)
- self.failIf(self.bug.isUserAffected(self.view.user))
+ self.assertFalse(self.bug.isUserAffected(self.view.user))
self.view.context.markUserAffected(self.bug.owner, False)
- self.failUnlessEqual(
- 0, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(0, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug doesn't affect you", self.view.affected_statement)
def test_affected_statement_1_person_not_you(self):
self.assertIs(None, self.bug.isUserAffected(self.view.user))
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(1, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects 1 person. Does this bug affect you?",
self.view.affected_statement)
def test_affected_statement_1_person_and_you(self):
self.view.context.markUserAffected(self.view.user, True)
- self.failUnless(self.bug.isUserAffected(self.view.user))
- self.failUnlessEqual(
- 1, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertTrue(self.bug.isUserAffected(self.view.user))
+ self.assertEqual(1, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects you and 1 other person",
self.view.affected_statement)
def test_affected_statement_1_person_and_not_you(self):
self.view.context.markUserAffected(self.view.user, False)
- self.failIf(self.bug.isUserAffected(self.view.user))
- self.failUnlessEqual(1, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertFalse(self.bug.isUserAffected(self.view.user))
+ self.assertEqual(1, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects 1 person, but not you",
self.view.affected_statement)
@@ -571,45 +553,45 @@
self.assertIs(None, self.bug.isUserAffected(self.view.user))
other_user = self.factory.makePerson()
self.view.context.markUserAffected(other_user, True)
- self.failUnlessEqual(2, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(2, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects 2 people. Does this bug affect you?",
self.view.affected_statement)
def test_affected_statement_more_than_1_person_and_you(self):
self.view.context.markUserAffected(self.view.user, True)
- self.failUnless(self.bug.isUserAffected(self.view.user))
+ self.assertTrue(self.bug.isUserAffected(self.view.user))
other_user = self.factory.makePerson()
self.view.context.markUserAffected(other_user, True)
- self.failUnlessEqual(2, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(2, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects you and 2 other people",
self.view.affected_statement)
def test_affected_statement_more_than_1_person_and_not_you(self):
self.view.context.markUserAffected(self.view.user, False)
- self.failIf(self.bug.isUserAffected(self.view.user))
+ self.assertFalse(self.bug.isUserAffected(self.view.user))
other_user = self.factory.makePerson()
self.view.context.markUserAffected(other_user, True)
- self.failUnlessEqual(2, self.view.other_users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(2, self.view.other_users_affected_count)
+ self.assertEqual(
"This bug affects 2 people, but not you",
self.view.affected_statement)
def test_anon_affected_statement_no_one_affected(self):
self.bug.markUserAffected(self.bug.owner, False)
- self.failUnlessEqual(0, self.bug.users_affected_count)
+ self.assertEqual(0, self.bug.users_affected_count)
self.assertIs(None, self.view.anon_affected_statement)
def test_anon_affected_statement_1_user_affected(self):
- self.failUnlessEqual(1, self.bug.users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(1, self.bug.users_affected_count)
+ self.assertEqual(
"This bug affects 1 person", self.view.anon_affected_statement)
def test_anon_affected_statement_2_users_affected(self):
self.view.context.markUserAffected(self.view.user, True)
- self.failUnlessEqual(2, self.bug.users_affected_count)
- self.failUnlessEqual(
+ self.assertEqual(2, self.bug.users_affected_count)
+ self.assertEqual(
"This bug affects 2 people", self.view.anon_affected_statement)
@@ -631,19 +613,19 @@
for count in range(10 - len(self.bug.bugtasks) - 1):
self.factory.makeBugTask(bug=self.bug)
self.view.initialize()
- self.failIf(self.view.many_bugtasks)
+ self.assertFalse(self.view.many_bugtasks)
row_view = self.view._getTableRowView(
self.bug.default_bugtask, False, False)
- self.failIf(row_view.many_bugtasks)
+ self.assertFalse(row_view.many_bugtasks)
def test_many_bugtasks(self):
for count in range(10 - len(self.bug.bugtasks)):
self.factory.makeBugTask(bug=self.bug)
self.view.initialize()
- self.failUnless(self.view.many_bugtasks)
+ self.assertTrue(self.view.many_bugtasks)
row_view = self.view._getTableRowView(
self.bug.default_bugtask, False, False)
- self.failUnless(row_view.many_bugtasks)
+ self.assertTrue(row_view.many_bugtasks)
def test_getTargetLinkTitle_product(self):
# The target link title is always none for products.
=== modified file 'lib/lp/bugs/model/tests/test_personsubscriptioninfo.py'
--- lib/lp/bugs/model/tests/test_personsubscriptioninfo.py 2012-10-12 11:57:10 +0000
+++ lib/lp/bugs/model/tests/test_personsubscriptioninfo.py 2018-01-02 14:40:48 +0000
@@ -105,7 +105,7 @@
# Load a `PersonSubscriptionInfo`s for a subscriber and a bug.
self.subscriptions.reload()
self.assertCollectionsAreEmpty()
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
def test_no_subscriptions_getDataForClient(self):
self.subscriptions.reload()
@@ -125,7 +125,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='as_assignee')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_assignee, personal=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -159,7 +159,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='as_assignee')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_assignee, as_team_member=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -186,7 +186,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='as_assignee')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_assignee, as_team_admin=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -215,7 +215,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='direct')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.direct, personal=1)
self.assertRealSubscriptionInfoMatches(
@@ -247,7 +247,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='direct')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.direct, as_team_member=1)
self.assertRealSubscriptionInfoMatches(
@@ -278,7 +278,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='direct')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.direct, as_team_admin=1)
self.assertRealSubscriptionInfoMatches(
@@ -306,7 +306,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='from_duplicate')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.from_duplicate, personal=1)
self.assertRealSubscriptionInfoMatches(
@@ -324,7 +324,7 @@
# This means no subscriptions on the duplicate bug.
self.assertCollectionsAreEmpty()
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.from_duplicate, personal=0)
@@ -341,7 +341,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='from_duplicate')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.from_duplicate, personal=2)
self.assertRealSubscriptionInfoMatches(
@@ -362,7 +362,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='from_duplicate')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.from_duplicate, as_team_member=1)
self.assertRealSubscriptionInfoMatches(
@@ -384,7 +384,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='from_duplicate')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.from_duplicate, as_team_admin=1)
self.assertRealSubscriptionInfoMatches(
@@ -425,7 +425,7 @@
self.subscriptions.loadSubscriptionsFor(target.owner, self.bug)
self.assertCollectionsAreEmpty(except_='as_owner')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_owner, personal=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -439,7 +439,7 @@
# Subscribed directly to the bug.
self.subscriptions.loadSubscriptionsFor(target.owner, self.bug)
self.assertCollectionsAreEmpty()
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
def test_owner_through_team(self):
# Bug is targeted to a pillar with no supervisor set.
@@ -452,7 +452,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='as_owner')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_owner, as_team_member=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -473,7 +473,7 @@
self.subscriptions.reload()
self.assertCollectionsAreEmpty(except_='as_owner')
- self.failIf(self.subscriptions.muted)
+ self.assertFalse(self.subscriptions.muted)
self.assertCollectionContents(
self.subscriptions.as_owner, as_team_admin=1)
self.assertVirtualSubscriptionInfoMatches(
@@ -489,7 +489,7 @@
# Load a `PersonSubscriptionInfo`s for subscriber and a bug.
self.subscriptions.reload()
- self.failUnless(self.subscriptions.muted)
+ self.assertTrue(self.subscriptions.muted)
def test_many_duplicate_team_admin_subscriptions_few_queries(self):
# This is related to bug 811447. The user is subscribed to a
=== modified file 'lib/lp/bugs/scripts/checkwatches/tests/test_base.py'
--- lib/lp/bugs/scripts/checkwatches/tests/test_base.py 2015-10-15 14:09:50 +0000
+++ lib/lp/bugs/scripts/checkwatches/tests/test_base.py 2018-01-02 14:40:48 +0000
@@ -141,7 +141,7 @@
base1.init(self.email, transaction.manager, self.logger)
base2 = WorkingBase()
base2.initFromParent(base1)
- self.failUnlessEqual(base1.__dict__, base2.__dict__)
+ self.assertEqual(base1.__dict__, base2.__dict__)
class TestWorkingBaseErrorReporting(TestCaseWithFactory):
=== modified file 'lib/lp/bugs/scripts/checkwatches/tests/test_core.py'
--- lib/lp/bugs/scripts/checkwatches/tests/test_core.py 2015-09-29 06:06:00 +0000
+++ lib/lp/bugs/scripts/checkwatches/tests/test_core.py 2018-01-02 14:40:48 +0000
@@ -143,13 +143,13 @@
remote_systems_and_watches = (
self.updater._getExternalBugTrackersAndWatches(
gnome_bugzilla, []))
- self.failUnlessEqual(2, len(remote_systems_and_watches))
+ self.assertEqual(2, len(remote_systems_and_watches))
# One will have comment syncing enabled.
- self.failUnless(
+ self.assertTrue(
any(remote_system.sync_comments
for (remote_system, watches) in remote_systems_and_watches))
# One will have comment syncing disabled.
- self.failUnless(
+ self.assertTrue(
any(not remote_system.sync_comments
for (remote_system, watches) in remote_systems_and_watches))
# When there are no syncable products, only one remote system
@@ -158,9 +158,9 @@
remote_systems_and_watches = (
self.updater._getExternalBugTrackersAndWatches(
gnome_bugzilla, []))
- self.failUnlessEqual(1, len(remote_systems_and_watches))
+ self.assertEqual(1, len(remote_systems_and_watches))
[(remote_system, watches)] = remote_systems_and_watches
- self.failIf(remote_system.sync_comments)
+ self.assertFalse(remote_system.sync_comments)
class BrokenCheckwatchesMaster(CheckwatchesMaster):
@@ -224,13 +224,13 @@
# it accordingly.
remote_system.batch_size = None
checkwatches.core.suggest_batch_size(remote_system, 1)
- self.failUnlessEqual(100, remote_system.batch_size)
+ self.assertEqual(100, remote_system.batch_size)
remote_system.batch_size = None
checkwatches.core.suggest_batch_size(remote_system, 12350)
- self.failUnlessEqual(247, remote_system.batch_size)
+ self.assertEqual(247, remote_system.batch_size)
# If the batch_size is already set, it will not be changed.
checkwatches.core.suggest_batch_size(remote_system, 99999)
- self.failUnlessEqual(247, remote_system.batch_size)
+ self.assertEqual(247, remote_system.batch_size)
def test_xmlrpc_connection_errors_set_activity_properly(self):
# HTTP status codes of 502, 503 and 504 indicate connection
@@ -341,8 +341,8 @@
def test_args_and_kwargs(self):
def func(name, aptitude):
- self.failUnlessEqual("Robin Hood", name)
- self.failUnlessEqual("Riding through the glen", aptitude)
+ self.assertEqual("Robin Hood", name)
+ self.assertEqual("Riding through the glen", aptitude)
# Positional args specified when adding a job are passed to
# the job function at run time.
@@ -373,17 +373,17 @@
self.scheduler.schedule(
list.remove, numbers, 3)
self.scheduler.schedule(
- lambda: self.failUnlessEqual([1, 2], numbers))
+ lambda: self.assertEqual([1, 2], numbers))
# Remove 1 and check.
self.scheduler.schedule(
list.remove, numbers, 1)
self.scheduler.schedule(
- lambda: self.failUnlessEqual([2], numbers))
+ lambda: self.assertEqual([2], numbers))
# Remove 2 and check.
self.scheduler.schedule(
list.remove, numbers, 2)
self.scheduler.schedule(
- lambda: self.failUnlessEqual([], numbers))
+ lambda: self.assertEqual([], numbers))
# Run the scheduler.
self.scheduler.run()
=== modified file 'lib/lp/bugs/tests/test_bug_messages.py'
--- lib/lp/bugs/tests/test_bug_messages.py 2016-05-05 08:23:13 +0000
+++ lib/lp/bugs/tests/test_bug_messages.py 2018-01-02 14:40:48 +0000
@@ -46,7 +46,7 @@
# the parent isn't linked to the same bug as the
# IIndexedMessage.
for indexed_message in self.bug_2.indexed_messages:
- self.failUnlessEqual(None, indexed_message.parent)
+ self.assertIsNone(indexed_message.parent)
class TestUserCanSetCommentVisibility(TestCaseWithFactory):
=== modified file 'lib/lp/bugs/tests/test_bugchanges.py'
--- lib/lp/bugs/tests/test_bugchanges.py 2017-05-31 17:31:58 +0000
+++ lib/lp/bugs/tests/test_bugchanges.py 2018-01-02 14:40:48 +0000
@@ -479,7 +479,7 @@
for bug_task in self.bug.bugtasks:
bug_task.transitionToStatus(
BugTaskStatus.FIXRELEASED, user=self.user)
- self.failUnless(self.bug.is_complete)
+ self.assertTrue(self.bug.is_complete)
self.saveOldChanges()
branch = self.factory.makeBranch()
self.bug.linkBranch(branch, self.user)
@@ -532,7 +532,7 @@
for bug_task in self.bug.bugtasks:
bug_task.transitionToStatus(
BugTaskStatus.FIXRELEASED, user=self.user)
- self.failUnless(self.bug.is_complete)
+ self.assertTrue(self.bug.is_complete)
branch = self.factory.makeBranch()
self.bug.linkBranch(branch, self.user)
self.saveOldChanges()
=== modified file 'lib/lp/bugs/tests/test_bugs_webservice.py'
--- lib/lp/bugs/tests/test_bugs_webservice.py 2017-10-21 18:14:14 +0000
+++ lib/lp/bugs/tests/test_bugs_webservice.py 2018-01-02 14:40:48 +0000
@@ -220,7 +220,7 @@
response = webservice.get(url)
self.assertThat(collector, HasQueryCount(LessThan(24)))
with_2_count = collector.count
- self.failUnlessEqual(response.status, 200)
+ self.assertEqual(response.status, 200)
login(USER_EMAIL)
for i in range(5):
self.factory.makeBugAttachment(self.bug)
@@ -255,7 +255,7 @@
response = webservice.get(url)
self.assertThat(collector, HasQueryCount(LessThan(24)))
with_2_count = collector.count
- self.failUnlessEqual(response.status, 200)
+ self.assertEqual(response.status, 200)
login(USER_EMAIL)
for i in range(50):
self.factory.makeBugComment(bug)
@@ -288,12 +288,12 @@
bug = self.webservice.load(api_url(self.bug))
messages = bug.messages
latest_message = [message for message in messages][-1]
- self.failUnlessEqual(self.message2.subject, latest_message.subject)
+ self.assertEqual(self.message2.subject, latest_message.subject)
# The parent_link for the latest message should be None
# because the parent is not a member of this bug's messages
# collection itself.
- self.failUnlessEqual(None, latest_message.parent)
+ self.assertIsNone(latest_message.parent)
class TestPostBugWithLargeCollections(TestCaseWithFactory):
@@ -319,7 +319,7 @@
'subscriptions', 'users_affected', 'users_unaffected',
'users_affected_with_dupes', 'messages', 'attachments',
'activity'):
- self.failUnless(
+ self.assertTrue(
IDoNotSnapshot.providedBy(IBug[field_name]),
'IBug.%s should not be included in snapshots, see bug 507642.'
% field_name)
=== modified file 'lib/lp/bugs/tests/test_bugtracker.py'
--- lib/lp/bugs/tests/test_bugtracker.py 2016-07-25 11:42:50 +0000
+++ lib/lp/bugs/tests/test_bugtracker.py 2018-01-02 14:40:48 +0000
@@ -168,48 +168,47 @@
'watches_with_unpushed_comments',
]
for attribute in attributes:
- self.failUnless(
+ self.assertTrue(
getattr(original, attribute, marker) is not marker,
"Attribute %s missing from bug tracker." % attribute)
snapshot = Snapshot(original, providing=IBugTracker)
for attribute in attributes:
- self.failUnless(
+ self.assertTrue(
getattr(snapshot, attribute, marker) is marker,
"Attribute %s not missing from snapshot." % attribute)
def test_watches_ready_to_check(self):
bug_tracker = self.factory.makeBugTracker()
# Initially there are no watches, so none need to be checked.
- self.failUnless(bug_tracker.watches_ready_to_check.is_empty())
+ self.assertTrue(bug_tracker.watches_ready_to_check.is_empty())
# A bug watch without a next_check set is not ready either.
bug_watch = self.factory.makeBugWatch(bugtracker=bug_tracker)
removeSecurityProxy(bug_watch).next_check = None
- self.failUnless(bug_tracker.watches_ready_to_check.is_empty())
+ self.assertTrue(bug_tracker.watches_ready_to_check.is_empty())
# If we set its next_check date, it will be ready.
removeSecurityProxy(bug_watch).next_check = (
datetime.now(utc) - timedelta(hours=1))
- self.failUnless(1, bug_tracker.watches_ready_to_check.count())
- self.failUnlessEqual(
- bug_watch, bug_tracker.watches_ready_to_check.one())
+ self.assertTrue(1, bug_tracker.watches_ready_to_check.count())
+ self.assertEqual(bug_watch, bug_tracker.watches_ready_to_check.one())
def test_watches_with_unpushed_comments(self):
bug_tracker = self.factory.makeBugTracker()
# Initially there are no watches, so there are no unpushed
# comments.
- self.failUnless(bug_tracker.watches_with_unpushed_comments.is_empty())
+ self.assertTrue(bug_tracker.watches_with_unpushed_comments.is_empty())
# A new bug watch has no comments, so the same again.
bug_watch = self.factory.makeBugWatch(bugtracker=bug_tracker)
- self.failUnless(bug_tracker.watches_with_unpushed_comments.is_empty())
+ self.assertTrue(bug_tracker.watches_with_unpushed_comments.is_empty())
# A comment linked to the bug watch will be found.
login_person(bug_watch.bug.owner)
message = self.factory.makeMessage(owner=bug_watch.owner)
bug_message = bug_watch.bug.linkMessage(message, bug_watch)
- self.failUnless(1, bug_tracker.watches_with_unpushed_comments.count())
- self.failUnlessEqual(
+ self.assertTrue(1, bug_tracker.watches_with_unpushed_comments.count())
+ self.assertEqual(
bug_watch, bug_tracker.watches_with_unpushed_comments.one())
# Once the comment has been pushed, it will no longer be found.
removeSecurityProxy(bug_message).remote_comment_id = 'brains'
- self.failUnless(bug_tracker.watches_with_unpushed_comments.is_empty())
+ self.assertTrue(bug_tracker.watches_with_unpushed_comments.is_empty())
def _assertBugWatchesAreCheckedInTheFuture(self):
"""Check the dates of all self.bug_tracker.watches.
=== modified file 'lib/lp/bugs/tests/test_bugwatch.py'
--- lib/lp/bugs/tests/test_bugwatch.py 2016-07-13 10:11:25 +0000
+++ lib/lp/bugs/tests/test_bugwatch.py 2018-01-02 14:40:48 +0000
@@ -343,7 +343,7 @@
watch = self.factory.makeBugWatch(bug=bug)
product_task.bugwatch = watch
# For a single-task bug the bug task is eligible for update.
- self.failUnlessEqual(
+ self.assertEqual(
[product_task], list(
removeSecurityProxy(watch).bugtasks_to_update))
# If we add a task such that the existing task becomes a
@@ -352,14 +352,14 @@
product_series_task = self.factory.makeBugTask(
bug=bug, target=product.development_focus)
product_series_task.bugwatch = watch
- self.failUnlessEqual(
+ self.assertEqual(
[product_series_task], list(
removeSecurityProxy(watch).bugtasks_to_update))
# But once the bug is marked as a duplicate,
# bugtasks_to_update yields nothing.
bug.markAsDuplicate(
self.factory.makeBug(target=product, owner=product.owner))
- self.failUnlessEqual(
+ self.assertEqual(
[], list(removeSecurityProxy(watch).bugtasks_to_update))
def test_updateStatus_with_duplicate_bug(self):
@@ -371,14 +371,14 @@
bug_task = bug.default_bugtask
bug_task.bugwatch = self.factory.makeBugWatch()
bug_task_initial_status = bug_task.status
- self.failIfEqual(BugTaskStatus.INPROGRESS, bug_task.status)
+ self.assertNotEqual(BugTaskStatus.INPROGRESS, bug_task.status)
bug_task.bugwatch.updateStatus('foo', BugTaskStatus.INPROGRESS)
- self.failUnlessEqual(bug_task_initial_status, bug_task.status)
+ self.assertEqual(bug_task_initial_status, bug_task.status)
# Once the task is no longer linked to a duplicate bug, the
# status will get updated.
bug.markAsDuplicate(None)
bug_task.bugwatch.updateStatus('foo', BugTaskStatus.INPROGRESS)
- self.failUnlessEqual(BugTaskStatus.INPROGRESS, bug_task.status)
+ self.assertEqual(BugTaskStatus.INPROGRESS, bug_task.status)
def test_updateImportance_with_duplicate_bug(self):
# Calling BugWatch.updateImportance() will not update the
@@ -389,27 +389,27 @@
bug_task = bug.default_bugtask
bug_task.bugwatch = self.factory.makeBugWatch()
bug_task_initial_importance = bug_task.importance
- self.failIfEqual(BugTaskImportance.HIGH, bug_task.importance)
+ self.assertNotEqual(BugTaskImportance.HIGH, bug_task.importance)
bug_task.bugwatch.updateImportance('foo', BugTaskImportance.HIGH)
- self.failUnlessEqual(bug_task_initial_importance, bug_task.importance)
+ self.assertEqual(bug_task_initial_importance, bug_task.importance)
# Once the task is no longer linked to a duplicate bug, the
# importance will get updated.
bug.markAsDuplicate(None)
bug_task.bugwatch.updateImportance('foo', BugTaskImportance.HIGH)
- self.failUnlessEqual(BugTaskImportance.HIGH, bug_task.importance)
+ self.assertEqual(BugTaskImportance.HIGH, bug_task.importance)
def test_get_bug_watch_ids(self):
# get_bug_watch_ids() yields the IDs for the given bug
# watches.
bug_watches = [self.factory.makeBugWatch()]
- self.failUnlessEqual(
+ self.assertEqual(
[bug_watch.id for bug_watch in bug_watches],
list(get_bug_watch_ids(bug_watches)))
def test_get_bug_watch_ids_with_iterator(self):
# get_bug_watch_ids() can also accept an iterator.
bug_watches = [self.factory.makeBugWatch()]
- self.failUnlessEqual(
+ self.assertEqual(
[bug_watch.id for bug_watch in bug_watches],
list(get_bug_watch_ids(iter(bug_watches))))
@@ -417,7 +417,7 @@
# If something resembling an ID is found, get_bug_watch_ids()
# yields it unaltered.
bug_watches = [1, 2, 3]
- self.failUnlessEqual(
+ self.assertEqual(
bug_watches, list(get_bug_watch_ids(bug_watches)))
def test_get_bug_watch_ids_with_mixed_list(self):
@@ -425,7 +425,7 @@
# objects are a mix of bug watches and IDs.
bug_watch = self.factory.makeBugWatch()
bug_watches = [1234, bug_watch]
- self.failUnlessEqual(
+ self.assertEqual(
[1234, bug_watch.id], list(get_bug_watch_ids(bug_watches)))
def test_get_bug_watch_ids_with_others_in_list(self):
@@ -475,16 +475,16 @@
bug_watch_set = getUtility(IBugWatchSet)
# Passing in the remote bug ID gets us every bug watch that
# refers to that remote bug.
- self.failUnlessEqual(
+ self.assertEqual(
set(bug_watches_alice),
set(bug_watch_set.getBugWatchesForRemoteBug('alice')))
- self.failUnlessEqual(
+ self.assertEqual(
set(bug_watches_bob),
set(bug_watch_set.getBugWatchesForRemoteBug('bob')))
# The search can be narrowed by passing in a list or other
# iterable collection of bug watch IDs.
bug_watches_limited = bug_watches_alice + bug_watches_bob[:1]
- self.failUnlessEqual(
+ self.assertEqual(
set(bug_watches_bob[:1]),
set(bug_watch_set.getBugWatchesForRemoteBug('bob', [
bug_watch.id for bug_watch in bug_watches_limited])))
@@ -509,11 +509,11 @@
def _checkStatusOfBugWatches(
self, last_checked_is_null, next_check_is_null, last_error_type):
for bug_watch in self.bug_watches:
- self.failUnlessEqual(
+ self.assertEqual(
last_checked_is_null, bug_watch.lastchecked is None)
- self.failUnlessEqual(
+ self.assertEqual(
next_check_is_null, bug_watch.next_check is None)
- self.failUnlessEqual(
+ self.assertEqual(
last_error_type, bug_watch.last_error_type)
def test_bulkSetError(self):
@@ -532,9 +532,9 @@
def _checkActivityForBugWatches(self, result, message, oops_id):
for bug_watch in self.bug_watches:
latest_activity = bug_watch.activity.first()
- self.failUnlessEqual(result, latest_activity.result)
- self.failUnlessEqual(message, latest_activity.message)
- self.failUnlessEqual(oops_id, latest_activity.oops_id)
+ self.assertEqual(result, latest_activity.result)
+ self.assertEqual(message, latest_activity.message)
+ self.assertEqual(oops_id, latest_activity.oops_id)
def test_bulkAddActivity(self):
# Called with only bug watches, bulkAddActivity() adds
@@ -629,7 +629,7 @@
# start of this test.
messages = [activity.message for activity in self.bug_watch.activity]
for i in range(MAX_SAMPLE_SIZE):
- self.failUnless("Activity %s" % i in messages)
+ self.assertIn("Activity %s" % i, messages)
class TestBugWatchResetting(TestCaseWithFactory):
=== modified file 'lib/lp/buildmaster/tests/test_builder.py'
--- lib/lp/buildmaster/tests/test_builder.py 2015-04-20 09:48:57 +0000
+++ lib/lp/buildmaster/tests/test_builder.py 2018-01-02 14:40:48 +0000
@@ -371,8 +371,8 @@
next_job = removeSecurityProxy(
self.frog_builder)._findBuildCandidate()
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(next_job)
- self.failUnlessEqual('primary', build.archive.name)
- self.failUnlessEqual('gedit', build.source_package_release.name)
+ self.assertEqual('primary', build.archive.name)
+ self.assertEqual('gedit', build.source_package_release.name)
# Now even if we set the build building, we'll still get the
# second non-ppa build for the same archive as the next candidate.
@@ -380,8 +380,8 @@
next_job = removeSecurityProxy(
self.frog_builder)._findBuildCandidate()
build = getUtility(IBinaryPackageBuildSet).getByQueueEntry(next_job)
- self.failUnlessEqual('primary', build.archive.name)
- self.failUnlessEqual('firefox', build.source_package_release.name)
+ self.assertEqual('primary', build.archive.name)
+ self.assertEqual('firefox', build.source_package_release.name)
def test_findBuildCandidate_for_recipe_build(self):
# Recipe builds with a higher score are selected first.
@@ -405,7 +405,7 @@
next_job = removeSecurityProxy(
self.frog_builder)._findBuildCandidate()
- self.failUnlessEqual(recipe_build_job, next_job)
+ self.assertEqual(recipe_build_job, next_job)
class TestFindRecipeBuildCandidates(TestFindBuildCandidateBase):
@@ -446,4 +446,4 @@
next_job = removeSecurityProxy(
self.frog_builder)._findBuildCandidate()
- self.failUnlessEqual(self.bq2, next_job)
+ self.assertEqual(self.bq2, next_job)
=== modified file 'lib/lp/buildmaster/tests/test_buildfarmjob.py'
--- lib/lp/buildmaster/tests/test_buildfarmjob.py 2015-02-17 07:39:47 +0000
+++ lib/lp/buildmaster/tests/test_buildfarmjob.py 2018-01-02 14:40:48 +0000
@@ -104,7 +104,7 @@
build_farm_job = getUtility(IBuildFarmJobSource).new(
job_type=BuildFarmJobType.PACKAGEBUILD,
date_created=ten_years_ago)
- self.failUnlessEqual(
+ self.assertEqual(
ten_years_ago, removeSecurityProxy(build_farm_job).date_created)
@@ -139,11 +139,11 @@
BuildStatus.BUILDING, date_started=now)
self.build_farm_job.updateStatus(
BuildStatus.FULLYBUILT, date_finished=now + duration)
- self.failUnlessEqual(duration, self.build_farm_job.duration)
+ self.assertEqual(duration, self.build_farm_job.duration)
def test_view_build_farm_job(self):
# Anonymous access can read public builds, but not edit.
- self.failUnlessEqual(
+ self.assertEqual(
BuildStatus.NEEDSBUILD, self.build_farm_job.status)
self.assertRaises(
Unauthorized, getattr, self.build_farm_job, 'retry')
=== modified file 'lib/lp/buildmaster/tests/test_packagebuild.py'
--- lib/lp/buildmaster/tests/test_packagebuild.py 2015-02-17 07:39:47 +0000
+++ lib/lp/buildmaster/tests/test_packagebuild.py 2018-01-02 14:40:48 +0000
@@ -62,7 +62,7 @@
lfa = self.factory.makeLibraryFileAlias('mybuildlog.txt')
self.package_build.setLog(lfa)
log_url = self.package_build.log_url
- self.failUnlessEqual(
+ self.assertEqual(
'http://launchpad.dev/~joe/+archive/ubuntu/ppa/'
'+recipebuild/%d/+files/mybuildlog.txt' % (
self.package_build.id),
@@ -72,8 +72,8 @@
# The given content is uploaded to the librarian and linked as
# the upload log.
self.package_build.storeUploadLog("Some content")
- self.failIfEqual(None, self.package_build.upload_log)
- self.failUnlessEqual(
+ self.assertIsNotNone(self.package_build.upload_log)
+ self.assertEqual(
hashlib.sha1("Some content").hexdigest(),
self.package_build.upload_log.content.sha1)
@@ -83,16 +83,16 @@
login('admin@xxxxxxxxxxxxx')
self.package_build.archive.buildd_secret = 'sekrit'
self.package_build.archive.private = True
- self.failUnless(self.package_build.is_private)
+ self.assertTrue(self.package_build.is_private)
self.package_build.storeUploadLog("Some content")
- self.failUnless(self.package_build.upload_log.restricted)
+ self.assertTrue(self.package_build.upload_log.restricted)
def test_storeUploadLog_unicode(self):
# Unicode upload logs are uploaded as UTF-8.
unicode_content = u"Some content \N{SNOWMAN}"
self.package_build.storeUploadLog(unicode_content)
- self.failIfEqual(None, self.package_build.upload_log)
- self.failUnlessEqual(
+ self.assertIsNotNone(self.package_build.upload_log)
+ self.assertEqual(
hashlib.sha1(unicode_content.encode('utf-8')).hexdigest(),
self.package_build.upload_log.content.sha1)
@@ -101,7 +101,7 @@
Store.of(self.package_build).flush()
self.package_build.storeUploadLog("Some content")
log_url = self.package_build.upload_log_url
- self.failUnlessEqual(
+ self.assertEqual(
'http://launchpad.dev/~joe/+archive/ubuntu/ppa/'
'+recipebuild/%d/+files/upload_%d_log.txt' % (
self.package_build.id, self.package_build.id),
=== modified file 'lib/lp/code/browser/tests/test_branchmergeproposal.py'
--- lib/lp/code/browser/tests/test_branchmergeproposal.py 2017-11-05 08:32:30 +0000
+++ lib/lp/code/browser/tests/test_branchmergeproposal.py 2018-01-02 14:40:48 +0000
@@ -501,7 +501,7 @@
view = getMultiAdapter(
(self.bmp, LaunchpadTestRequest()), name='+votes')
- self.failUnless(
+ self.assertTrue(
isinstance(view, BranchMergeProposalVoteView),
"The +votes page for a BranchMergeProposal is expected to be a "
"BranchMergeProposalVoteView")
=== modified file 'lib/lp/codehosting/puller/tests/test_scheduler.py'
--- lib/lp/codehosting/puller/tests/test_scheduler.py 2012-10-31 00:54:33 +0000
+++ lib/lp/codehosting/puller/tests/test_scheduler.py 2018-01-02 14:40:48 +0000
@@ -93,7 +93,7 @@
manager = self.makeJobScheduler()
manager.lockfilename = self.masterlock
manager.lock()
- self.failUnless(os.path.exists(self.masterlock))
+ self.assertTrue(os.path.exists(self.masterlock))
manager.unlock()
def testManagerEnforcesLocks(self):
@@ -103,7 +103,7 @@
anothermanager = self.makeJobScheduler()
anothermanager.lockfilename = self.masterlock
self.assertRaises(scheduler.LockError, anothermanager.lock)
- self.failUnless(os.path.exists(self.masterlock))
+ self.assertTrue(os.path.exists(self.masterlock))
manager.unlock()
def test_run_calls_acquireBranchToPull(self):
@@ -162,9 +162,8 @@
The failure is asserted to contain an exception of type
`exception_type`."""
- self.failUnless(self.puller_protocol.failure is not None)
- self.failUnless(
- self.puller_protocol.failure.check(exception_type))
+ self.assertIsNotNone(self.puller_protocol.failure)
+ self.assertTrue(self.puller_protocol.failure.check(exception_type))
def assertProtocolInState0(self):
"""Assert that the protocol is in state 0."""
=== modified file 'lib/lp/codehosting/scanner/tests/test_bzrsync.py'
--- lib/lp/codehosting/scanner/tests/test_bzrsync.py 2016-07-01 20:33:06 +0000
+++ lib/lp/codehosting/scanner/tests/test_bzrsync.py 2018-01-02 14:40:48 +0000
@@ -299,13 +299,13 @@
def assertInMainline(self, revision_id, db_branch):
"""Assert that `revision_id` is in the mainline of `db_branch`."""
- self.failUnless(
+ self.assertTrue(
self.isMainline(db_branch, revision_id),
"%r not in mainline of %r" % (revision_id, db_branch))
def assertNotInMainline(self, revision_id, db_branch):
"""Assert that `revision_id` is not in the mainline of `db_branch`."""
- self.failIf(
+ self.assertFalse(
self.isMainline(db_branch, revision_id),
"%r in mainline of %r" % (revision_id, db_branch))
=== modified file 'lib/lp/codehosting/scanner/tests/test_email.py'
--- lib/lp/codehosting/scanner/tests/test_email.py 2013-07-04 07:58:00 +0000
+++ lib/lp/codehosting/scanner/tests/test_email.py 2018-01-02 14:40:48 +0000
@@ -66,13 +66,6 @@
LaunchpadZopelessLayer.txn.commit()
return branch
- def assertTextIn(self, expected, text):
- """Assert that expected is in text.
-
- Report expected and text in case of failure.
- """
- self.failUnless(expected in text, '%r not in %r' % (expected, text))
-
def test_empty_branch(self):
self.makeBzrSync(self.db_branch).syncBranchAndClose()
JobRunner.fromReady(getUtility(IRevisionMailJobSource)).runAll()
@@ -81,7 +74,7 @@
expected = 'First scan of the branch detected 0 revisions'
message = email.message_from_string(initial_email[2])
email_body = message.get_payload()
- self.assertTextIn(expected, email_body)
+ self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
'[Branch %s] 0 revisions' % self.db_branch.unique_name,
message['Subject'])
@@ -96,7 +89,7 @@
' in the revision history of the=\n branch.')
message = email.message_from_string(initial_email[2])
email_body = message.get_payload()
- self.assertTextIn(expected, email_body)
+ self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
'[Branch %s] 1 revision' % self.db_branch.unique_name,
message['Subject'])
@@ -114,7 +107,7 @@
expected = '1 revision was removed from the branch.'
message = email.message_from_string(uncommit_email[2])
email_body = message.get_payload()
- self.assertTextIn(expected, email_body)
+ self.assertIn(expected, email_body)
self.assertEmailHeadersEqual(
'[Branch %s] 1 revision removed' % self.db_branch.unique_name,
message['Subject'])
@@ -139,10 +132,10 @@
[recommit_email, uncommit_email] = stub.test_emails
uncommit_email_body = uncommit_email[2]
expected = '1 revision was removed from the branch.'
- self.assertTextIn(expected, uncommit_email_body)
+ self.assertIn(expected, uncommit_email_body)
subject = (
'Subject: [Branch %s] Test branch' % self.db_branch.unique_name)
- self.assertTextIn(expected, uncommit_email_body)
+ self.assertIn(expected, uncommit_email_body)
recommit_email_msg = email.message_from_string(recommit_email[2])
recommit_email_body = recommit_email_msg.get_payload()[0].get_payload(
@@ -157,7 +150,7 @@
'added:\n hello.txt',
]
for bit in body_bits:
- self.assertTextIn(bit, recommit_email_body)
+ self.assertIn(bit, recommit_email_body)
class TestViaCelery(TestCaseWithFactory):
=== modified file 'lib/lp/codehosting/sshserver/tests/test_session.py'
--- lib/lp/codehosting/sshserver/tests/test_session.py 2012-01-01 02:58:52 +0000
+++ lib/lp/codehosting/sshserver/tests/test_session.py 2018-01-02 14:40:48 +0000
@@ -164,7 +164,7 @@
def test_providesISession(self):
# ExecOnlySession must provide ISession.
- self.failUnless(ISession.providedBy(self.session),
+ self.assertTrue(ISession.providedBy(self.session),
"ExecOnlySession doesn't implement ISession")
def test_closedDoesNothingWhenNoCommand(self):
@@ -247,7 +247,7 @@
from twisted.internet import reactor
adapter = ExecOnlySession.getAvatarAdapter()
session = adapter(self.avatar)
- self.failUnless(isinstance(session, ExecOnlySession),
+ self.assertTrue(isinstance(session, ExecOnlySession),
"ISession(avatar) doesn't adapt to ExecOnlySession. "
"Got %r instead." % (session,))
self.assertIs(self.avatar, session.avatar)
@@ -303,7 +303,7 @@
def test_makeRestrictedExecOnlySession(self):
# A RestrictedExecOnlySession is constructed with an avatar, a reactor
# and an expected command.
- self.failUnless(
+ self.assertTrue(
isinstance(self.session, RestrictedExecOnlySession),
"%r not an instance of RestrictedExecOnlySession"
% (self.session,))
@@ -353,7 +353,7 @@
adapter = RestrictedExecOnlySession.getAvatarAdapter(
lookup_template)
session = adapter(self.avatar)
- self.failUnless(
+ self.assertTrue(
isinstance(session, RestrictedExecOnlySession),
"ISession(avatar) doesn't adapt to RestrictedExecOnlySession. "
"Got %r instead." % (session,))
@@ -377,7 +377,7 @@
# adapts to a RestrictedExecOnlySession. This means that a
# RestrictedExecOnlySession handles any requests to execute a command.
session = ISession(self.avatar)
- self.failUnless(
+ self.assertTrue(
isinstance(session, RestrictedExecOnlySession),
"ISession(avatar) doesn't adapt to ExecOnlySession. "
"Got %r instead." % (session,))
@@ -404,7 +404,7 @@
"[codehosting]\nuse_forking_daemon: False\n")
self.addCleanup(config.pop, 'codehosting-no-forking')
session = ISession(self.avatar)
- self.failIf(isinstance(session, ForkingRestrictedExecOnlySession),
+ self.assertFalse(isinstance(session, ForkingRestrictedExecOnlySession),
"ISession(avatar) shouldn't adapt to "
" ForkingRestrictedExecOnlySession when forking is disabled. ")
@@ -413,7 +413,7 @@
"[codehosting]\nuse_forking_daemon: True\n")
self.addCleanup(config.pop, 'codehosting-forking')
session = ISession(self.avatar)
- self.failUnless(
+ self.assertTrue(
isinstance(session, ForkingRestrictedExecOnlySession),
"ISession(avatar) doesn't adapt to "
" ForkingRestrictedExecOnlySession. "
=== modified file 'lib/lp/codehosting/tests/test_bzrutils.py'
--- lib/lp/codehosting/tests/test_bzrutils.py 2016-12-31 05:01:10 +0000
+++ lib/lp/codehosting/tests/test_bzrutils.py 2018-01-02 14:40:48 +0000
@@ -232,7 +232,7 @@
# Check that our set up worked: remote_branch is Remote and
# source_branch is not.
self.assertIsInstance(remote_branch, RemoteBranch)
- self.failIf(isinstance(vfs_branch, RemoteBranch))
+ self.assertNotIsInstance(vfs_branch, RemoteBranch)
# Now, get_vfs_format_classes on both branches returns the same format
# information.
self.assertEqual(
=== modified file 'lib/lp/codehosting/tests/test_format_comparison.py'
--- lib/lp/codehosting/tests/test_format_comparison.py 2015-10-19 10:56:16 +0000
+++ lib/lp/codehosting/tests/test_format_comparison.py 2018-01-02 14:40:48 +0000
@@ -56,7 +56,7 @@
def testAllIdentical(self):
# identical_formats should return True when both branches have the same
# bzrdir, repository, and branch formats.
- self.failUnless(
+ self.assertTrue(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA())))
@@ -64,7 +64,7 @@
def testDifferentBzrDirFormats(self):
# identical_formats should return False when both branches have the
# different bzrdir formats.
- self.failIf(
+ self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
StubBranch(BzrDirFormatB(), RepoFormatA(), BranchFormatA())))
@@ -72,7 +72,7 @@
def testDifferentRepositoryFormats(self):
# identical_formats should return False when both branches have the
# different repository formats.
- self.failIf(
+ self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
StubBranch(BzrDirFormatA(), RepoFormatB(), BranchFormatA())))
@@ -80,7 +80,7 @@
def testDifferentBranchFormats(self):
# identical_formats should return False when both branches have the
# different branch formats.
- self.failIf(
+ self.assertFalse(
identical_formats(
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatA()),
StubBranch(BzrDirFormatA(), RepoFormatA(), BranchFormatB())))
=== modified file 'lib/lp/codehosting/vfs/tests/test_filesystem.py'
--- lib/lp/codehosting/vfs/tests/test_filesystem.py 2011-10-28 02:51:42 +0000
+++ lib/lp/codehosting/vfs/tests/test_filesystem.py 2018-01-02 14:40:48 +0000
@@ -49,8 +49,8 @@
transport = self.getTransport('~%s/+junk' % self.requester.name)
transport.mkdir('foo')
transport.mkdir('bar')
- self.failUnless(stat.S_ISDIR(transport.stat('foo').st_mode))
- self.failUnless(stat.S_ISDIR(transport.stat('bar').st_mode))
+ self.assertTrue(stat.S_ISDIR(transport.stat('foo').st_mode))
+ self.assertTrue(stat.S_ISDIR(transport.stat('bar').st_mode))
# Try to remove a branch directory, which is not allowed.
self.assertRaises(
=== modified file 'lib/lp/hardwaredb/scripts/tests/test_hwdb_submission_processing.py'
--- lib/lp/hardwaredb/scripts/tests/test_hwdb_submission_processing.py 2014-12-07 13:18:18 +0000
+++ lib/lp/hardwaredb/scripts/tests/test_hwdb_submission_processing.py 2018-01-02 14:40:48 +0000
@@ -1660,13 +1660,13 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_PCI_SIDE]
- self.failUnless(device.is_real_device,
+ self.assertTrue(device.is_real_device,
'Device with info.bus property not treated as a '
'real device.')
self.renameInfoBusToInfoSubsystem(devices)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_PCI_SIDE]
- self.failUnless(device.is_real_device,
+ self.assertTrue(device.is_real_device,
'Device with info.subsystem property not treated as '
'a real device.')
@@ -1691,9 +1691,9 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[UDI_HAL_STORAGE_DEVICE]
- self.failIf(device.is_real_device,
- 'Device without info.bus property treated as a '
- 'real device')
+ self.assertFalse(device.is_real_device,
+ 'Device without info.bus property treated as a '
+ 'real device')
def testHALDeviceRealDeviceHALBusValueIgnored(self):
"""Test of HALDevice.is_real_device: ignored values of info.bus.
@@ -1738,7 +1738,7 @@
properties['info.bus'] = (tested_bus, 'str')
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_USB_HUB_IF0]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Device with info.bus=%s treated as a real device'
% tested_bus)
@@ -1748,7 +1748,7 @@
properties['info.subsystem'] = (tested_bus, 'str')
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_USB_HUB_IF0]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Device with info.subsystem=%s treated as a real device'
% tested_bus)
@@ -1891,13 +1891,13 @@
parser.buildHalDeviceList(parsed_data)
scsi_device = parser.devices[self.UDI_USB_STORAGE_SCSI_DEVICE]
- self.failIf(scsi_device.is_real_device,
+ self.assertFalse(scsi_device.is_real_device,
'Unexpected result of HWDevice.is_real_device for a HAL SCSI '
'device as a subdevice of a USB storage device.')
self.renameInfoBusToInfoSubsystem(devices)
scsi_device = parser.devices[self.UDI_USB_STORAGE_SCSI_DEVICE]
- self.failIf(scsi_device.is_real_device,
+ self.assertFalse(scsi_device.is_real_device,
'Unexpected result of HWDevice.is_real_device for a HAL SCSI '
'device as a subdevice of a USB storage device.')
@@ -1921,7 +1921,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_COMPUTER]
- self.failUnless(device.is_real_device,
+ self.assertTrue(device.is_real_device,
'Root device not treated as a real device')
def testHALDeviceRealChildren(self):
@@ -2042,7 +2042,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_SATA_CONTROLLER]
- self.failUnless(
+ self.assertTrue(
device.has_reliable_data,
'Regular device treated as not having reliable data.')
@@ -2074,7 +2074,7 @@
properties['info.bus'] = (bus, 'str')
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_SATA_CONTROLLER]
- self.failIf(device.has_reliable_data,
+ self.assertFalse(device.has_reliable_data,
'Device with bus=%s treated as having reliable data.' % bus)
def testHasReliableDataRootDevice(self):
@@ -2105,7 +2105,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_COMPUTER]
- self.failUnless(
+ self.assertTrue(
device.has_reliable_data,
"Root device not treated as having reliable data.")
@@ -2174,7 +2174,7 @@
parser.submission_key = submission_key
parser.buildHalDeviceList(test_parsed_data)
device = parser.devices[self.UDI_SATA_CONTROLLER]
- self.failIf(
+ self.assertFalse(
device.has_reliable_data,
'Device with missing property %s treated as having reliable'
'data.' % missing_data)
@@ -2224,7 +2224,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(parsed_data)
device = parser.devices[self.UDI_SATA_DISK]
- self.failIf(
+ self.assertFalse(
device.has_reliable_data,
'IDE Device with missing properties vendor ID, product ID, '
'product name treated as having reliabledata.')
@@ -3079,7 +3079,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_STORAGE]
- self.failUnless(
+ self.assertTrue(
device.is_real_device,
'Testing info.bus property: Regular USB Device not treated '
'as a real device.')
@@ -3087,7 +3087,7 @@
self.renameInfoBusToInfoSubsystem()
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_STORAGE]
- self.failUnless(
+ self.assertTrue(
device.is_real_device,
'Testing info.subsystem property: Regular USB Device not treated '
'as a real device.')
@@ -3102,7 +3102,7 @@
parser = SubmissionParser(self.log)
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.bus property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3110,7 +3110,7 @@
self.renameInfoBusToInfoSubsystem()
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.subsystem property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3128,7 +3128,7 @@
parser.submission_key = 'USB device test 1'
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.bus property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3141,7 +3141,7 @@
self.renameInfoBusToInfoSubsystem()
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.subsystem property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3164,7 +3164,7 @@
parser.submission_key = 'USB device test 2'
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.bus property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3177,7 +3177,7 @@
self.renameInfoBusToInfoSubsystem()
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.subsystem property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3200,7 +3200,7 @@
parser.submission_key = 'USB device test 3'
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.bus property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3214,7 +3214,7 @@
# for HALDevice.is_real_device. The USB host controller in the
# test data is an example.
device = parser.devices[self.UDI_USB_CONTROLLER_PCI_SIDE]
- self.failUnless(
+ self.assertTrue(
device.is_real_device,
'Testing info.bus property: Device with existing info.bus '
'property not treated as a real device.')
@@ -3222,7 +3222,7 @@
self.renameInfoBusToInfoSubsystem()
parser.buildHalDeviceList(self.parsed_data)
device = parser.devices[self.UDI_USB_CONTROLLER_USB_SIDE]
- self.failIf(
+ self.assertFalse(
device.is_real_device,
'Testing info.subsystem property: USB Device with vendor/product '
'ID 0:0 property treated as a real device.')
@@ -3233,7 +3233,7 @@
+ self.UDI_USB_CONTROLLER_USB_SIDE)
device = parser.devices[self.UDI_USB_CONTROLLER_PCI_SIDE]
- self.failUnless(
+ self.assertTrue(
device.is_real_device,
'Testing info.subsystem property: Device with existing info.bus '
'property not treated as a real device.')
@@ -5128,8 +5128,8 @@
device_driver_links_in_submission = [
submission_device.device_driver_link
for submission_device in submission_devices]
- self.failUnless(
- device_driver_link in device_driver_links_in_submission,
+ self.assertIn(
+ device_driver_link, device_driver_links_in_submission,
'No entry in HWSubmissionDevice for bus %s, '
'vendor %s, product %s, submission %s'
% (bus, vendor_id, product_id, submission.submission_key))
@@ -5150,8 +5150,8 @@
'No entry in HWDeviceDriverLink for bus %s, '
'vendor %s, product %s, driver %s'
% (bus, vendor_id, product_id, driver_name))
- self.failUnless(
- device_driver_link in device_driver_links_in_submission,
+ self.assertIn(
+ device_driver_link, device_driver_links_in_submission,
'No entry in HWSubmissionDevice for bus %s, '
'vendor %s, product %s, driver %s, submission %s'
% (bus, vendor_id, product_id, driver_name,
@@ -5183,7 +5183,7 @@
submission_data, False, submission_key)
parser = SubmissionParser(self.log)
result = parser.processSubmission(submission)
- self.failUnless(result,
+ self.assertTrue(result,
'Simple valid uncompressed submission could not be '
'processed. Logged errors:\n%s'
% self.getLogData())
@@ -5201,7 +5201,7 @@
submission_data, True, submission_key)
parser = SubmissionParser(self.log)
result = parser.processSubmission(submission)
- self.failUnless(result,
+ self.assertTrue(result,
'Simple valid compressed submission could not be '
'processed. Logged errors:\n%s'
% self.getLogData())
@@ -5222,7 +5222,8 @@
submission_data, True, submission_key)
parser = SubmissionParser(self.log)
result = parser.processSubmission(submission)
- self.failIf(result, 'Formally invalid submission treated as valid.')
+ self.assertFalse(
+ result, 'Formally invalid submission treated as valid.')
def testProcessSubmissionInconsistentData(self):
"""Test of SubmissionParser.processSubmission().
@@ -5247,7 +5248,7 @@
submission_data, True, submission_key)
parser = SubmissionParser(self.log)
result = parser.processSubmission(submission)
- self.failIf(
+ self.assertFalse(
result, 'Submission with inconsistent data treated as valid.')
def test_processSubmission_udev_data(self):
@@ -5353,7 +5354,7 @@
submission_data, False, submission_key)
parser = SubmissionParser(self.log)
result = parser.processSubmission(submission)
- self.failUnless(
+ self.assertTrue(
result,
'Real submission data not processed. Logged errors:\n%s'
% self.getLogData())
@@ -5462,7 +5463,7 @@
expected_message = (
'Exception while processing HWDB submission '
'test_submission_id_1 (OOPS-')
- self.failUnless(
+ self.assertTrue(
messages.startswith(expected_message),
'Unexpected log message: %r' % messages)
@@ -5499,7 +5500,7 @@
expected_message = (
'Could not reach the Librarian while processing HWDB '
'submission submission-2 (OOPS-')
- self.failUnless(
+ self.assertTrue(
messages.startswith(expected_message),
'Unexpected log messages: %r' % messages)
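Two hunks in this file also replace failUnless(item in collection, msg) with assertIn(item, collection, msg); behaviour is the same, but assertIn reports both the item and the container when it fails. A short sketch of that pair (names and values are made up for illustration):

    import unittest

    class MembershipAssertionExample(unittest.TestCase):

        def test_assert_in(self):
            device_driver_links = ['usb 1234:5678', 'pci abcd:ef01']
            # self.failUnless(link in links, msg)  ->  self.assertIn(link, links, msg)
            self.assertIn(
                'usb 1234:5678', device_driver_links,
                'No entry found for the expected device/driver link.')

    if __name__ == '__main__':
        unittest.main()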
=== modified file 'lib/lp/registry/browser/tests/test_person.py'
--- lib/lp/registry/browser/tests/test_person.py 2017-05-12 10:57:33 +0000
+++ lib/lp/registry/browser/tests/test_person.py 2018-01-02 14:40:48 +0000
@@ -439,7 +439,7 @@
# authorised to view the PPA.
login(ANONYMOUS)
person_view = PersonView(self.owner, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
def test_viewing_person_without_ppa(self):
# If the context person does not have a ppa then the section
@@ -447,28 +447,28 @@
login(ANONYMOUS)
person_without_ppa = self.factory.makePerson()
person_view = PersonView(person_without_ppa, LaunchpadTestRequest())
- self.failIf(person_view.should_show_ppa_section)
+ self.assertFalse(person_view.should_show_ppa_section)
def test_viewing_self(self):
# If the current user has edit access to the context person then
# the section should always display.
login_person(self.owner)
person_view = PersonView(self.owner, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
# If the ppa is private, the section is still displayed to
# a user with edit access to the person.
self.make_ppa_private(self.person_ppa)
login_person(self.owner)
person_view = PersonView(self.owner, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
# Even a person without a PPA will see the section when viewing
# themselves.
person_without_ppa = self.factory.makePerson()
login_person(person_without_ppa)
person_view = PersonView(person_without_ppa, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
def test_anon_viewing_person_with_private_ppa(self):
# If the ppa is private, the ppa section will not be displayed
@@ -476,13 +476,13 @@
self.make_ppa_private(self.person_ppa)
login(ANONYMOUS)
person_view = PersonView(self.owner, LaunchpadTestRequest())
- self.failIf(person_view.should_show_ppa_section)
+ self.assertFalse(person_view.should_show_ppa_section)
# But if the context person has a second ppa that is public,
# then anon users will see the section.
self.factory.makeArchive(owner=self.owner)
person_view = PersonView(self.owner, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
def test_viewing_team_with_private_ppa(self):
# If a team PPA is private, the ppa section will be displayed
@@ -495,18 +495,18 @@
# So the member will see the section.
person_view = PersonView(self.team, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
# But other users who are not members will not.
non_member = self.factory.makePerson()
login_person(non_member)
person_view = PersonView(self.team, LaunchpadTestRequest())
- self.failIf(person_view.should_show_ppa_section)
+ self.assertFalse(person_view.should_show_ppa_section)
# Unless the team also has another ppa which is public.
self.factory.makeArchive(owner=self.team)
person_view = PersonView(self.team, LaunchpadTestRequest())
- self.failUnless(person_view.should_show_ppa_section)
+ self.assertTrue(person_view.should_show_ppa_section)
class TestPersonRenameFormMixin:
=== modified file 'lib/lp/registry/tests/test_distribution.py'
--- lib/lp/registry/tests/test_distribution.py 2015-10-13 13:22:08 +0000
+++ lib/lp/registry/tests/test_distribution.py 2018-01-02 14:40:48 +0000
@@ -660,10 +660,10 @@
ws_distro = self.wsObject(distro, distro.owner)
now = datetime.datetime.now(tz=pytz.utc)
day = datetime.timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
[oopsid],
ws_distro.findReferencedOOPS(start_date=now - day, end_date=now))
- self.failUnlessEqual(
+ self.assertEqual(
[],
ws_distro.findReferencedOOPS(
start_date=now + day, end_date=now + day))
@@ -678,6 +678,6 @@
ws_distro = self.wsObject(distro, distro.owner)
now = datetime.datetime.now(tz=pytz.utc)
day = datetime.timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
[],
ws_distro.findReferencedOOPS(start_date=now - day, end_date=now))
=== modified file 'lib/lp/registry/tests/test_distributionmirror.py'
--- lib/lp/registry/tests/test_distributionmirror.py 2015-02-18 18:44:32 +0000
+++ lib/lp/registry/tests/test_distributionmirror.py 2018-01-02 14:40:48 +0000
@@ -52,24 +52,25 @@
removeSecurityProxy(bin_mirror).freshness = freshness
def test_archive_mirror_without_content_should_be_disabled(self):
- self.failUnless(self.archive_mirror.shouldDisable())
+ self.assertTrue(self.archive_mirror.shouldDisable())
def test_archive_mirror_with_any_content_should_not_be_disabled(self):
self._create_source_mirror(
self.hoary, PackagePublishingPocket.RELEASE,
self.hoary.components[0], MirrorFreshness.UP)
flush_database_updates()
- self.failIf(self.archive_mirror.shouldDisable())
+ self.assertFalse(self.archive_mirror.shouldDisable())
def test_cdimage_mirror_not_missing_content_should_not_be_disabled(self):
expected_file_count = 1
self.cdimage_mirror.ensureMirrorCDImageSeries(
self.hoary, flavour='ubuntu')
- self.failIf(self.cdimage_mirror.shouldDisable(expected_file_count))
+ self.assertFalse(
+ self.cdimage_mirror.shouldDisable(expected_file_count))
def test_cdimage_mirror_missing_content_should_be_disabled(self):
expected_file_count = 1
- self.failUnless(
+ self.assertTrue(
self.cdimage_mirror.shouldDisable(expected_file_count))
def test_delete_all_mirror_cdimage_series(self):
@@ -82,9 +83,9 @@
self.assertEqual(0, len(self.cdimage_mirror.cdimage_series))
def test_archive_mirror_without_content_freshness(self):
- self.failIf(self.archive_mirror.source_series or
- self.archive_mirror.arch_series)
- self.failUnlessEqual(
+ self.assertFalse(self.archive_mirror.source_series)
+ self.assertFalse(self.archive_mirror.arch_series)
+ self.assertEqual(
self.archive_mirror.getOverallFreshness(),
MirrorFreshness.UNKNOWN)
@@ -96,7 +97,7 @@
self.hoary, PackagePublishingPocket.RELEASE,
self.hoary.components[1], MirrorFreshness.TWODAYSBEHIND)
flush_database_updates()
- self.failUnlessEqual(
+ self.assertEqual(
removeSecurityProxy(self.archive_mirror).source_mirror_freshness,
MirrorFreshness.TWODAYSBEHIND)
@@ -108,7 +109,7 @@
self.hoary_i386, PackagePublishingPocket.RELEASE,
self.hoary.components[1], MirrorFreshness.ONEHOURBEHIND)
flush_database_updates()
- self.failUnlessEqual(
+ self.assertEqual(
removeSecurityProxy(self.archive_mirror).arch_mirror_freshness,
MirrorFreshness.ONEHOURBEHIND)
@@ -120,7 +121,7 @@
self.hoary, PackagePublishingPocket.RELEASE,
self.hoary.components[1], MirrorFreshness.TWODAYSBEHIND)
flush_database_updates()
- self.failUnlessEqual(
+ self.assertEqual(
self.archive_mirror.getOverallFreshness(),
MirrorFreshness.TWODAYSBEHIND)
@@ -132,7 +133,7 @@
self.hoary_i386, PackagePublishingPocket.RELEASE,
self.hoary.components[1], MirrorFreshness.ONEHOURBEHIND)
flush_database_updates()
- self.failUnlessEqual(
+ self.assertEqual(
self.archive_mirror.getOverallFreshness(),
MirrorFreshness.ONEHOURBEHIND)
@@ -152,7 +153,7 @@
self.hoary.components[1], MirrorFreshness.TWODAYSBEHIND)
flush_database_updates()
- self.failUnlessEqual(
+ self.assertEqual(
self.archive_mirror.getOverallFreshness(),
MirrorFreshness.TWODAYSBEHIND)
@@ -164,19 +165,19 @@
# notified when it's disabled --it doesn't matter whether it was
# previously enabled or disabled.
self.factory.makeMirrorProbeRecord(mirror)
- self.failUnless(mirror.enabled)
+ self.assertTrue(mirror.enabled)
log = 'Got a 404 on http://foo/baz'
mirror.disable(notify_owner=True, log=log)
# A notification was sent to the owner and other to the mirror admins.
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 2)
+ self.assertEqual(len(stub.test_emails), 2)
stub.test_emails = []
mirror.disable(notify_owner=True, log=log)
# Again, a notification was sent to the owner and other to the mirror
# admins.
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 2)
+ self.assertEqual(len(stub.test_emails), 2)
stub.test_emails = []
# For mirrors that have been probed more than once, we'll only notify
@@ -186,7 +187,7 @@
mirror.disable(notify_owner=True, log=log)
# A notification was sent to the owner and other to the mirror admins.
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 2)
+ self.assertEqual(len(stub.test_emails), 2)
stub.test_emails = []
# We can always disable notifications to the owner by passing
@@ -194,14 +195,14 @@
mirror.enabled = True
mirror.disable(notify_owner=False, log=log)
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 1)
+ self.assertEqual(len(stub.test_emails), 1)
stub.test_emails = []
mirror.enabled = False
mirror.disable(notify_owner=True, log=log)
# No notifications were sent this time
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 0)
+ self.assertEqual(len(stub.test_emails), 0)
stub.test_emails = []
def test_no_email_sent_to_uncontactable_owner(self):
@@ -221,7 +222,7 @@
self.factory.makeMirrorProbeRecord(mirror)
mirror.disable(notify_owner=True, log="It broke.")
transaction.commit()
- self.failUnlessEqual(len(stub.test_emails), 1)
+ self.assertEqual(len(stub.test_emails), 1)
class TestDistributionMirrorSet(TestCase):
@@ -235,7 +236,7 @@
This function ensures the orderBy argument given to it contains
the 'random' string in its first item.
"""
- self.failUnlessEqual(kw['orderBy'][0].name, 'random')
+ self.assertEqual(kw['orderBy'][0].name, 'random')
return [1, 2, 3]
orig_select = DistributionMirror.select
@@ -256,11 +257,11 @@
main_mirror = getUtility(ILaunchpadCelebrities).ubuntu_archive_mirror
mirrors = getUtility(IDistributionMirrorSet).getBestMirrorsForCountry(
france, MirrorContent.ARCHIVE)
- self.failUnless(len(mirrors) > 1, "Not enough mirrors")
- self.failUnlessEqual(main_mirror, mirrors[-1])
+ self.assertTrue(len(mirrors) > 1, "Not enough mirrors")
+ self.assertEqual(main_mirror, mirrors[-1])
main_mirror = getUtility(ILaunchpadCelebrities).ubuntu_cdimage_mirror
mirrors = getUtility(IDistributionMirrorSet).getBestMirrorsForCountry(
france, MirrorContent.RELEASE)
- self.failUnless(len(mirrors) > 1, "Not enough mirrors")
- self.failUnlessEqual(main_mirror, mirrors[-1])
+ self.assertTrue(len(mirrors) > 1, "Not enough mirrors")
+ self.assertEqual(main_mirror, mirrors[-1])
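Besides the straight failUnlessEqual -> assertEqual renames, the test_archive_mirror_without_content_freshness hunk splits a single failIf(a or b) into two assertFalse calls, so a failure points at the offending attribute. A sketch of both patterns with placeholder values (not code from this branch):

    import unittest

    class EqualityAssertionExample(unittest.TestCase):

        def test_split_condition_and_assert_equal(self):
            source_series = []
            arch_series = []
            # self.failIf(a or b) is split into two assertions:
            self.assertFalse(source_series)
            self.assertFalse(arch_series)
            # self.failUnlessEqual(a, b)  ->  self.assertEqual(a, b)
            self.assertEqual(len(source_series), 0)

    if __name__ == '__main__':
        unittest.main()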
=== modified file 'lib/lp/registry/tests/test_distributionmirror_prober.py'
--- lib/lp/registry/tests/test_distributionmirror_prober.py 2016-02-05 16:51:12 +0000
+++ lib/lp/registry/tests/test_distributionmirror_prober.py 2018-01-02 14:40:48 +0000
@@ -120,22 +120,22 @@
def test_environment_http_proxy_is_handled_correctly(self):
os.environ['http_proxy'] = 'http://squid.internal:3128'
prober = ProberFactory(self.urls['200'])
- self.failUnlessEqual(prober.request_host, 'localhost')
- self.failUnlessEqual(prober.request_port, self.port)
- self.failUnlessEqual(prober.request_path, '/valid-mirror')
- self.failUnlessEqual(prober.connect_host, 'squid.internal')
- self.failUnlessEqual(prober.connect_port, 3128)
- self.failUnlessEqual(prober.connect_path, self.urls['200'])
+ self.assertEqual(prober.request_host, 'localhost')
+ self.assertEqual(prober.request_port, self.port)
+ self.assertEqual(prober.request_path, '/valid-mirror')
+ self.assertEqual(prober.connect_host, 'squid.internal')
+ self.assertEqual(prober.connect_port, 3128)
+ self.assertEqual(prober.connect_path, self.urls['200'])
def test_connect_cancels_existing_timeout_call(self):
prober = ProberFactory(self.urls['200'])
prober.timeoutCall = reactor.callLater(
30, prober.failWithTimeoutError)
old_timeout_call = prober.timeoutCall
- self.failUnless(old_timeout_call.active())
+ self.assertTrue(old_timeout_call.active())
prober.connect()
- self.failIf(old_timeout_call.active())
- self.failUnless(prober.timeoutCall.active())
+ self.assertFalse(old_timeout_call.active())
+ self.assertTrue(prober.timeoutCall.active())
return prober._deferred
def _test_connect_to_host(self, url, host):
@@ -153,7 +153,7 @@
reactor.connectTCP = fakeConnect
def restore_connect(result, orig_connect):
- self.failUnlessEqual(prober.connecting_to, host)
+ self.assertEqual(prober.connecting_to, host)
reactor.connectTCP = orig_connect
return None
@@ -173,23 +173,23 @@
def test_probe_sets_up_timeout_call(self):
prober = ProberFactory(self.urls['200'])
- self.failUnless(getattr(prober, 'timeoutCall', None) is None)
+ self.assertIsNone(getattr(prober, 'timeoutCall', None))
deferred = prober.probe()
- self.failUnless(getattr(prober, 'timeoutCall', None) is not None)
+ self.assertIsNotNone(getattr(prober, 'timeoutCall', None))
return deferred
def test_RedirectAwareProber_follows_http_redirect(self):
url = 'http://localhost:%s/redirect-to-valid-mirror/file' % self.port
prober = RedirectAwareProberFactory(url)
- self.failUnless(prober.redirection_count == 0)
- self.failUnless(prober.url == url)
+ self.assertTrue(prober.redirection_count == 0)
+ self.assertTrue(prober.url == url)
deferred = prober.probe()
def got_result(result):
- self.failUnless(prober.redirection_count == 1)
+ self.assertTrue(prober.redirection_count == 1)
new_url = 'http://localhost:%s/valid-mirror/file' % self.port
- self.failUnless(prober.url == new_url)
- self.failUnless(result == str(httplib.OK))
+ self.assertTrue(prober.url == new_url)
+ self.assertTrue(result == str(httplib.OK))
return deferred.addCallback(got_result)
@@ -209,7 +209,7 @@
d = self._createProberAndProbe(self.urls['200'])
def got_result(result):
- self.failUnless(
+ self.assertTrue(
result == str(httplib.OK),
"Expected a '200' status but got '%s'" % result)
@@ -218,20 +218,20 @@
def test_success_cancel_timeout_call(self):
prober = ProberFactory(self.urls['200'])
deferred = prober.probe()
- self.failUnless(prober.timeoutCall.active())
+ self.assertTrue(prober.timeoutCall.active())
def check_timeout_call(result):
- self.failIf(prober.timeoutCall.active())
+ self.assertFalse(prober.timeoutCall.active())
return deferred.addCallback(check_timeout_call)
def test_failure_cancel_timeout_call(self):
prober = ProberFactory(self.urls['500'])
deferred = prober.probe()
- self.failUnless(prober.timeoutCall.active())
+ self.assertTrue(prober.timeoutCall.active())
def check_timeout_call(result):
- self.failIf(prober.timeoutCall.active())
+ self.assertFalse(prober.timeoutCall.active())
return deferred.addErrback(check_timeout_call)
@@ -347,10 +347,10 @@
requests = MIN_REQUESTS_TO_CONSIDER_RATIO - 1
timeouts = requests
prober = self._createProberStubConnectAndProbe(requests, timeouts)
- self.failUnless(prober.connectCalled)
+ self.assertTrue(prober.connectCalled)
# Ensure the number of requests and timeouts we're using should
# _NOT_ cause a given host to be skipped.
- self.failIf(should_skip_host(self.host))
+ self.assertFalse(should_skip_host(self.host))
def test_connect_is_not_called_after_too_many_timeouts(self):
"""If we get a small requests/timeouts ratio on a given host, we'll
@@ -362,10 +362,10 @@
timeouts = (
(MIN_REQUESTS_TO_CONSIDER_RATIO / MIN_REQUEST_TIMEOUT_RATIO) + 2)
prober = self._createProberStubConnectAndProbe(requests, timeouts)
- self.failIf(prober.connectCalled)
+ self.assertFalse(prober.connectCalled)
# Ensure the number of requests and timeouts we're using should
# actually cause a given host to be skipped.
- self.failUnless(should_skip_host(self.host))
+ self.assertTrue(should_skip_host(self.host))
def test_connect_is_called_if_not_many_timeouts(self):
# If the ratio is not too small we consider it's safe to keep
@@ -374,10 +374,10 @@
timeouts = (
(MIN_REQUESTS_TO_CONSIDER_RATIO / MIN_REQUEST_TIMEOUT_RATIO) - 2)
prober = self._createProberStubConnectAndProbe(requests, timeouts)
- self.failUnless(prober.connectCalled)
+ self.assertTrue(prober.connectCalled)
# Ensure the number of requests and timeouts we're using should
# _NOT_ cause a given host to be skipped.
- self.failIf(should_skip_host(self.host))
+ self.assertFalse(should_skip_host(self.host))
class TestProberFactoryRequestTimeoutRatioWithTwisted(TrialTestCase):
@@ -422,9 +422,9 @@
u'http://%s:%s/timeout' % (host, self.port))
def got_error(error):
- self.failUnlessEqual(
+ self.assertEqual(
{host: 1}, distributionmirror_prober.host_requests)
- self.failUnlessEqual(
+ self.assertEqual(
{host: 1}, distributionmirror_prober.host_timeouts)
return d.addErrback(got_error)
@@ -435,9 +435,9 @@
u'http://%s:%s/valid-mirror' % (host, self.port))
def got_result(result):
- self.failUnlessEqual(
+ self.assertEqual(
{host: 1}, distributionmirror_prober.host_requests)
- self.failUnlessEqual(
+ self.assertEqual(
{host: 0}, distributionmirror_prober.host_timeouts)
return d.addCallback(got_result)
@@ -451,7 +451,7 @@
distributionmirror_prober.host_timeouts = {host: timeouts}
# Ensure the number of requests and timeouts we're using should
# cause a given host to be skipped.
- self.failUnless(should_skip_host(host))
+ self.assertTrue(should_skip_host(host))
d = self._createProberAndProbe(
u'http://%s:%s/timeout' % (host, self.port))
@@ -549,9 +549,9 @@
prober = RedirectAwareProberFactory('http://foo.bar')
prober.timeoutCall = FakeTimeOutCall()
prober.connect = lambda: None
- self.failIf(prober.timeoutCall.resetCalled)
+ self.assertFalse(prober.timeoutCall.resetCalled)
prober.redirect('http://bar.foo')
- self.failUnless(prober.timeoutCall.resetCalled)
+ self.assertTrue(prober.timeoutCall.resetCalled)
def _createFactoryAndStubConnectAndTimeoutCall(self, url=None):
if url is None:
@@ -575,7 +575,7 @@
prober.failed = failed
prober.redirect('http://foo.bar/baz/boo/notfound?file=package.deb')
- self.failUnless(prober.has_failed)
+ self.assertTrue(prober.has_failed)
def test_does_not_raise_if_redirected_to_reencoded_file(self):
prober = self._createFactoryAndStubConnectAndTimeoutCall(
@@ -597,9 +597,9 @@
"""
config.push('localhost_only_conf', localhost_only_conf)
prober = self._createFactoryAndStubConnectAndTimeoutCall()
- self.failUnless(prober.connect_host != 'localhost')
+ self.assertTrue(prober.connect_host != 'localhost')
prober.probe()
- self.failIf(prober.connectCalled)
+ self.assertFalse(prober.connectCalled)
# Restore the config.
config.pop('localhost_only_conf')
@@ -612,7 +612,7 @@
config.push('remote_conf', remote_conf)
prober = self._createFactoryAndStubConnectAndTimeoutCall()
prober.probe()
- self.failUnless(prober.connectCalled)
+ self.assertTrue(prober.connectCalled)
# Restore the config.
config.pop('remote_conf')
@@ -621,18 +621,18 @@
prober.failed = lambda error: None
prober.redirection_count = MAX_REDIRECTS
prober.redirect('http://bar.foo')
- self.failIf(prober.connectCalled)
+ self.assertFalse(prober.connectCalled)
def test_noconnection_is_made_when_url_scheme_is_not_http_or_ftp(self):
prober = self._createFactoryAndStubConnectAndTimeoutCall()
prober.failed = lambda error: None
prober.redirect('ssh://bar.foo')
- self.failIf(prober.connectCalled)
+ self.assertFalse(prober.connectCalled)
def test_connection_is_made_on_successful_redirect(self):
prober = self._createFactoryAndStubConnectAndTimeoutCall()
prober.redirect('http://bar.foo')
- self.failUnless(prober.connectCalled)
+ self.assertTrue(prober.connectCalled)
def test_connection_is_closed_on_redirect(self):
protocol = RedirectAwareProberProtocol()
@@ -644,7 +644,7 @@
"Length: 0\r\n"
"\r\n")
self.assertEqual('http://foo.baz/', protocol.factory.redirectedTo)
- self.failUnless(protocol.transport.disconnecting)
+ self.assertTrue(protocol.transport.disconnecting)
class TestMirrorCDImageProberCallbacks(TestCaseWithFactory):
@@ -722,10 +722,10 @@
[(defer.FAILURE, Failure(exception))])
# Twisted callbacks may raise or return a failure; that's why we
# check the return value.
- self.failIf(isinstance(failure, Failure))
+ self.assertFalse(isinstance(failure, Failure))
# Also, these failures are not logged to stdout/stderr since
# they're expected to happen.
- self.failIf(logger.errorCalled)
+ self.assertFalse(logger.errorCalled)
def test_unexpected_failures_are_logged_but_not_raised(self):
# Errors which are not expected as logged using the
@@ -737,10 +737,10 @@
[(defer.FAILURE, Failure(ZeroDivisionError()))])
# Twisted callbacks may raise or return a failure; that's why we
# check the return value.
- self.failIf(isinstance(failure, Failure))
+ self.assertFalse(isinstance(failure, Failure))
# Unlike the expected failures, these ones must be logged as errors to
# stdout/stderr.
- self.failUnless(logger.errorCalled)
+ self.assertTrue(logger.errorCalled)
class TestArchiveMirrorProberCallbacks(TestCaseWithFactory):
@@ -917,8 +917,8 @@
def test_no_cache(self):
url = 'http://releases.ubuntu.com/.manifest'
request = _build_request_for_cdimage_file_list(url)
- self.failUnlessEqual(request.headers['Pragma'], 'no-cache')
- self.failUnlessEqual(request.headers['Cache-control'], 'no-cache')
+ self.assertEqual(request.headers['Pragma'], 'no-cache')
+ self.assertEqual(request.headers['Cache-control'], 'no-cache')
class TestLoggingMixin(TestCase):
@@ -940,7 +940,7 @@
logger.logMessage("Ubuntu Warty Released")
logger.log_file.seek(0)
message = logger.log_file.read()
- self.failUnlessEqual(
+ self.assertEqual(
'Wed Oct 20 12:00:00 2004: Ubuntu Warty Released',
message)
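This file also shows the None-check conversions: failUnless(x is None) becomes assertIsNone(x) and its negation becomes assertIsNotNone(x), while plain comparisons such as failUnless(a == b) are kept as assertTrue(a == b). A small illustrative sketch (values are made up):

    import unittest

    class NoneCheckExample(unittest.TestCase):

        def test_none_assertions(self):
            timeout_call = None
            # self.failUnless(x is None)  ->  self.assertIsNone(x)
            self.assertIsNone(timeout_call)
            timeout_call = object()
            # self.failUnless(x is not None)  ->  self.assertIsNotNone(x)
            self.assertIsNotNone(timeout_call)

    if __name__ == '__main__':
        unittest.main()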
=== modified file 'lib/lp/registry/tests/test_distroseries.py'
--- lib/lp/registry/tests/test_distroseries.py 2016-03-31 00:31:07 +0000
+++ lib/lp/registry/tests/test_distroseries.py 2018-01-02 14:40:48 +0000
@@ -684,7 +684,7 @@
new_distroseries.hide_all_translations = False
transaction.commit()
translatables = self._get_translatables()
- self.failUnlessEqual(
+ self.assertEqual(
translatables, self._ref_translatables(),
"A newly created distroseries should not be translatable but "
"translatables() returns %r instead of %r." % (
@@ -696,7 +696,7 @@
sourcepackagename=new_sourcepackagename)
transaction.commit()
translatables = self._get_translatables()
- self.failUnlessEqual(
+ self.assertEqual(
translatables, self._ref_translatables(u"sampleseries"),
"After assigning a PO template, a distroseries should be "
"translatable but translatables() returns %r instead of %r." % (
@@ -707,7 +707,7 @@
new_distroseries.hide_all_translations = True
transaction.commit()
translatables = self._get_translatables()
- self.failUnlessEqual(
+ self.assertEqual(
translatables, self._ref_translatables(),
"After hiding all translation, a distroseries should not be "
"translatable but translatables() returns %r instead of %r." % (
=== modified file 'lib/lp/registry/tests/test_karmacache_updater.py'
--- lib/lp/registry/tests/test_karmacache_updater.py 2016-01-26 15:47:37 +0000
+++ lib/lp/registry/tests/test_karmacache_updater.py 2018-01-02 14:40:48 +0000
@@ -44,7 +44,7 @@
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = process.communicate()
- self.failUnless(process.returncode == 0, (out, err))
+ self.assertTrue(process.returncode == 0, (out, err))
# This is a quite long test, but it's better this way because the
# karmacache updater script takes quite a while to run and changes/deletes
@@ -58,9 +58,9 @@
# will delete the cache entries for Sample Person.
sample_person = self.personset.getByName('name12')
cache_entries = self._getCacheEntriesByPerson(sample_person)
- self.failUnless(not cache_entries.is_empty())
+ self.assertFalse(cache_entries.is_empty())
for cache in cache_entries:
- self.failIf(cache.karmavalue <= 0)
+ self.assertFalse(cache.karmavalue <= 0)
# As we can see, Foo Bar already has some karmacache entries. We'll
# now add some fresh Karma entries for them and later we'll check that
@@ -69,9 +69,9 @@
foobar = self.personset.getByName('name16')
cache_entries = self._getCacheEntriesByPerson(foobar)
foobar_original_entries_count = cache_entries.count()
- self.failUnless(foobar_original_entries_count > 0)
+ self.assertTrue(foobar_original_entries_count > 0)
for cache in cache_entries:
- self.failIf(cache.karmavalue <= 0)
+ self.assertFalse(cache.karmavalue <= 0)
firefox = getUtility(IProductSet)['firefox']
foobar.assignKarma('bugcreated', firefox)
@@ -79,7 +79,7 @@
# add some fresh Karma entries to them, our cache-updater script
# will have to create new KarmaCache entries for them.
nopriv = self.personset.getByName('no-priv')
- self.failUnless(self._getCacheEntriesByPerson(nopriv).count() == 0)
+ self.assertTrue(self._getCacheEntriesByPerson(nopriv).count() == 0)
nopriv.assignKarma('bugcreated', firefox)
transaction.commit()
@@ -91,7 +91,7 @@
# Check that Sample Person has no KarmaCache entries at all
sample_person = self.personset.getByName('name12')
- self.failUnless(
+ self.assertTrue(
self._getCacheEntriesByPerson(sample_person).count() == 0)
# Check that Foo Bar had their KarmaCache entries updated.
@@ -99,7 +99,7 @@
# The cache entries that would have their karmavalue updated to 0 are
# instead deleted from the DB; that's why the new count can be smaller
# than the original one.
- self.failUnless(entries_count <= foobar_original_entries_count)
+ self.assertTrue(entries_count <= foobar_original_entries_count)
# And finally, ensure that No Priv got some new KarmaCache entries.
- self.failUnless(not self._getCacheEntriesByPerson(nopriv).is_empty())
+ self.assertFalse(self._getCacheEntriesByPerson(nopriv).is_empty())
=== modified file 'lib/lp/registry/tests/test_nameblacklist.py'
--- lib/lp/registry/tests/test_nameblacklist.py 2013-06-20 05:50:00 +0000
+++ lib/lp/registry/tests/test_nameblacklist.py 2018-01-02 14:40:48 +0000
@@ -56,13 +56,13 @@
result = self.store.execute(
"SELECT is_blacklisted_name(%s, %s)", (name, user_id))
blacklisted = result.get_one()[0]
- self.failIf(blacklisted is None, 'is_blacklisted_name returned NULL')
+ self.assertIsNotNone(blacklisted, 'is_blacklisted_name returned NULL')
return bool(blacklisted)
def test_name_blacklist_match(self):
# A name that is not blacklisted returns NULL/None
- self.failUnless(self.name_blacklist_match(u"bar") is None)
+ self.assertIsNone(self.name_blacklist_match(u"bar"))
# A name that is blacklisted returns the id of the row in the
# NameBlacklist table that matched. Rows are tried in order, and the
@@ -110,16 +110,16 @@
self.name_blacklist_match(u"foobar"),
self.foo_exp.id)
self.foo_exp.regexp = u'nomatch2'
- self.failUnless(self.name_blacklist_match(u"foobar") is None)
+ self.assertIsNone(self.name_blacklist_match(u"foobar"))
def test_is_blacklisted_name(self):
# is_blacklisted_name() is just a wrapper around name_blacklist_match
# that is friendlier to use in a boolean context.
- self.failUnless(self.is_blacklisted_name(u"bar") is False)
- self.failUnless(self.is_blacklisted_name(u"foo") is True)
+ self.assertFalse(self.is_blacklisted_name(u"bar"))
+ self.assertTrue(self.is_blacklisted_name(u"foo"))
self.caret_foo_exp.regexp = u'bar'
self.foo_exp.regexp = u'bar2'
- self.failUnless(self.is_blacklisted_name(u"foo") is False)
+ self.assertFalse(self.is_blacklisted_name(u"foo"))
def test_is_blacklisted_name_admin_false(self):
# Users in the expression's admin team are will return False.
@@ -127,11 +127,11 @@
self.assertFalse(self.is_blacklisted_name(u"fnord", user.id))
def test_case_insensitive(self):
- self.failUnless(self.is_blacklisted_name(u"Foo") is True)
+ self.assertTrue(self.is_blacklisted_name(u"Foo"))
def test_verbose(self):
# Testing the VERBOSE flag is used when compiling the regexp
- self.failUnless(self.is_blacklisted_name(u"verbose") is True)
+ self.assertTrue(self.is_blacklisted_name(u"verbose"))
class TestNameBlacklistSet(TestCaseWithFactory):
=== modified file 'lib/lp/registry/tests/test_oopsreferences.py'
--- lib/lp/registry/tests/test_oopsreferences.py 2013-06-20 05:50:00 +0000
+++ lib/lp/registry/tests/test_oopsreferences.py 2018-01-02 14:40:48 +0000
@@ -39,10 +39,10 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
- self.failUnlessEqual(
+ self.assertEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
@@ -53,10 +53,10 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
- self.failUnlessEqual(
+ self.assertEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
@@ -68,10 +68,10 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
- self.failUnlessEqual(
+ self.assertEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
@@ -83,10 +83,10 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=1", {}))
- self.failUnlessEqual(
+ self.assertEqual(
set(),
referenced_oops(now + day, now + day, "product=1", {}))
@@ -96,11 +96,11 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
- self.failUnlessEqual(
+ self.assertEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
@@ -112,7 +112,7 @@
now = datetime.now(tz=utc)
day = timedelta(days=1)
self.store.flush()
- self.failUnlessEqual(
+ self.assertEqual(
set(),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id + 1}))
@@ -124,11 +124,11 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
- self.failUnlessEqual(
+ self.assertEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
@@ -141,11 +141,11 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "product=%(product)s",
{'product': question.product.id}))
- self.failUnlessEqual(
+ self.assertEqual(
set([]),
referenced_oops(now + day, now + day, "product=%(product)s",
{'product': question.product.id}))
@@ -159,11 +159,11 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid]),
referenced_oops(now - day, now, "distribution=%(distribution)s",
{'distribution': distro.id}))
- self.failUnlessEqual(
+ self.assertEqual(
set([]),
referenced_oops(now + day, now + day,
"distribution=%(distribution)s", {'distribution': distro.id}))
@@ -188,9 +188,9 @@
self.store.flush()
now = datetime.now(tz=utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
set([oopsid_old, oopsid_new]),
referenced_oops(now - day, now, "product=1", {}))
- self.failUnlessEqual(
+ self.assertEqual(
set([]),
referenced_oops(now + day, now + day, "product=1", {}))
=== modified file 'lib/lp/registry/tests/test_person.py'
--- lib/lp/registry/tests/test_person.py 2016-01-26 15:47:37 +0000
+++ lib/lp/registry/tests/test_person.py 2018-01-02 14:40:48 +0000
@@ -822,7 +822,7 @@
sample_person = Person.byName('name12')
login(sample_person.preferredemail.email)
sample_person.deactivate(comment="blah!")
- self.failUnlessEqual(sample_person.name, 'name12-deactivatedaccount')
+ self.assertEqual(sample_person.name, 'name12-deactivatedaccount')
# Now that name12 is free Foo Bar can use it.
foo_bar = Person.byName('name16')
foo_bar.name = 'name12'
@@ -831,7 +831,7 @@
# in use.
login(foo_bar.preferredemail.email)
foo_bar.deactivate(comment="blah!")
- self.failUnlessEqual(foo_bar.name, 'name12-deactivatedaccount1')
+ self.assertEqual(foo_bar.name, 'name12-deactivatedaccount1')
def test_deactivate_reassigns_owner_and_driver(self):
"""Product owner and driver are reassigned.
@@ -860,9 +860,9 @@
# turn is a proposed member of Ubuntu Team. That means
# sample_person._getDirectMemberIParticipateIn(ubuntu_team) will fail
# with an AssertionError.
- self.failUnless(sample_person in warty_team.activemembers)
- self.failUnless(warty_team in ubuntu_team.invited_members)
- self.failUnlessRaises(
+ self.assertIn(sample_person, warty_team.activemembers)
+ self.assertIn(warty_team, ubuntu_team.invited_members)
+ self.assertRaises(
AssertionError, sample_person._getDirectMemberIParticipateIn,
ubuntu_team)
@@ -871,8 +871,8 @@
# warty_team.
login(warty_team.teamowner.preferredemail.email)
warty_team.acceptInvitationToBeMemberOf(ubuntu_team, comment="foo")
- self.failUnless(warty_team in ubuntu_team.activemembers)
- self.failUnlessEqual(
+ self.assertIn(warty_team, ubuntu_team.activemembers)
+ self.assertEqual(
sample_person._getDirectMemberIParticipateIn(ubuntu_team),
warty_team)
@@ -1005,15 +1005,14 @@
self, params, assignee=None, bug_subscriber=None,
owner=None, bug_commenter=None, bug_reporter=None,
structural_subscriber=None):
- self.failUnlessEqual(assignee, params.assignee)
+ self.assertEqual(assignee, params.assignee)
# fromSearchForm() takes a bug_subscriber parameter, but saves
# it as subscriber on the parameter object.
- self.failUnlessEqual(bug_subscriber, params.subscriber)
- self.failUnlessEqual(owner, params.owner)
- self.failUnlessEqual(bug_commenter, params.bug_commenter)
- self.failUnlessEqual(bug_reporter, params.bug_reporter)
- self.failUnlessEqual(structural_subscriber,
- params.structural_subscriber)
+ self.assertEqual(bug_subscriber, params.subscriber)
+ self.assertEqual(owner, params.owner)
+ self.assertEqual(bug_commenter, params.bug_commenter)
+ self.assertEqual(bug_reporter, params.bug_reporter)
+ self.assertEqual(structural_subscriber, params.structural_subscriber)
def test_get_person_bugtasks_search_params(self):
# With no specified options, get_person_bugtasks_search_params()
=== modified file 'lib/lp/registry/tests/test_personnotification.py'
--- lib/lp/registry/tests/test_personnotification.py 2015-10-26 14:54:43 +0000
+++ lib/lp/registry/tests/test_personnotification.py 2018-01-02 14:40:48 +0000
@@ -87,7 +87,7 @@
user, 'subject', 'body')
unsent = self.manager.sendNotifications()
self.assertEqual(None, unsent)
- self.failIf(notification.date_emailed is None)
+ self.assertIsNotNone(notification.date_emailed)
def test_sendNotifications_unsent(self):
user = self.factory.makePerson()
@@ -105,7 +105,7 @@
team, 'subject', 'body')
unsent = self.manager.sendNotifications()
self.assertEqual(None, unsent)
- self.failIf(notification.date_emailed is None)
+ self.assertIsNotNone(notification.date_emailed)
def test_purgeNotifications_old(self):
user = self.factory.makePerson()
=== modified file 'lib/lp/registry/tests/test_personset.py'
--- lib/lp/registry/tests/test_personset.py 2017-09-28 13:06:10 +0000
+++ lib/lp/registry/tests/test_personset.py 2018-01-02 14:40:48 +0000
@@ -95,8 +95,8 @@
def test_isNameBlacklisted(self):
cursor().execute(
"INSERT INTO NameBlacklist(id, regexp) VALUES (-100, 'foo')")
- self.failUnless(self.person_set.isNameBlacklisted('foo'))
- self.failIf(self.person_set.isNameBlacklisted('bar'))
+ self.assertTrue(self.person_set.isNameBlacklisted('foo'))
+ self.assertFalse(self.person_set.isNameBlacklisted('bar'))
def test_isNameBlacklisted_user_is_admin(self):
team = self.factory.makeTeam()
@@ -110,13 +110,13 @@
def test_getByEmail_ignores_case_and_whitespace(self):
person1_email = 'foo.bar@xxxxxxxxxxxxx'
person1 = self.person_set.getByEmail(person1_email)
- self.failIf(
- person1 is None,
+ self.assertIsNotNone(
+ person1,
"PersonSet.getByEmail() could not find %r" % person1_email)
person2 = self.person_set.getByEmail(' foo.BAR@xxxxxxxxxxxxx ')
- self.failIf(
- person2 is None,
+ self.assertIsNotNone(
+ person2,
"PersonSet.getByEmail() should ignore case and whitespace.")
self.assertEqual(person1, person2)
=== modified file 'lib/lp/registry/tests/test_pillarname_triggers.py'
--- lib/lp/registry/tests/test_pillarname_triggers.py 2012-01-20 15:42:44 +0000
+++ lib/lp/registry/tests/test_pillarname_triggers.py 2018-01-02 14:40:48 +0000
@@ -30,7 +30,7 @@
ON Distribution.id = PillarName.distribution
WHERE Distribution.name != PillarName.name
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def is_in_sync(name):
cur.execute("""
@@ -55,26 +55,26 @@
'whatever', 'whatever', 'whatever', 1, 1
)
""")
- self.failUnless(is_in_sync('whatever'))
+ self.assertTrue(is_in_sync('whatever'))
# Updating the Distribution.name will propogate changes to PillarName
cur.execute("""
UPDATE Distribution SET name='whatever2' where name='whatever'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Updating other fields won't do any harm.
cur.execute("""
UPDATE Distribution SET description='whatever2'
WHERE name='whatever2'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Deleting a Distribution removes the corresponding entry in
# PillarName
cur.execute("DELETE FROM Distribution WHERE name='whatever2'")
cur.execute("SELECT COUNT(*) FROM PillarName WHERE name='whatever2'")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def testProductTable(self):
cur = cursor()
@@ -87,7 +87,7 @@
ON Product.id = PillarName.product
WHERE Product.name != PillarName.name
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def is_in_sync(name):
cur.execute("""
@@ -109,25 +109,25 @@
1, 1, 'whatever', 'whatever', 'whatever', 'whatever'
)
""")
- self.failUnless(is_in_sync('whatever'))
+ self.assertTrue(is_in_sync('whatever'))
# Updating the Product.name will propogate changes to PillarName
cur.execute("""
UPDATE Product SET name='whatever2' where name='whatever'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Updating other fields won't do any harm.
cur.execute("""
UPDATE Product SET summary='whatever2'
WHERE name='whatever2'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Deleting a Product removes the corresponding entry in PillarName
cur.execute("DELETE FROM Product WHERE name='whatever2'")
cur.execute("SELECT COUNT(*) FROM PillarName WHERE name='whatever2'")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def testProjectTable(self):
cur = cursor()
@@ -140,7 +140,7 @@
ON Project.id = PillarName.project
WHERE Project.name != PillarName.name
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def is_in_sync(name):
cur.execute("""
@@ -165,23 +165,23 @@
'whatever', 'whatever'
)
""")
- self.failUnless(is_in_sync('whatever'))
+ self.assertTrue(is_in_sync('whatever'))
# Updating the ProjectGroup.name will propogate changes to PillarName
cur.execute("""
UPDATE Project SET name='whatever2' where name='whatever'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Updating other fields won't do any harm.
cur.execute("""
UPDATE Project SET description='whatever2'
WHERE name='whatever2'
""")
- self.failUnless(is_in_sync('whatever2'))
+ self.assertTrue(is_in_sync('whatever2'))
# Deleting a ProjectGroup removes the corresponding entry in
# PillarName.
cur.execute("DELETE FROM Project WHERE name='whatever2'")
cur.execute("SELECT COUNT(*) FROM PillarName WHERE name='whatever2'")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
=== modified file 'lib/lp/registry/tests/test_poll.py'
--- lib/lp/registry/tests/test_poll.py 2012-01-01 02:58:52 +0000
+++ lib/lp/registry/tests/test_poll.py 2018-01-02 14:40:48 +0000
@@ -28,4 +28,4 @@
poll.storeSimpleVote(owner, None)
# Force closing of the poll so that we can call getWinners().
poll.datecloses = datetime.now(pytz.UTC)
- self.failUnless(poll.getWinners() is None, poll.getWinners())
+ self.assertIsNone(poll.getWinners(), poll.getWinners())
=== modified file 'lib/lp/registry/tests/test_prf_filter.py'
--- lib/lp/registry/tests/test_prf_filter.py 2011-08-12 11:19:40 +0000
+++ lib/lp/registry/tests/test_prf_filter.py 2018-01-02 14:40:48 +0000
@@ -13,7 +13,7 @@
Filter)
from logging import Logger
f = Filter()
- self.failUnless(isinstance(f.log, Logger))
+ self.assertTrue(isinstance(f.log, Logger))
def testCreatesChildLogger(self):
"""Filter creates a child logger if given a parent."""
=== modified file 'lib/lp/registry/tests/test_prf_finder.py'
--- lib/lp/registry/tests/test_prf_finder.py 2014-06-03 10:43:24 +0000
+++ lib/lp/registry/tests/test_prf_finder.py 2018-01-02 14:40:48 +0000
@@ -113,14 +113,14 @@
evo_filters = filters
self.assertEqual(len(evo_filters), 1)
- self.failUnless(isinstance(evo_filters[0], FilterPattern))
+ self.assertTrue(isinstance(evo_filters[0], FilterPattern))
self.assertEqual(evo_filters[0].key, 'trunk')
self.assertEqual(evo_filters[0].base_url,
'http://ftp.gnome.org/pub/GNOME/sources/evolution/2.7/')
self.assertEqual(evo_filters[0].urlglob,
'http://ftp.gnome.org/pub/GNOME/sources/evolution/2.7/'
'evolution-*.tar.gz')
- self.failUnless(evo_filters[0].match(
+ self.assertTrue(evo_filters[0].match(
'http://ftp.gnome.org/pub/GNOME/sources/evolution/2.7/'
'evolution-2.7.1.tar.gz'))
@@ -258,7 +258,7 @@
self.assertEqual(fileinfo.libraryfile.filename, file_name)
# verify that the fileinfo object is sane
- self.failUnless(verifyObject(IProductReleaseFile, fileinfo))
+ self.assertTrue(verifyObject(IProductReleaseFile, fileinfo))
for field in getFields(IProductReleaseFile).values():
# XXX: BradCrittenden 2008-09-04 bug=264829:
# Several interfaces have uploaded files as `Bytes` attributes but
=== modified file 'lib/lp/registry/tests/test_prf_hose.py'
--- lib/lp/registry/tests/test_prf_hose.py 2015-10-26 14:54:43 +0000
+++ lib/lp/registry/tests/test_prf_hose.py 2018-01-02 14:40:48 +0000
@@ -21,7 +21,7 @@
from lp.registry.scripts.productreleasefinder.hose import Hose
from logging import Logger
h = Hose()
- self.failUnless(isinstance(h.log, Logger))
+ self.assertTrue(isinstance(h.log, Logger))
def testCreatesChildLogger(self):
"""Hose creates a child logger if given a parent."""
@@ -39,7 +39,7 @@
from lp.registry.scripts.productreleasefinder.filter import (
Filter)
h = Hose()
- self.failUnless(isinstance(h.filter, Filter))
+ self.assertTrue(isinstance(h.filter, Filter))
def testDefaultsFiltersToEmptyDict(self):
"""Hose creates Filter object with empty dictionary."""
=== modified file 'lib/lp/registry/tests/test_prf_log.py'
--- lib/lp/registry/tests/test_prf_log.py 2011-12-24 17:48:54 +0000
+++ lib/lp/registry/tests/test_prf_log.py 2018-01-02 14:40:48 +0000
@@ -15,7 +15,7 @@
"""get_logger returns a Logger instance."""
from lp.registry.scripts.productreleasefinder.log import get_logger
from logging import Logger
- self.failUnless(isinstance(get_logger("test"), Logger))
+ self.assertTrue(isinstance(get_logger("test"), Logger))
def testNoParent(self):
"""get_logger works if no parent is given."""
=== modified file 'lib/lp/registry/tests/test_prf_walker.py'
--- lib/lp/registry/tests/test_prf_walker.py 2012-01-01 02:58:52 +0000
+++ lib/lp/registry/tests/test_prf_walker.py 2018-01-02 14:40:48 +0000
@@ -22,7 +22,7 @@
"""WalkerBase creates a default logger."""
from logging import Logger
w = WalkerBase("/")
- self.failUnless(isinstance(w.log, Logger))
+ self.assertTrue(isinstance(w.log, Logger))
def testCreatesChildLogger(self):
"""WalkerBase creates a child logger if given a parent."""
=== modified file 'lib/lp/registry/tests/test_product.py'
--- lib/lp/registry/tests/test_product.py 2015-10-01 17:32:41 +0000
+++ lib/lp/registry/tests/test_product.py 2018-01-02 14:40:48 +0000
@@ -2181,10 +2181,10 @@
ws_product = self.wsObject(product, product.owner)
now = datetime.now(tz=pytz.utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
[oopsid],
ws_product.findReferencedOOPS(start_date=now - day, end_date=now))
- self.failUnlessEqual(
+ self.assertEqual(
[],
ws_product.findReferencedOOPS(
start_date=now + day, end_date=now + day))
@@ -2199,7 +2199,7 @@
ws_product = self.wsObject(product, product.owner)
now = datetime.now(tz=pytz.utc)
day = timedelta(days=1)
- self.failUnlessEqual(
+ self.assertEqual(
[],
ws_product.findReferencedOOPS(start_date=now - day, end_date=now))
=== modified file 'lib/lp/registry/tests/test_ro_user.py'
--- lib/lp/registry/tests/test_ro_user.py 2012-01-20 16:11:11 +0000
+++ lib/lp/registry/tests/test_ro_user.py 2018-01-02 14:40:48 +0000
@@ -32,14 +32,14 @@
# Except on sequences
cur.execute("SAVEPOINT attempt")
- self.failUnlessRaises(
+ self.assertRaises(
psycopg2.Error, cur.execute, "SELECT nextval('person_id_seq')"
)
cur.execute("ROLLBACK TO SAVEPOINT attempt")
# UPDATES should fail
cur.execute("SAVEPOINT attempt")
- self.failUnlessRaises(
+ self.assertRaises(
psycopg2.Error, cur.execute, "UPDATE Person SET password=NULL"
)
cur.execute("ROLLBACK TO SAVEPOINT attempt")
@@ -47,7 +47,7 @@
# DELETES should fail.
# We need to use a table with no FK references to it
cur.execute("SAVEPOINT attempt")
- self.failUnlessRaises(
+ self.assertRaises(
psycopg2.Error, cur.execute, "DELETE FROM WikiName"
)
cur.execute("ROLLBACK TO SAVEPOINT attempt")
=== modified file 'lib/lp/registry/tests/test_samplekarma.py'
--- lib/lp/registry/tests/test_samplekarma.py 2011-12-28 17:03:06 +0000
+++ lib/lp/registry/tests/test_samplekarma.py 2018-01-02 14:40:48 +0000
@@ -21,5 +21,4 @@
WHERE datecreated > '2002-01-01 00:00'::timestamp
""")
dud_rows = cur.fetchone()[0]
- self.failUnlessEqual(
- dud_rows, 0, 'Karma time bombs added to sampledata')
+ self.assertEqual(dud_rows, 0, 'Karma time bombs added to sampledata')
=== modified file 'lib/lp/registry/tests/test_sourcepackage.py'
--- lib/lp/registry/tests/test_sourcepackage.py 2016-01-26 15:47:37 +0000
+++ lib/lp/registry/tests/test_sourcepackage.py 2018-01-02 14:40:48 +0000
@@ -630,14 +630,14 @@
sourcepackage = self.factory.makeSourcePackage()
john_doe = self.factory.makePerson()
with person_logged_in(john_doe):
- self.failIf(
+ self.assertFalse(
checkPermission('launchpad.Edit', sourcepackage),
"Random user shouldn't have launchpad.Edit on source "
"packages.")
def test_cannot_setBranch(self):
sourcepackage = self.factory.makeSourcePackage()
- self.failIf(
+ self.assertFalse(
canAccess(sourcepackage, 'setBranch'),
"setBranch should only be available to admins and uploaders")
=== modified file 'lib/lp/registry/tests/test_teammembership.py'
--- lib/lp/registry/tests/test_teammembership.py 2016-01-26 15:47:37 +0000
+++ lib/lp/registry/tests/test_teammembership.py 2018-01-02 14:40:48 +0000
@@ -164,11 +164,11 @@
self.assertEqual(ubuntu_team.teamowner, membership.proposed_by)
self.assertEqual(membership.proponent_comment, "I like her")
now = datetime.now(pytz.UTC)
- self.failUnless(membership.date_proposed <= now)
- self.failUnless(membership.datejoined <= now)
+ self.assertTrue(membership.date_proposed <= now)
+ self.assertTrue(membership.datejoined <= now)
self.assertEqual(ubuntu_team.teamowner, membership.reviewed_by)
self.assertEqual(membership.reviewer_comment, "I like her")
- self.failUnless(membership.date_reviewed <= now)
+ self.assertTrue(membership.date_reviewed <= now)
self.assertEqual(membership.acknowledged_by, None)
def test_membership_creation_stores_proponent(self):
@@ -184,7 +184,7 @@
comment="I'd like to join")
self.assertEqual(marilize, membership.proposed_by)
self.assertEqual(membership.proponent_comment, "I'd like to join")
- self.failUnless(
+ self.assertTrue(
membership.date_proposed <= datetime.now(pytz.UTC))
self.assertEqual(membership.reviewed_by, None)
self.assertEqual(membership.acknowledged_by, None)
@@ -234,10 +234,10 @@
# or motu.
self.assertEqual(
sample_person_on_ubuntu_dev.status, TeamMembershipStatus.EXPIRED)
- self.failIf(sample_person.inTeam(ubuntu_dev))
+ self.assertFalse(sample_person.inTeam(ubuntu_dev))
self.assertEqual(
sample_person_on_motu.status, TeamMembershipStatus.EXPIRED)
- self.failIf(sample_person.inTeam(motu))
+ self.assertFalse(sample_person.inTeam(motu))
def test_deactivateActiveMemberships(self):
superteam = self.factory.makeTeam(name='super')
@@ -661,9 +661,9 @@
person, membership_policy=TeamMembershipPolicy.MODERATED)
teamB = self.factory.makeTeam(
person, membership_policy=TeamMembershipPolicy.MODERATED)
- self.failUnless(
+ self.assertTrue(
teamA.inTeam(teamA), "teamA is not a participant of itself")
- self.failUnless(
+ self.assertTrue(
teamB.inTeam(teamB), "teamB is not a participant of itself")
teamA.join(teamB, requester=person)
@@ -671,9 +671,9 @@
teamB.setMembershipData(teamA, TeamMembershipStatus.APPROVED, person)
teamA.setMembershipData(teamB, TeamMembershipStatus.DECLINED, person)
- self.failUnless(teamA.hasParticipationEntryFor(teamA),
+ self.assertTrue(teamA.hasParticipationEntryFor(teamA),
"teamA is not a participant of itself")
- self.failUnless(teamB.hasParticipationEntryFor(teamB),
+ self.assertTrue(teamB.hasParticipationEntryFor(teamB),
"teamB is not a participant of itself")
def test_membership_status_changes_are_immediately_flushed_to_db(self):
@@ -720,18 +720,19 @@
TeamMembershipStatus.DECLINED]:
tm = TeamMembership(
person=self.no_priv, team=self.ubuntu_team, status=status)
- self.failIf(
+ self.assertFalse(
tm.proposed_by, "There can be no proponent at this point.")
- self.failIf(
+ self.assertFalse(
tm.date_proposed, "There can be no proposed date this point.")
- self.failIf(tm.proponent_comment,
- "There can be no proponent comment at this point.")
+ self.assertFalse(
+ tm.proponent_comment,
+ "There can be no proponent comment at this point.")
tm.setStatus(
TeamMembershipStatus.PROPOSED, self.foobar,
"Did it 'cause I can")
- self.failUnlessEqual(tm.proposed_by, self.foobar)
- self.failUnlessEqual(tm.proponent_comment, "Did it 'cause I can")
- self.failUnless(
+ self.assertEqual(tm.proposed_by, self.foobar)
+ self.assertEqual(tm.proponent_comment, "Did it 'cause I can")
+ self.assertTrue(
tm.date_proposed <= datetime.now(pytz.UTC))
# Destroy the membership so that we can create another in a
# different state.
@@ -743,19 +744,19 @@
tm = TeamMembership(
person=self.admins, team=self.ubuntu_team,
status=TeamMembershipStatus.INVITED)
- self.failIf(
+ self.assertFalse(
tm.acknowledged_by,
"There can be no acknowledger at this point.")
- self.failIf(
+ self.assertFalse(
tm.date_acknowledged,
"There can be no accepted date this point.")
- self.failIf(tm.acknowledger_comment,
- "There can be no acknowledger comment at this point.")
+ self.assertFalse(
+ tm.acknowledger_comment,
+ "There can be no acknowledger comment at this point.")
tm.setStatus(status, self.foobar, "Did it 'cause I can")
- self.failUnlessEqual(tm.acknowledged_by, self.foobar)
- self.failUnlessEqual(
- tm.acknowledger_comment, "Did it 'cause I can")
- self.failUnless(
+ self.assertEqual(tm.acknowledged_by, self.foobar)
+ self.assertEqual(tm.acknowledger_comment, "Did it 'cause I can")
+ self.assertTrue(
tm.date_acknowledged <= datetime.now(pytz.UTC))
# Destroy the membership so that we can create another in a
# different state.
@@ -774,20 +775,19 @@
for new_status in new_statuses:
tm = TeamMembership(
person=self.no_priv, team=self.ubuntu_team, status=status)
- self.failIf(
+ self.assertFalse(
tm.reviewed_by,
"There can be no approver at this point.")
- self.failIf(
+ self.assertFalse(
tm.date_reviewed,
"There can be no approved date this point.")
- self.failIf(
+ self.assertFalse(
tm.reviewer_comment,
"There can be no approver comment at this point.")
tm.setStatus(new_status, self.foobar, "Did it 'cause I can")
- self.failUnlessEqual(tm.reviewed_by, self.foobar)
- self.failUnlessEqual(
- tm.reviewer_comment, "Did it 'cause I can")
- self.failUnless(
+ self.assertEqual(tm.reviewed_by, self.foobar)
+ self.assertEqual(tm.reviewer_comment, "Did it 'cause I can")
+ self.assertTrue(
tm.date_reviewed <= datetime.now(pytz.UTC))
# Destroy the membership so that we can create another in a
@@ -801,11 +801,11 @@
tm = TeamMembership(
person=self.no_priv, team=self.ubuntu_team,
status=TeamMembershipStatus.PROPOSED)
- self.failIf(
+ self.assertFalse(
tm.datejoined, "There can be no datejoined at this point.")
tm.setStatus(TeamMembershipStatus.APPROVED, self.foobar)
now = datetime.now(pytz.UTC)
- self.failUnless(tm.datejoined <= now)
+ self.assertTrue(tm.datejoined <= now)
# We now set the status to deactivated and change datejoined to a
# date in the past just so that we can easily show it's not changed
@@ -814,7 +814,7 @@
tm.setStatus(TeamMembershipStatus.DEACTIVATED, self.foobar)
tm.datejoined = one_minute_ago
tm.setStatus(TeamMembershipStatus.APPROVED, self.foobar)
- self.failUnless(tm.datejoined <= one_minute_ago)
+ self.assertTrue(tm.datejoined <= one_minute_ago)
def test_no_cyclical_membership_allowed(self):
"""No status change can create cyclical memberships."""
@@ -827,28 +827,24 @@
self.team1, self.team2)
team2_on_team1 = getUtility(ITeamMembershipSet).getByPersonAndTeam(
self.team2, self.team1)
- self.failUnlessEqual(
- team1_on_team2.status, TeamMembershipStatus.INVITED)
- self.failUnlessEqual(
- team2_on_team1.status, TeamMembershipStatus.INVITED)
+ self.assertEqual(team1_on_team2.status, TeamMembershipStatus.INVITED)
+ self.assertEqual(team2_on_team1.status, TeamMembershipStatus.INVITED)
# Now make team1 an active member of team2. From this point onwards,
# team2 cannot be made an active member of team1.
team1_on_team2.setStatus(TeamMembershipStatus.APPROVED, self.foobar)
flush_database_updates()
- self.failUnlessEqual(
- team1_on_team2.status, TeamMembershipStatus.APPROVED)
+ self.assertEqual(team1_on_team2.status, TeamMembershipStatus.APPROVED)
self.assertRaises(
CyclicalTeamMembershipError, team2_on_team1.setStatus,
TeamMembershipStatus.APPROVED, self.foobar)
- self.failUnlessEqual(
- team2_on_team1.status, TeamMembershipStatus.INVITED)
+ self.assertEqual(team2_on_team1.status, TeamMembershipStatus.INVITED)
# It is possible to change the state of team2's membership on team1
# to another inactive state, though.
team2_on_team1.setStatus(
TeamMembershipStatus.INVITATION_DECLINED, self.foobar)
- self.failUnlessEqual(
+ self.assertEqual(
team2_on_team1.status, TeamMembershipStatus.INVITATION_DECLINED)
def test_no_cyclical_participation_allowed(self):
@@ -1195,9 +1191,9 @@
code, out, err = self._runScript()
self.assertEqual(0, code)
self.assertEqual(0, len(out))
- self.failUnless(
+ self.assertTrue(
re.search('missing TeamParticipation entries for zzzzz', err))
- self.failUnless(
+ self.assertTrue(
re.search('spurious TeamParticipation entries for zzzzz', err))
def test_report_circular_team_references(self):
@@ -1232,7 +1228,7 @@
code, out, err = self._runScript()
self.assertEqual(1, code)
self.assertEqual(0, len(out))
- self.failUnless(re.search('Circular references found', err))
+ self.assertTrue(re.search('Circular references found', err))
# A script to create two new people, where both participate in the first,
# and first is missing a self-participation.
=== modified file 'lib/lp/scripts/tests/test_garbo.py'
--- lib/lp/scripts/tests/test_garbo.py 2016-11-12 21:02:10 +0000
+++ lib/lp/scripts/tests/test_garbo.py 2018-01-02 14:40:48 +0000
@@ -153,16 +153,16 @@
"""Ensure garbo-daily.py actually runs."""
rv, out, err = run_script(
"cronscripts/garbo-daily.py", ["-q"], expect_returncode=0)
- self.failIf(out.strip(), "Output to stdout: %s" % out)
- self.failIf(err.strip(), "Output to stderr: %s" % err)
+ self.assertFalse(out.strip(), "Output to stdout: %s" % out)
+ self.assertFalse(err.strip(), "Output to stderr: %s" % err)
DatabaseLayer.force_dirty_database()
def test_hourly_script(self):
"""Ensure garbo-hourly.py actually runs."""
rv, out, err = run_script(
"cronscripts/garbo-hourly.py", ["-q"], expect_returncode=0)
- self.failIf(out.strip(), "Output to stdout: %s" % out)
- self.failIf(err.strip(), "Output to stderr: %s" % err)
+ self.assertFalse(out.strip(), "Output to stdout: %s" % out)
+ self.assertFalse(err.strip(), "Output to stderr: %s" % err)
DatabaseLayer.force_dirty_database()
@@ -466,7 +466,7 @@
store = IMasterStore(OpenIDConsumerNonce)
# Make sure we start with 0 nonces.
- self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 0)
+ self.assertEqual(store.find(OpenIDConsumerNonce).count(), 0)
for timestamp in timestamps:
store.add(OpenIDConsumerNonce(
@@ -474,7 +474,7 @@
transaction.commit()
# Make sure we have 4 nonces now.
- self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 4)
+ self.assertEqual(store.find(OpenIDConsumerNonce).count(), 4)
# Run the garbage collector.
self.runFrequently(maximum_chunk_size=60) # 1 minute maximum chunks.
@@ -482,11 +482,11 @@
store = IMasterStore(OpenIDConsumerNonce)
# We should now have 2 nonces.
- self.failUnlessEqual(store.find(OpenIDConsumerNonce).count(), 2)
+ self.assertEqual(store.find(OpenIDConsumerNonce).count(), 2)
# And none of them are older than 1 day
earliest = store.find(Min(OpenIDConsumerNonce.timestamp)).one()
- self.failUnless(
+ self.assertTrue(
earliest >= now - 24 * 60 * 60, 'Still have old nonces')
def test_CodeImportResultPruner(self):
@@ -522,26 +522,26 @@
# Nothing is removed, because we always keep the
# ``results_to_keep_count`` latest.
store = IMasterStore(CodeImportResult)
- self.failUnlessEqual(
+ self.assertEqual(
results_to_keep_count,
store.find(CodeImportResult).count())
new_code_import_result(now - timedelta(days=31))
self.runDaily()
store = IMasterStore(CodeImportResult)
- self.failUnlessEqual(
+ self.assertEqual(
results_to_keep_count,
store.find(CodeImportResult).count())
new_code_import_result(now - timedelta(days=29))
self.runDaily()
store = IMasterStore(CodeImportResult)
- self.failUnlessEqual(
+ self.assertEqual(
results_to_keep_count,
store.find(CodeImportResult).count())
# We now have no CodeImportResults older than 30 days
- self.failUnless(
+ self.assertTrue(
store.find(
Min(CodeImportResult.date_created)).one().replace(tzinfo=UTC)
>= now - timedelta(days=30))
@@ -570,7 +570,7 @@
events = list(machine.events)
self.assertEqual(3, len(events))
# We now have no CodeImportEvents older than 30 days
- self.failUnless(
+ self.assertTrue(
store.find(
Min(CodeImportEvent.date_created)).one().replace(tzinfo=UTC)
>= now - timedelta(days=30))
@@ -595,7 +595,7 @@
SELECT COUNT(*) FROM %s
WHERE issued + lifetime < %f
""" % (table_name, now)).get_one()[0]
- self.failUnless(num_expired > 0)
+ self.assertTrue(num_expired > 0)
# Expire all those expirable rows, and possibly a few more if this
# test is running slow.
@@ -610,13 +610,13 @@
SELECT COUNT(*) FROM %s
WHERE issued + lifetime < %f
""" % (table_name, now)).get_one()[0]
- self.failUnlessEqual(num_expired, 0)
+ self.assertEqual(num_expired, 0)
# Confirm that we haven't expired everything. This test will fail
# if it has taken 10 seconds to get this far.
num_unexpired = store.execute(
"SELECT COUNT(*) FROM %s" % table_name).get_one()[0]
- self.failUnless(num_unexpired > 0)
+ self.assertTrue(num_unexpired > 0)
def test_PreviewDiffPruner(self):
switch_dbuser('testadmin')
=== modified file 'lib/lp/scripts/tests/test_runlaunchpad.py'
--- lib/lp/scripts/tests/test_runlaunchpad.py 2012-01-01 02:58:52 +0000
+++ lib/lp/scripts/tests/test_runlaunchpad.py 2018-01-02 14:40:48 +0000
@@ -171,4 +171,4 @@
self.assertEqual([SERVICES['sftp']], services)
def test_launchpad_systems_red(self):
- self.failIf(config.launchpad.launch)
+ self.assertFalse(config.launchpad.launch)
=== modified file 'lib/lp/services/apachelogparser/tests/test_apachelogparser.py'
--- lib/lp/services/apachelogparser/tests/test_apachelogparser.py 2017-10-05 19:01:06 +0000
+++ lib/lp/services/apachelogparser/tests/test_apachelogparser.py 2018-01-02 14:40:48 +0000
@@ -418,7 +418,7 @@
ParsedApacheLog(first_line, len(fd.read()))
files_to_parse = get_files_to_parse([self.file_path])
- self.failUnlessEqual(list(files_to_parse), [])
+ self.assertEqual(list(files_to_parse), [])
def test_parsed_file_with_new_content(self):
# A file that has been parsed already but in which new content was
@@ -452,7 +452,7 @@
fd.close()
files_to_parse = get_files_to_parse([new_path])
positions = map(itemgetter(1), files_to_parse)
- self.failUnlessEqual(positions, [0])
+ self.assertEqual(positions, [0])
def test_fresh_gzipped_file(self):
# get_files_to_parse() handles gzipped files just like uncompressed
@@ -472,7 +472,7 @@
ParsedApacheLog(first_line, len(first_line))
files_to_parse = get_files_to_parse([gz_path])
positions = map(itemgetter(1), files_to_parse)
- self.failUnlessEqual(positions, [len(first_line)])
+ self.assertEqual(positions, [len(first_line)])
class Test_create_or_update_parsedlog_entry(TestCase):
=== modified file 'lib/lp/services/config/tests/test_config_lookup.py'
--- lib/lp/services/config/tests/test_config_lookup.py 2012-01-01 02:58:52 +0000
+++ lib/lp/services/config/tests/test_config_lookup.py 2018-01-02 14:40:48 +0000
@@ -44,7 +44,7 @@
os.environ['LPCONFIG'] = 'from_env'
- self.failUnlessEqual(config.find_instance_name(), 'from_env')
+ self.assertEqual(config.find_instance_name(), 'from_env')
def testByFile(self):
# Create the lookup file.
@@ -53,14 +53,13 @@
# Trash the environment variable so it doesn't override.
del os.environ['LPCONFIG']
- self.failUnlessEqual(config.find_instance_name(), 'from_disk')
+ self.assertEqual(config.find_instance_name(), 'from_disk')
def testByDefault(self):
# Trash the environment variable so it doesn't override.
del os.environ['LPCONFIG']
- self.failUnlessEqual(
- config.find_instance_name(), config.DEFAULT_CONFIG)
+ self.assertEqual(config.find_instance_name(), config.DEFAULT_CONFIG)
class ConfigTestCase(TestCase):
@@ -142,7 +141,7 @@
cfg.root = self.temp_config_root_dir
cfg.generate_overrides()
override_file = os.path.join(cfg.root, 'zcml/+config-overrides.zcml')
- self.failUnless(
+ self.assertTrue(
os.path.isfile(override_file), "Overrides file wasn't created.")
fh = open(override_file)
@@ -150,7 +149,7 @@
fh.close()
magic_line = '<include files="%s/*.zcml" />' % instance_dir
- self.failUnless(
- magic_line in overrides,
+ self.assertIn(
+ magic_line, overrides,
"Overrides doesn't contain the magic include line (%s):\n%s" %
(magic_line, overrides))
=== modified file 'lib/lp/services/database/tests/test_bulk.py'
--- lib/lp/services/database/tests/test_bulk.py 2013-06-20 05:50:00 +0000
+++ lib/lp/services/database/tests/test_bulk.py 2018-01-02 14:40:48 +0000
@@ -57,27 +57,27 @@
class TestBasicFunctions(TestCase):
def test_collate_empty_list(self):
- self.failUnlessEqual([], list(bulk.collate([], object_is_key)))
+ self.assertEqual([], list(bulk.collate([], object_is_key)))
def test_collate_when_object_is_key(self):
- self.failUnlessEqual(
+ self.assertEqual(
[(1, [1])],
list(bulk.collate([1], object_is_key)))
- self.failUnlessEqual(
+ self.assertEqual(
[(1, [1]), (2, [2, 2])],
sorted(bulk.collate([1, 2, 2], object_is_key)))
def test_collate_with_key_function(self):
- self.failUnlessEqual(
+ self.assertEqual(
[(4, ['fred', 'joss']), (6, ['barney'])],
sorted(bulk.collate(['fred', 'barney', 'joss'], len)))
def test_get_type(self):
- self.failUnlessEqual(object, bulk.get_type(object()))
+ self.assertEqual(object, bulk.get_type(object()))
def test_get_type_with_proxied_object(self):
proxied_object = proxy.Proxy('fred', checker.Checker({}))
- self.failUnlessEqual(str, bulk.get_type(proxied_object))
+ self.assertEqual(str, bulk.get_type(proxied_object))
class TestLoaders(TestCaseWithFactory):
@@ -85,16 +85,16 @@
layer = DatabaseFunctionalLayer
def test_gen_reload_queries_with_empty_list(self):
- self.failUnlessEqual([], list(bulk.gen_reload_queries([])))
+ self.assertEqual([], list(bulk.gen_reload_queries([])))
def test_gen_reload_queries_with_single_object(self):
# gen_reload_queries() should generate a single query for a
# single object.
db_objects = [self.factory.makeSourcePackageName()]
db_queries = list(bulk.gen_reload_queries(db_objects))
- self.failUnlessEqual(1, len(db_queries))
+ self.assertEqual(1, len(db_queries))
db_query = db_queries[0]
- self.failUnlessEqual(db_objects, list(db_query))
+ self.assertEqual(db_objects, list(db_query))
def test_gen_reload_queries_with_multiple_similar_objects(self):
# gen_reload_queries() should generate a single query to load
@@ -102,9 +102,9 @@
db_objects = set(
self.factory.makeSourcePackageName() for i in range(5))
db_queries = list(bulk.gen_reload_queries(db_objects))
- self.failUnlessEqual(1, len(db_queries))
+ self.assertEqual(1, len(db_queries))
db_query = db_queries[0]
- self.failUnlessEqual(db_objects, set(db_query))
+ self.assertEqual(db_objects, set(db_query))
def test_gen_reload_queries_with_mixed_objects(self):
# gen_reload_queries() should return one query for each
@@ -114,15 +114,15 @@
db_objects.update(
self.factory.makeComponent() for i in range(5))
db_queries = list(bulk.gen_reload_queries(db_objects))
- self.failUnlessEqual(2, len(db_queries))
+ self.assertEqual(2, len(db_queries))
db_objects_loaded = set()
for db_query in db_queries:
objects = set(db_query)
# None of these objects should have been loaded before.
- self.failUnlessEqual(
+ self.assertEqual(
set(), objects.intersection(db_objects_loaded))
db_objects_loaded.update(objects)
- self.failUnlessEqual(db_objects, db_objects_loaded)
+ self.assertEqual(db_objects, db_objects_loaded)
def test_gen_reload_queries_with_mixed_stores(self):
# gen_reload_queries() returns one query for each distinct
@@ -136,15 +136,15 @@
(IMasterStore(db_object).get(db_object_type, db_object.id),
ISlaveStore(db_object).get(db_object_type, db_object.id)))
db_queries = list(bulk.gen_reload_queries(db_objects))
- self.failUnlessEqual(2, len(db_queries))
+ self.assertEqual(2, len(db_queries))
db_objects_loaded = set()
for db_query in db_queries:
objects = set(db_query)
# None of these objects should have been loaded before.
- self.failUnlessEqual(
+ self.assertEqual(
set(), objects.intersection(db_objects_loaded))
db_objects_loaded.update(objects)
- self.failUnlessEqual(db_objects, db_objects_loaded)
+ self.assertEqual(db_objects, db_objects_loaded)
def test_gen_reload_queries_with_non_Storm_objects(self):
# gen_reload_queries() does not like non-Storm objects.
@@ -166,11 +166,11 @@
db_object_naked = proxy.removeSecurityProxy(db_object)
db_object_info = get_obj_info(db_object_naked)
IStore(db_object).flush()
- self.failUnlessEqual(None, db_object_info.get('invalidated'))
+ self.assertIsNone(db_object_info.get('invalidated'))
IStore(db_object).invalidate(db_object)
- self.failUnlessEqual(True, db_object_info.get('invalidated'))
+ self.assertEqual(True, db_object_info.get('invalidated'))
bulk.reload([db_object])
- self.failUnlessEqual(None, db_object_info.get('invalidated'))
+ self.assertIsNone(db_object_info.get('invalidated'))
def test_load(self):
# load() loads objects of the given type by their primary keys.
=== modified file 'lib/lp/services/database/tests/test_isolation.py'
--- lib/lp/services/database/tests/test_isolation.py 2012-01-01 02:58:52 +0000
+++ lib/lp/services/database/tests/test_isolation.py 2018-01-02 14:40:48 +0000
@@ -21,7 +21,7 @@
def createTransaction(self):
stores = list(store for _, store in getUtility(IZStorm).iterstores())
- self.failUnless(len(stores) > 0, "No stores to test.")
+ self.assertTrue(len(stores) > 0, "No stores to test.")
# One or more of the stores may be set to auto-commit. The transaction
# status remains unchanged for these stores hence they are not useful
# for these tests, so execute a query in every store; one of them will
@@ -35,11 +35,11 @@
transaction.abort()
for name, status in isolation.gen_store_statuses():
self.assertIsInstance(name, (str, unicode))
- self.failUnless(status in (None, TRANSACTION_STATUS_IDLE))
+ self.assertIn(status, (None, TRANSACTION_STATUS_IDLE))
# At least one store will not be idle when a transaction has
# begun.
self.createTransaction()
- self.failUnless(
+ self.assertTrue(
any(status not in (None, TRANSACTION_STATUS_IDLE)
for _, status in isolation.gen_store_statuses()))
@@ -47,11 +47,11 @@
# is_transaction_in_progress() returns False when all
# transactions have been aborted.
transaction.abort()
- self.failIf(isolation.is_transaction_in_progress())
+ self.assertFalse(isolation.is_transaction_in_progress())
# is_transaction_in_progress() returns True when a
# transactions has begun.
self.createTransaction()
- self.failUnless(isolation.is_transaction_in_progress())
+ self.assertTrue(isolation.is_transaction_in_progress())
def test_check_no_transaction(self):
# check_no_transaction() should be a no-op when there are no
@@ -75,7 +75,7 @@
# echo() will just return the given args no transaction is in
# progress.
transaction.abort()
- self.failUnlessEqual(
+ self.assertEqual(
((1, 2, 3), {'a': 4, 'b': 5, 'c': 6}),
echo(1, 2, 3, a=4, b=5, c=6))
# echo() will break with TransactionInProgress when a
=== modified file 'lib/lp/services/database/tests/test_isolation_changes.py'
--- lib/lp/services/database/tests/test_isolation_changes.py 2012-04-06 17:28:25 +0000
+++ lib/lp/services/database/tests/test_isolation_changes.py 2018-01-02 14:40:48 +0000
@@ -48,62 +48,62 @@
return cur.fetchone()[0]
def test_default(self):
- self.failUnlessEqual(self.getCurrentIsolation(), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(), 'read committed')
def test_autocommit(self):
set_isolation_level('autocommit')
# There is no actual 'autocommit' mode in PostgreSQL. psycopg
# implements this feature by using read committed isolation and
# issuing commit() statements after every query.
- self.failUnlessEqual(self.getCurrentIsolation(), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(), 'read committed')
# So we need to confirm we are actually in autocommit mode
# by seeing if we an roll back
cur = cursor()
cur.execute(
"SELECT COUNT(*) FROM Person WHERE homepage_content IS NULL")
- self.failIfEqual(cur.fetchone()[0], 0)
+ self.assertNotEqual(cur.fetchone()[0], 0)
cur.execute("UPDATE Person SET homepage_content=NULL")
transaction.abort()
cur = cursor()
cur.execute(
"SELECT COUNT(*) FROM Person WHERE homepage_content IS NOT NULL")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
def test_readCommitted(self):
set_isolation_level('read_committed')
- self.failUnlessEqual(self.getCurrentIsolation(), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(), 'read committed')
def test_repeatableRead(self):
set_isolation_level('repeatable_read')
- self.failUnlessEqual(self.getCurrentIsolation(), 'repeatable read')
+ self.assertEqual(self.getCurrentIsolation(), 'repeatable read')
def test_serializable(self):
set_isolation_level('serializable')
- self.failUnlessEqual(self.getCurrentIsolation(), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(), 'serializable')
def test_commit(self):
# Change the isolation level
- self.failUnlessEqual(self.getCurrentIsolation(), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(), 'read committed')
set_isolation_level('serializable')
- self.failUnlessEqual(self.getCurrentIsolation(), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(), 'serializable')
cur = cursor()
cur.execute("UPDATE Person SET homepage_content=NULL")
transaction.commit()
cur.execute("UPDATE Person SET homepage_content='foo'")
- self.failUnlessEqual(self.getCurrentIsolation(), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(), 'serializable')
def test_rollback(self):
# Change the isolation level
- self.failUnlessEqual(self.getCurrentIsolation(), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(), 'read committed')
set_isolation_level('serializable')
- self.failUnlessEqual(self.getCurrentIsolation(), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(), 'serializable')
cur = cursor()
cur.execute("UPDATE Person SET homepage_content=NULL")
transaction.abort()
- self.failUnlessEqual(self.getCurrentIsolation(), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(), 'serializable')
def test_script(self):
# Ensure that things work in stand alone scripts too, in case out
@@ -113,8 +113,8 @@
cmd = [sys.executable, script]
process = Popen(cmd, stdout=PIPE, stderr=STDOUT, stdin=PIPE)
(script_output, _empty) = process.communicate()
- self.failUnlessEqual(process.returncode, 0, 'Error: ' + script_output)
- self.failUnlessEqual(script_output, dedent("""\
+ self.assertEqual(process.returncode, 0, 'Error: ' + script_output)
+ self.assertEqual(script_output, dedent("""\
read committed
read committed
repeatable read
@@ -125,19 +125,19 @@
# Ensure connect() method returns a connection with the correct
# default isolation
con = connect()
- self.failUnlessEqual(self.getCurrentIsolation(con), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(con), 'read committed')
con.rollback()
- self.failUnlessEqual(self.getCurrentIsolation(con), 'read committed')
+ self.assertEqual(self.getCurrentIsolation(con), 'read committed')
# Ensure that changing the isolation sticks.
con = connect(isolation=ISOLATION_LEVEL_SERIALIZABLE)
- self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(con), 'serializable')
con.rollback()
- self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(con), 'serializable')
# But on a fresh connection, it works just fine.
con = connect()
con.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
- self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(con), 'serializable')
con.rollback()
- self.failUnlessEqual(self.getCurrentIsolation(con), 'serializable')
+ self.assertEqual(self.getCurrentIsolation(con), 'serializable')
=== modified file 'lib/lp/services/database/tests/test_transaction_decorators.py'
--- lib/lp/services/database/tests/test_transaction_decorators.py 2013-06-20 05:50:00 +0000
+++ lib/lp/services/database/tests/test_transaction_decorators.py 2018-01-02 14:40:48 +0000
@@ -38,8 +38,8 @@
def no_op():
pass
no_op()
- self.failIf(
- self.file_content is self._getTestFileContent(),
+ self.assertIsNot(
+ self.file_content, self._getTestFileContent(),
"Store wasn't reset properly.")
def test_write_transaction_reset_store(self):
@@ -48,8 +48,8 @@
def no_op():
pass
no_op()
- self.failIf(
- self.file_content is self._getTestFileContent(),
+ self.assertIsNot(
+ self.file_content, self._getTestFileContent(),
"Store wasn't reset properly.")
def test_write_transaction_reset_store_with_raise(self):
@@ -58,8 +58,8 @@
def no_op():
raise RuntimeError('an error occured')
self.assertRaises(RuntimeError, no_op)
- self.failIf(
- self.file_content is self._getTestFileContent(),
+ self.assertIsNot(
+ self.file_content, self._getTestFileContent(),
"Store wasn't reset properly.")
def test_writing_transaction_reset_store_on_commit_failure(self):
@@ -79,8 +79,8 @@
def no_op():
pass
self.assertRaises(RuntimeError, no_op)
- self.failIf(
- self.file_content is self._getTestFileContent(),
+ self.assertIsNot(
+ self.file_content, self._getTestFileContent(),
"Store wasn't reset properly.")
finally:
transaction.manager.unregisterSynch(aborter)
=== modified file 'lib/lp/services/geoip/tests/test_request_country.py'
--- lib/lp/services/geoip/tests/test_request_country.py 2012-01-01 02:58:52 +0000
+++ lib/lp/services/geoip/tests/test_request_country.py 2018-01-02 14:40:48 +0000
@@ -30,29 +30,29 @@
def testRemoteAddr(self):
country = request_country({'REMOTE_ADDR': self.lp})
- self.failUnlessEqual(country.name, u'United Kingdom')
+ self.assertEqual(country.name, u'United Kingdom')
def testXForwardedFor(self):
country = request_country({
'HTTP_X_FORWARDED_FOR': self.lp,
'REMOTE_ADDR': '1.2.3.4',
})
- self.failUnlessEqual(country.name, u'United Kingdom')
+ self.assertEqual(country.name, u'United Kingdom')
def testNestedProxies(self):
country = request_country({
'HTTP_X_FORWARDED_FOR':
'localhost, 127.0.0.1, %s, 1,1,1,1' % self.lp,
})
- self.failUnlessEqual(country.name, u'United Kingdom')
+ self.assertEqual(country.name, u'United Kingdom')
def testMissingHeaders(self):
country = request_country({})
- self.failUnless(country is None)
+ self.assertIsNone(country)
def testIgnoreLocalhost(self):
country = request_country({'HTTP_X_FORWARDED_FOR': '127.0.0.1'})
- self.failUnless(country is None)
+ self.assertIsNone(country)
country = request_country({'REMOTE_ADDR': '127.0.0.1'})
- self.failUnless(country is None)
+ self.assertIsNone(country)
=== modified file 'lib/lp/services/googlesearch/tests/test_googleservice.py'
--- lib/lp/services/googlesearch/tests/test_googleservice.py 2012-06-29 08:40:05 +0000
+++ lib/lp/services/googlesearch/tests/test_googleservice.py 2018-01-02 14:40:48 +0000
@@ -22,8 +22,9 @@
def test_stale_pid_file_cleanup(self):
"""The service should be able to clean up invalid PID files."""
bogus_pid = 9999999
- self.failIf(process_exists(bogus_pid),
- "There is already a process with PID '%d'." % bogus_pid)
+ self.assertFalse(
+ process_exists(bogus_pid),
+ "There is already a process with PID '%d'." % bogus_pid)
# Create a stale/bogus PID file.
filepath = pidfile_path(googletestservice.service_name)
@@ -33,8 +34,9 @@
# The PID clean-up code should silently remove the file and return.
googletestservice.kill_running_process()
- self.failIf(os.path.exists(filepath),
- "The PID file '%s' should have been deleted." % filepath)
+ self.assertFalse(
+ os.path.exists(filepath),
+ "The PID file '%s' should have been deleted." % filepath)
def process_exists(pid):
=== modified file 'lib/lp/services/gpg/tests/test_gpghandler.py'
--- lib/lp/services/gpg/tests/test_gpghandler.py 2017-01-26 12:23:56 +0000
+++ lib/lp/services/gpg/tests/test_gpghandler.py 2018-01-02 14:40:48 +0000
@@ -145,7 +145,7 @@
def test_non_ascii_filter(self):
"""localKeys should not error if passed non-ascii unicode strings."""
filtered_keys = self.gpg_handler.localKeys(u'non-ascii \u8463')
- self.failUnlessRaises(StopIteration, filtered_keys.next)
+ self.assertRaises(StopIteration, filtered_keys.next)
def testTestkeyrings(self):
"""Do we have the expected test keyring files"""
=== modified file 'lib/lp/services/librarian/tests/test_client.py'
--- lib/lp/services/librarian/tests/test_client.py 2016-05-31 16:56:01 +0000
+++ lib/lp/services/librarian/tests/test_client.py 2018-01-02 14:40:48 +0000
@@ -172,7 +172,7 @@
client = InstrumentedLibrarianClient()
client.addFile(
'sample.txt', 6, StringIO('sample'), 'text/plain')
- self.failUnless(client.sentDatabaseName,
+ self.assertTrue(client.sentDatabaseName,
"Database-Name header not sent by addFile")
def test_remoteAddFileDoesntSendDatabaseName(self):
@@ -184,7 +184,7 @@
DatabaseLayer.force_dirty_database()
client.remoteAddFile('sample.txt', 6, StringIO('sample'),
'text/plain')
- self.failUnless(client.sentDatabaseName,
+ self.assertTrue(client.sentDatabaseName,
"Database-Name header not sent by remoteAddFile")
def test_clientWrongDatabase(self):
@@ -197,7 +197,7 @@
client.addFile('sample.txt', 6, StringIO('sample'), 'text/plain')
except UploadFailed as e:
msg = e.args[0]
- self.failUnless(
+ self.assertTrue(
msg.startswith('Server said: 400 Wrong database'),
'Unexpected UploadFailed error: ' + msg)
else:
@@ -281,7 +281,7 @@
# download_port.
expected_host = 'http://example.org:1234/'
download_url = client._getURLForDownload(alias_id)
- self.failUnless(download_url.startswith(expected_host),
+ self.assertTrue(download_url.startswith(expected_host),
'expected %s to start with %s' % (download_url,
expected_host))
# If the alias has been deleted, _getURLForDownload returns None.
@@ -317,7 +317,7 @@
# download_port.
expected_host = 'http://example.com:5678/'
download_url = client._getURLForDownload(alias_id)
- self.failUnless(download_url.startswith(expected_host),
+ self.assertTrue(download_url.startswith(expected_host),
'expected %s to start with %s' % (download_url,
expected_host))
# If the alias has been deleted, _getURLForDownload returns None.
@@ -339,11 +339,11 @@
alias_id = client.addFile(
'sample.txt', 6, StringIO('sample'), 'text/plain')
transaction.commit() # Make sure the file is in the "remote" database.
- self.failIf(client.called_getURLForDownload)
+ self.assertFalse(client.called_getURLForDownload)
# (Test:)
f = client.getFileByAlias(alias_id)
self.assertEqual(f.read(), 'sample')
- self.failUnless(client.called_getURLForDownload)
+ self.assertTrue(client.called_getURLForDownload)
def test_getFileByAliasLookupError(self):
# The Librarian server can return a 404 HTTPError;
=== modified file 'lib/lp/services/librarianserver/tests/test_gc.py'
--- lib/lp/services/librarianserver/tests/test_gc.py 2017-12-07 12:05:01 +0000
+++ lib/lp/services/librarianserver/tests/test_gc.py 2018-01-02 14:40:48 +0000
@@ -137,8 +137,8 @@
f2 = LibraryFileAlias.get(f2_id)
# Make sure the duplicates really are distinct
- self.failIfEqual(f1_id, f2_id)
- self.failIfEqual(f1.contentID, f2.contentID)
+ self.assertNotEqual(f1_id, f2_id)
+ self.assertNotEqual(f1.contentID, f2.contentID)
f1.date_created = self.ancient_past
f2.date_created = self.ancient_past
@@ -159,7 +159,7 @@
def test_files_exist(self):
# Confirm the files we expect created by the test harness
# actually exist.
- self.failUnless(self.file_exists(self.f1_id))
+ self.assertTrue(self.file_exists(self.f1_id))
def test_MergeDuplicates(self):
# Merge the duplicates
@@ -173,7 +173,7 @@
self.ztm.begin()
f1 = LibraryFileAlias.get(self.f1_id)
f2 = LibraryFileAlias.get(self.f2_id)
- self.failUnlessEqual(f1.contentID, f2.contentID)
+ self.assertEqual(f1.contentID, f2.contentID)
def test_DeleteUnreferencedAliases(self):
self.ztm.begin()
@@ -346,19 +346,19 @@
AND LibraryFileContent.id IN (%d, %d)
""" % (self.f1_id, self.f2_id))
results = cur.fetchall()
- self.failUnlessEqual(len(results), 1)
+ self.assertEqual(len(results), 1)
unreferenced_id = results[0][0]
self.ztm.abort()
# Make sure the file exists on disk
- self.failUnless(self.file_exists(unreferenced_id))
+ self.assertTrue(self.file_exists(unreferenced_id))
# Delete unreferenced content
librariangc.delete_unreferenced_content(self.con)
# Make sure the file is gone
- self.failIf(self.file_exists(unreferenced_id))
+ self.assertFalse(self.file_exists(unreferenced_id))
# delete_unreferenced_content should have committed
self.ztm.begin()
@@ -373,7 +373,7 @@
WHERE LibraryFileAlias.id IS NULL
""")
results = list(cur.fetchall())
- self.failUnlessEqual(
+ self.assertEqual(
len(results), 0, 'Too many results %r' % (results,)
)
@@ -407,23 +407,23 @@
AND LibraryFileContent.id IN (%d, %d)
""" % (self.f1_id, self.f2_id))
results = cur.fetchall()
- self.failUnlessEqual(len(results), 1)
+ self.assertEqual(len(results), 1)
unreferenced_id = results[0][0]
self.ztm.abort()
# Make sure the file exists on disk
- self.failUnless(self.file_exists(unreferenced_id))
+ self.assertTrue(self.file_exists(unreferenced_id))
# Remove the file from disk
self.remove_file(unreferenced_id)
- self.failIf(self.file_exists(unreferenced_id))
+ self.assertFalse(self.file_exists(unreferenced_id))
# Delete unreferenced content
librariangc.delete_unreferenced_content(self.con)
# Make sure the file is gone
- self.failIf(self.file_exists(unreferenced_id))
+ self.assertFalse(self.file_exists(unreferenced_id))
# delete_unreferenced_content should have committed
self.ztm.begin()
@@ -438,7 +438,7 @@
WHERE LibraryFileAlias.id IS NULL
""")
results = list(cur.fetchall())
- self.failUnlessEqual(
+ self.assertEqual(
len(results), 0, 'Too many results %r' % (results,)
)
@@ -485,7 +485,7 @@
""", (content_id,))
self.ztm.commit()
- self.failUnless(self.file_exists(content_id))
+ self.assertTrue(self.file_exists(content_id))
# Ensure delete_unreferenced_files does not remove the file, because
# it will have just been created (has a recent date_created). There
@@ -493,14 +493,14 @@
# bothering to remove the file to avoid the race condition where the
# garbage collector is run whilst a file is being uploaded.
librariangc.delete_unwanted_files(self.con)
- self.failUnless(self.file_exists(content_id))
+ self.assertTrue(self.file_exists(content_id))
# To test removal does occur when we want it to, we need to trick
# the garbage collector into thinking it is tomorrow.
with self.librariangc_thinking_it_is_tomorrow():
librariangc.delete_unwanted_files(self.con)
- self.failIf(self.file_exists(content_id))
+ self.assertFalse(self.file_exists(content_id))
# Make sure nothing else has been removed from disk
self.ztm.begin()
@@ -509,7 +509,7 @@
SELECT id FROM LibraryFileContent
""")
for content_id in (row[0] for row in cur.fetchall()):
- self.failUnless(self.file_exists(content_id))
+ self.assertTrue(self.file_exists(content_id))
def test_delete_unwanted_files_bug437084(self):
# There was a bug where delete_unwanted_files() would die
@@ -563,9 +563,9 @@
cmd = [sys.executable, script_path, '-q']
process = Popen(cmd, stdout=PIPE, stderr=STDOUT, stdin=PIPE)
(script_output, _empty) = process.communicate()
- self.failUnlessEqual(
+ self.assertEqual(
process.returncode, 0, 'Error: %s' % script_output)
- self.failUnlessEqual(script_output, '')
+ self.assertEqual(script_output, '')
# Make sure that our example files have been garbage collected
self.ztm.begin()
@@ -577,10 +577,10 @@
cur = cursor()
cur.execute("SELECT count(*) FROM LibraryFileAlias")
count = cur.fetchone()[0]
- self.failIfEqual(count, 0)
+ self.assertNotEqual(count, 0)
cur.execute("SELECT count(*) FROM LibraryFileContent")
count = cur.fetchone()[0]
- self.failIfEqual(count, 0)
+ self.assertNotEqual(count, 0)
def test_confirm_no_clock_skew(self):
# There should not be any clock skew when running the test suite.
@@ -951,21 +951,21 @@
SELECT * FROM TemporaryBlobStorage WHERE id=%s
""", (self.expired_blob_id,)
)
- self.failUnless(cur.fetchone() is None)
+ self.assertIsNone(cur.fetchone())
# As should our expired blob linked elsewhere.
cur.execute("""
SELECT * FROM TemporaryBlobStorage WHERE id=%s
""", (self.expired2_blob_id,)
)
- self.failUnless(cur.fetchone() is None)
+ self.assertIsNone(cur.fetchone())
# But our unexpired blob is still hanging around.
cur.execute("""
SELECT * FROM TemporaryBlobStorage WHERE id=%s
""", (self.unexpired_blob_id,)
)
- self.failUnless(cur.fetchone() is not None)
+ self.assertIsNotNone(cur.fetchone())
# Now delete our unreferenced aliases and unreferenced content
cur.execute(
@@ -983,11 +983,11 @@
cur.execute("""
SELECT * FROM LibraryFileAlias WHERE id=%s
""", (self.expired_lfa_id,))
- self.failUnless(cur.fetchone() is None)
+ self.assertIsNone(cur.fetchone())
cur.execute("""
SELECT * FROM LibraryFileContent WHERE id=%s
""", (self.expired_lfc_id,))
- self.failUnless(cur.fetchone() is None)
+ self.assertIsNone(cur.fetchone())
# The second expired blob will has lost its LibraryFileAlias,
# but the content is still hanging around because something else
@@ -995,21 +995,21 @@
cur.execute("""
SELECT * FROM LibraryFileAlias WHERE id=%s
""", (self.expired2_lfa_id,))
- self.failUnless(cur.fetchone() is None)
+ self.assertIsNone(cur.fetchone())
cur.execute("""
SELECT * FROM LibraryFileContent WHERE id=%s
""", (self.expired2_lfc_id,))
- self.failUnless(cur.fetchone() is not None)
+ self.assertIsNotNone(cur.fetchone())
# The unexpired blob should be unaffected
cur.execute("""
SELECT * FROM LibraryFileAlias WHERE id=%s
""", (self.unexpired_lfa_id,))
- self.failUnless(cur.fetchone() is not None)
+ self.assertIsNotNone(cur.fetchone())
cur.execute("""
SELECT * FROM LibraryFileContent WHERE id=%s
""", (self.unexpired_lfc_id,))
- self.failUnless(cur.fetchone() is not None)
+ self.assertIsNotNone(cur.fetchone())
def test_cronscript(self):
# Run the cronscript
@@ -1019,16 +1019,16 @@
cmd = [sys.executable, script_path, '-q']
process = Popen(cmd, stdout=PIPE, stderr=STDOUT, stdin=PIPE)
(script_output, _empty) = process.communicate()
- self.failUnlessEqual(
+ self.assertEqual(
process.returncode, 0, 'Error: %s' % script_output)
- self.failUnlessEqual(script_output, '')
+ self.assertEqual(script_output, '')
cur = self.con.cursor()
# Make sure that our blobs have been garbage collectd
cur.execute("SELECT count(*) FROM TemporaryBlobStorage")
count = cur.fetchone()[0]
- self.failUnlessEqual(count, 1)
+ self.assertEqual(count, 1)
cur.execute("""
SELECT count(*) FROM LibraryFileAlias
@@ -1039,7 +1039,7 @@
self.unexpired_lfa_id
))
count = cur.fetchone()[0]
- self.failUnlessEqual(count, 1)
+ self.assertEqual(count, 1)
cur.execute("""
SELECT count(*) FROM LibraryFileContent
@@ -1050,4 +1050,4 @@
self.unexpired_lfc_id
))
count = cur.fetchone()[0]
- self.failIfEqual(count, 2)
+ self.assertNotEqual(count, 2)
=== modified file 'lib/lp/services/librarianserver/tests/test_storage.py'
--- lib/lp/services/librarianserver/tests/test_storage.py 2015-10-14 15:22:01 +0000
+++ lib/lp/services/librarianserver/tests/test_storage.py 2018-01-02 14:40:48 +0000
@@ -52,7 +52,7 @@
def test_hasFile_missing(self):
# Make sure hasFile returns False when a file is missing
- self.failIf(self.storage.hasFile(9999999))
+ self.assertFalse(self.storage.hasFile(9999999))
def _sameFileTestHelper(self, data1, data2):
# Make two temporary files
@@ -76,23 +76,26 @@
def test_sameFile(self):
# Make sure sameFile returns True when the files are the same
- self.failUnless(self._sameFileTestHelper('data ' * 5000,
- 'data ' * 5000))
+ self.assertTrue(
+ self._sameFileTestHelper('data ' * 5000, 'data ' * 5000))
def test_notSameFile(self):
# Make sure sameFile returns False when the files are different, even
# if they are the same length.
- self.failIf(self._sameFileTestHelper('data ' * 5000, 'fred ' * 5000))
+ self.assertFalse(
+ self._sameFileTestHelper('data ' * 5000, 'fred ' * 5000))
def test_differentFileShorter(self):
# Make sure sameFile returns False when the second file is shorter
# than the first, even if they were the same up to that length.
- self.failIf(self._sameFileTestHelper('data ' * 5000, 'data ' * 4999))
+ self.assertFalse(
+ self._sameFileTestHelper('data ' * 5000, 'data ' * 4999))
def test_differentFileLonger(self):
# Make sure sameFile returns False when the second file is longer than
# the first, even if they were the same up to that length.
- self.failIf(self._sameFileTestHelper('data ' * 5000, 'data ' * 5001))
+ self.assertFalse(
+ self._sameFileTestHelper('data ' * 5000, 'data ' * 5001))
def test_prefixDirectories(self):
# _relFileLocation splits eight hex digits across four path segments
@@ -133,8 +136,8 @@
self.assertEqual(0x11111112, fileid2)
# Did the files both get stored?
- self.failUnless(self.storage.hasFile(fileid1))
- self.failUnless(self.storage.hasFile(fileid2))
+ self.assertTrue(self.storage.hasFile(fileid1))
+ self.assertTrue(self.storage.hasFile(fileid2))
def test_hashes(self):
# Check that the MD5, SHA1 and SHA256 hashes are correct.
=== modified file 'lib/lp/services/librarianserver/tests/test_storage_db.py'
--- lib/lp/services/librarianserver/tests/test_storage_db.py 2013-06-14 04:51:40 +0000
+++ lib/lp/services/librarianserver/tests/test_storage_db.py 2018-01-02 14:40:48 +0000
@@ -37,7 +37,7 @@
newfile.srcDigest = digest
newfile.append(data)
fileid, aliasid = newfile.store()
- self.failUnless(self.storage.hasFile(fileid))
+ self.assertTrue(self.storage.hasFile(fileid))
def test_addFiles_identical(self):
# Start adding two files with identical data
@@ -55,7 +55,7 @@
# But they are two different ids, because we leave duplicate handling
# to the garbage collector
- self.failIfEqual(id1, id2)
+ self.assertNotEqual(id1, id2)
def test_badDigest(self):
data = 'data ' * 50
=== modified file 'lib/lp/services/librarianserver/tests/test_swift.py'
--- lib/lp/services/librarianserver/tests/test_swift.py 2014-12-16 10:51:41 +0000
+++ lib/lp/services/librarianserver/tests/test_swift.py 2018-01-02 14:40:48 +0000
@@ -151,7 +151,7 @@
# Confirm that all the files have gone from disk.
for lfc in self.lfcs:
- self.failIf(os.path.exists(swift.filesystem_path(lfc.id)))
+ self.assertFalse(os.path.exists(swift.filesystem_path(lfc.id)))
# Confirm all the files are in Swift.
swift_client = self.swift_fixture.connect()
@@ -211,7 +211,7 @@
# Data round trips when served from Swift.
swift.to_swift(BufferLogger(), remove_func=os.unlink)
- self.failIf(os.path.exists(swift.filesystem_path(lfc.id)))
+ self.assertFalse(os.path.exists(swift.filesystem_path(lfc.id)))
lfa = self.librarian_client.getFileByAlias(lfa_id)
self.assertEqual(expected_content, lfa.read())
@@ -234,7 +234,7 @@
# Data round trips when served from Swift.
swift.to_swift(BufferLogger(), remove_func=os.unlink)
lfa = self.librarian_client.getFileByAlias(lfa_id)
- self.failIf(os.path.exists(swift.filesystem_path(lfc.id)))
+ self.assertFalse(os.path.exists(swift.filesystem_path(lfc.id)))
self.assertEqual(expected_content, lfa.read())
def test_large_file_to_swift(self):
=== modified file 'lib/lp/services/librarianserver/tests/test_web.py'
--- lib/lp/services/librarianserver/tests/test_web.py 2016-02-05 16:51:12 +0000
+++ lib/lp/services/librarianserver/tests/test_web.py 2018-01-02 14:40:48 +0000
@@ -121,9 +121,9 @@
fileObj = urlopen(url)
mimetype = fileObj.headers['content-type']
encoding = fileObj.headers['content-encoding']
- self.failUnless(mimetype == "text/plain; charset=utf-8",
+ self.assertTrue(mimetype == "text/plain; charset=utf-8",
"Wrong mimetype. %s != 'text/plain'." % mimetype)
- self.failUnless(encoding == "gzip",
+ self.assertTrue(encoding == "gzip",
"Wrong encoding. %s != 'gzip'." % encoding)
def test_checkNoEncoding(self):
@@ -143,7 +143,7 @@
mimetype = fileObj.headers['content-type']
self.assertRaises(KeyError, fileObj.headers.__getitem__,
'content-encoding')
- self.failUnless(
+ self.assertTrue(
mimetype == "application/x-tar",
"Wrong mimetype. %s != 'application/x-tar'." % mimetype)
@@ -180,12 +180,12 @@
self.assertEqual(urlopen(url).read(), 'sample')
# Change the aliasid and assert we get a 404
- self.failUnless(str(aid) in url)
+ self.assertIn(str(aid), url)
bad_id_url = uri_path_replace(url, str(aid), str(aid + 1))
self.require404(bad_id_url)
# Change the filename and assert we get a 404
- self.failUnless(filename in url)
+ self.assertIn(filename, url)
bad_name_url = uri_path_replace(url, filename, 'different.txt')
self.require404(bad_name_url)
@@ -195,18 +195,18 @@
id1 = client.addFile(filename, 6, StringIO('sample'), 'text/plain')
id2 = client.addFile(filename, 6, StringIO('sample'), 'text/plain')
- self.failIfEqual(id1, id2, 'Got allocated the same id!')
+ self.assertNotEqual(id1, id2, 'Got allocated the same id!')
self.commit()
- self.failUnlessEqual(client.getFileByAlias(id1).read(), 'sample')
- self.failUnlessEqual(client.getFileByAlias(id2).read(), 'sample')
+ self.assertEqual(client.getFileByAlias(id1).read(), 'sample')
+ self.assertEqual(client.getFileByAlias(id2).read(), 'sample')
def test_robotsTxt(self):
url = 'http://%s:%d/robots.txt' % (
config.librarian.download_host, config.librarian.download_port)
f = urlopen(url)
- self.failUnless('Disallow: /' in f.read())
+ self.assertIn('Disallow: /', f.read())
def test_headers(self):
client = LibrarianClient()
@@ -235,10 +235,10 @@
# URLs point to the same content for ever, so we have a hardcoded
# 1 year max-age cache policy.
- self.failUnlessEqual(cache_control_header, 'max-age=31536000, public')
+ self.assertEqual(cache_control_header, 'max-age=31536000, public')
# And we should have a correct Last-Modified header too.
- self.failUnlessEqual(
+ self.assertEqual(
last_modified_header, 'Tue, 30 Jan 2001 13:45:59 GMT')
def test_missing_storage(self):
@@ -427,9 +427,9 @@
last_modified_header = result.info()['Last-Modified']
cache_control_header = result.info()['Cache-Control']
# No caching for restricted files.
- self.failUnlessEqual(cache_control_header, 'max-age=0, private')
+ self.assertEqual(cache_control_header, 'max-age=0, private')
# And we should have a correct Last-Modified header too.
- self.failUnlessEqual(
+ self.assertEqual(
last_modified_header, 'Tue, 30 Jan 2001 13:45:59 GMT')
# Perhaps we should also set Expires to the Last-Modified.
@@ -439,7 +439,7 @@
urlopen(url)
self.fail('404 not raised')
except HTTPError as e:
- self.failUnlessEqual(e.code, 404)
+ self.assertEqual(e.code, 404)
class LibrarianZopelessWebTestCase(LibrarianWebTestCase):
@@ -494,7 +494,7 @@
# And it can be retrieved via the web
url = alias.http_url
retrieved_content = urlopen(url).read()
- self.failUnlessEqual(retrieved_content, 'xxx\nxxx\n')
+ self.assertEqual(retrieved_content, 'xxx\nxxx\n')
# But when we flag the content as deleted
cur = cursor()
@@ -505,11 +505,11 @@
# Things become not found
alias = getUtility(ILibraryFileAliasSet)[alias_id]
- self.failUnlessRaises(DownloadFailed, alias.open)
+ self.assertRaises(DownloadFailed, alias.open)
# And people see a 404 page
try:
urlopen(url)
self.fail('404 not raised')
except HTTPError as x:
- self.failUnlessEqual(x.code, 404)
+ self.assertEqual(x.code, 404)
=== modified file 'lib/lp/services/statistics/tests/test_update_stats.py'
--- lib/lp/services/statistics/tests/test_update_stats.py 2017-11-09 18:02:18 +0000
+++ lib/lp/services/statistics/tests/test_update_stats.py 2018-01-02 14:40:48 +0000
@@ -89,13 +89,13 @@
(stdout, empty_stderr) = process.communicate()
# Ensure it returned a success code
- self.failUnlessEqual(
+ self.assertEqual(
process.returncode, 0,
'update-stats.py exited with return code %d. Output was %r' % (
process.returncode, stdout))
# With the -q option, it should produce no output if things went
# well.
- self.failUnlessEqual(
+ self.assertEqual(
stdout, '',
'update-stats.py was noisy. Emitted:\n%s' % stdout)
@@ -106,11 +106,11 @@
# Make sure all DistroSeries.messagecount entries are updated
cur.execute(
"SELECT COUNT(*) FROM DistroSeries WHERE messagecount=-1")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
# Make sure we have created missing DistroSeriesLanguage entries
cur.execute("SELECT COUNT(*) FROM DistroSeriesLanguage")
- self.failUnless(cur.fetchone()[0] > num_distroserieslanguage)
+ self.assertTrue(cur.fetchone()[0] > num_distroserieslanguage)
# Make sure existing DistroSeriesLangauge entries have been updated.
cur.execute("""
@@ -118,35 +118,35 @@
WHERE DistroSeriesLanguage.language = Language.id AND
Language.visible = TRUE AND currentcount = -1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM DistroSeriesLanguage, Language
WHERE DistroSeriesLanguage.language = Language.id AND
Language.visible = TRUE AND updatescount = -1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM DistroSeriesLanguage, Language
WHERE DistroSeriesLanguage.language = Language.id AND
Language.visible = TRUE AND rosettacount = -1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM DistroSeriesLanguage, Language
WHERE DistroSeriesLanguage.language = Language.id AND
Language.visible = TRUE AND unreviewed_count = -1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM DistroSeriesLanguage, Language
WHERE DistroSeriesLanguage.language = Language.id AND
Language.visible = TRUE AND contributorcount = -1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM DistroSeriesLanguage, Language
@@ -154,19 +154,19 @@
Language.visible = TRUE AND
dateupdated < now() - '2 days'::interval
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
# All LaunchpadStatistic rows should have been updated
cur.execute("""
SELECT COUNT(*) FROM LaunchpadStatistic
WHERE value=-1
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
cur.execute("""
SELECT COUNT(*) FROM LaunchpadStatistic
WHERE dateupdated < now() - '2 days'::interval
""")
- self.failUnlessEqual(cur.fetchone()[0], 0)
+ self.assertEqual(cur.fetchone()[0], 0)
keys = [
'potemplate_count', 'pofile_count', 'pomsgid_count',
@@ -184,8 +184,8 @@
SELECT value from LaunchpadStatistic WHERE name=%(key)s
""", dict(key=key))
row = cur.fetchone()
- self.failIf(row is None, '%s not updated' % key)
- self.failUnless(row[0] >= 0, '%s is invalid' % key)
+ self.assertIsNotNone(row, '%s not updated' % key)
+ self.assertTrue(row[0] >= 0, '%s is invalid' % key)
class UpdateTranslationStatsTest(unittest.TestCase):
@@ -220,7 +220,7 @@
if template.distroseries == hoary:
pmount_template = template
- self.failIfEqual(pmount_template, None)
+ self.assertIsNotNone(pmount_template)
# Let's calculate the statistics ourselves so we can check that cached
# values are the right ones.
@@ -236,14 +236,13 @@
spanish).count()
# The amount of messages to translate in Hoary is the expected.
- self.failUnlessEqual(hoary.messagecount, messagecount)
+ self.assertEqual(hoary.messagecount, messagecount)
# And the same for translations and contributors.
- self.failUnlessEqual(spanish_hoary.currentCount(), currentcount)
+ self.assertEqual(spanish_hoary.currentCount(), currentcount)
# XXX Danilo Segan 2010-08-06: we should not assert that
# sampledata is correct. Bug #614397.
- #self.failUnlessEqual(spanish_hoary.contributor_count,
- # contributor_count)
+ #self.assertEqual(spanish_hoary.contributor_count, contributor_count)
# Let's set 'pmount' template as not current for Hoary.
pmount_template.iscurrent = False
@@ -271,7 +270,7 @@
(stdout, empty_stderr) = process.communicate()
# Ensure it returned a success code
- self.failUnlessEqual(
+ self.assertEqual(
process.returncode, 0,
'update-stats.py exited with return code %d. Output was %r' % (
process.returncode, stdout))
@@ -304,22 +303,22 @@
# The amount of messages to translate in Hoary is now lower because we
# don't count anymore pmount messages.
- self.failUnlessEqual(hoary.messagecount, new_messagecount)
- self.failIf(messagecount <= new_messagecount)
- self.failUnlessEqual(messagecount - pmount_messages, new_messagecount)
+ self.assertEqual(hoary.messagecount, new_messagecount)
+ self.assertFalse(messagecount <= new_messagecount)
+ self.assertEqual(messagecount - pmount_messages, new_messagecount)
# The amount of messages translate into Spanish is also lower now
# because we don't count Spanish translations for pmount anymore.
- self.failUnlessEqual(spanish_hoary.currentCount(), new_currentcount)
- self.failIf(currentcount <= new_currentcount)
- self.failUnlessEqual(currentcount - pmount_spanish_translated,
+ self.assertEqual(spanish_hoary.currentCount(), new_currentcount)
+ self.assertFalse(currentcount <= new_currentcount)
+ self.assertEqual(currentcount - pmount_spanish_translated,
new_currentcount)
# Also, there are two Spanish translators that only did contributions
# to pmount, so they are gone now.
- self.failUnlessEqual(
+ self.assertEqual(
spanish_hoary.contributor_count, new_contributor_count)
- self.failIf(contributor_count <= new_contributor_count)
+ self.assertFalse(contributor_count <= new_contributor_count)
def test_english(self):
"""Test that English is handled correctly by DistroSeries.
@@ -338,11 +337,10 @@
for template in moz_templates:
if template.distroseries == hoary:
moz_template = template
- self.failIfEqual(
- moz_template, None,
- 'The pkgconf-mozilla template for hoary is None.')
+ self.assertIsNotNone(
+ moz_template, 'The pkgconf-mozilla template for hoary is None.')
moz_english_count = moz_template.getPOFileByLang('en').messageCount()
- self.failIf(
+ self.assertFalse(
0 == moz_english_count,
'moz_english_pofile should have messages translated')
@@ -353,7 +351,7 @@
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
(stdout, empty_stderr) = process.communicate()
- self.failUnlessEqual(
+ self.assertEqual(
process.returncode, 0,
'update-stats.py exited with return code %d. Output was %r' % (
process.returncode, stdout))
@@ -362,6 +360,6 @@
# of the moz_english_pofile template.
english = self.languageset['en']
english_dsl = hoary.getDistroSeriesLanguage(english)
- self.failUnlessEqual(
+ self.assertEqual(
None, english_dsl, 'The English DistroSeriesLangauge must '
'not exist.')
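For context, the substitutions in this branch are mechanical: each deprecated TestCase alias is replaced by the assertion it forwards to, with assertIsNone/assertIsNotNone, assertIn/assertNotIn and similar used where they read better than a bare truth check. As a rough illustration (not part of the diff itself), the one-for-one renames correspond to the mapping below; the left-hand names have been deprecated since Python 2.7 and were finally removed in Python 3.12.

# Illustrative only: the deprecated unittest aliases replaced throughout this
# branch, and the modern assertions they forward to.
DEPRECATED_ALIASES = {
    'failUnless': 'assertTrue',
    'assert_': 'assertTrue',
    'failIf': 'assertFalse',
    'failUnlessEqual': 'assertEqual',
    'assertEquals': 'assertEqual',
    'failIfEqual': 'assertNotEqual',
    'failUnlessRaises': 'assertRaises',
}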
=== modified file 'lib/lp/services/tests/test_browser_helpers.py'
--- lib/lp/services/tests/test_browser_helpers.py 2016-12-31 05:02:35 +0000
+++ lib/lp/services/tests/test_browser_helpers.py 2018-01-02 14:40:48 +0000
@@ -22,8 +22,8 @@
'Firefox/3.0.10')
version = get_user_agent_distroseries(user_agent)
- self.failUnlessEqual('10.09', version,
- "Incorrect version string returned.")
+ self.assertEqual('10.09', version,
+ "Incorrect version string returned.")
def test_get_user_agent_distroseries_when_invalid(self):
"""None should be returned when the version is not matched."""
@@ -33,5 +33,5 @@
'Firefox/3.0.10')
version = get_user_agent_distroseries(user_agent)
- self.failUnless(version is None,
- "None should be returned when the match fails.")
+ self.assertIsNone(
+ version, "None should be returned when the match fails.")
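The conversion just above also swaps a hand-rolled identity check for assertIsNone. A minimal self-contained sketch (not from this branch) of why that form is preferred: the dedicated assertion reports the offending value on failure, whereas the wrapped truth check only reports False.

import unittest

class FailureMessageDemo(unittest.TestCase):
    # Both tests fail deliberately, to show the difference in output.
    def test_old_style(self):
        version = '10.09'
        self.assertTrue(version is None)   # fails with: False is not true

    def test_new_style(self):
        version = '10.09'
        self.assertIsNone(version)         # fails with: '10.09' is not None

if __name__ == '__main__':
    unittest.main()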
=== modified file 'lib/lp/services/twistedsupport/tests/test_processmonitor.py'
--- lib/lp/services/twistedsupport/tests/test_processmonitor.py 2015-10-14 15:22:01 +0000
+++ lib/lp/services/twistedsupport/tests/test_processmonitor.py 2018-01-02 14:40:48 +0000
@@ -175,10 +175,10 @@
self.protocol.transport.only_sigkill_kills = True
self.protocol.terminateProcess()
saved_delayed_call = self.protocol._sigkill_delayed_call
- self.failUnless(self.protocol._sigkill_delayed_call.active())
+ self.assertTrue(self.protocol._sigkill_delayed_call.active())
self.simulateProcessExit(clean=False)
- self.failUnless(self.protocol._sigkill_delayed_call is None)
- self.failIf(saved_delayed_call.active())
+ self.assertIsNone(self.protocol._sigkill_delayed_call)
+ self.assertFalse(saved_delayed_call.active())
class TestProcessMonitorProtocol(ProcessTestsMixin, TestCase):
@@ -267,7 +267,7 @@
self.simulateProcessExit()
notificaion_pending = True
self.termination_deferred.addCallback(
- lambda ignored: self.failIf(notificaion_pending))
+ lambda ignored: self.assertFalse(notificaion_pending))
notificaion_pending = False
deferred.callback(None)
return self.termination_deferred
=== modified file 'lib/lp/services/webapp/tests/test_authorization.py'
--- lib/lp/services/webapp/tests/test_authorization.py 2016-03-03 19:16:36 +0000
+++ lib/lp/services/webapp/tests/test_authorization.py 2018-01-02 14:40:48 +0000
@@ -450,7 +450,7 @@
"""Principal's access level is used when no scope is given."""
self.principal.access_level = AccessLevel.WRITE_PUBLIC
self.principal.scope_url = None
- self.failUnlessEqual(
+ self.assertEqual(
self.security._getPrincipalsAccessLevel(
self.principal, LoneObject()),
self.principal.access_level)
@@ -460,7 +460,7 @@
obj = LoneObject()
self.principal.access_level = AccessLevel.WRITE_PUBLIC
self.principal.scope_url = '/+loneobject/%d' % obj.id
- self.failUnlessEqual(
+ self.assertEqual(
self.security._getPrincipalsAccessLevel(self.principal, obj),
self.principal.access_level)
@@ -471,17 +471,17 @@
self.principal.scope_url = '/+loneobject/%d' % obj.id
self.principal.access_level = AccessLevel.WRITE_PUBLIC
- self.failUnlessEqual(
+ self.assertEqual(
self.security._getPrincipalsAccessLevel(self.principal, obj2),
AccessLevel.READ_PUBLIC)
self.principal.access_level = AccessLevel.READ_PRIVATE
- self.failUnlessEqual(
+ self.assertEqual(
self.security._getPrincipalsAccessLevel(self.principal, obj2),
AccessLevel.READ_PUBLIC)
self.principal.access_level = AccessLevel.WRITE_PRIVATE
- self.failUnlessEqual(
+ self.assertEqual(
self.security._getPrincipalsAccessLevel(self.principal, obj2),
AccessLevel.READ_PUBLIC)
=== modified file 'lib/lp/services/webapp/tests/test_dbpolicy.py'
--- lib/lp/services/webapp/tests/test_dbpolicy.py 2015-10-14 15:22:01 +0000
+++ lib/lp/services/webapp/tests/test_dbpolicy.py 2018-01-02 14:40:48 +0000
@@ -73,7 +73,7 @@
def test_dbusers(self):
store_selector = getUtility(IStoreSelector)
main_store = store_selector.get(MAIN_STORE, DEFAULT_FLAVOR)
- self.failUnlessEqual(self.getDBUser(main_store), 'launchpad_main')
+ self.assertEqual(self.getDBUser(main_store), 'launchpad_main')
def getDBUser(self, store):
return store.execute(
@@ -129,7 +129,7 @@
def test_master_allowed(self):
for store in ALL_STORES:
- self.failUnlessRaises(
+ self.assertRaises(
DisallowedStore,
getUtility(IStoreSelector).get, store, MASTER_FLAVOR)
@@ -149,7 +149,7 @@
SERVER_URL='http://xmlrpc-private.launchpad.dev')
setFirstLayer(request, IXMLRPCRequest)
policy = getAdapter(request, IDatabasePolicy)
- self.failUnless(
+ self.assertTrue(
isinstance(policy, MasterDatabasePolicy),
"Expected MasterDatabasePolicy, not %s." % policy)
=== modified file 'lib/lp/services/webapp/tests/test_errorlog.py'
--- lib/lp/services/webapp/tests/test_errorlog.py 2017-10-05 12:45:46 +0000
+++ lib/lp/services/webapp/tests/test_errorlog.py 2018-01-02 14:40:48 +0000
@@ -529,7 +529,7 @@
request = TestRequest(
environ={'SERVER_URL': 'http://api.launchpad.dev'},
form={'oauth_signature': '&BTXPJ6pQTvh49r9p'})
- self.failUnless(_is_sensitive(request, 'oauth_signature'))
+ self.assertTrue(_is_sensitive(request, 'oauth_signature'))
@implementer(IUnauthenticatedPrincipal)
=== modified file 'lib/lp/services/webapp/tests/test_initialization.py'
--- lib/lp/services/webapp/tests/test_initialization.py 2017-10-05 12:45:46 +0000
+++ lib/lp/services/webapp/tests/test_initialization.py 2018-01-02 14:40:48 +0000
@@ -37,11 +37,11 @@
# First, we will verify that it is available as a namespace.
namespace = self.sm.getMultiAdapter(
(self.context, self.request), ITraversable, 'oops')
- self.failUnless(isinstance(namespace, OopsNamespace))
+ self.assertTrue(isinstance(namespace, OopsNamespace))
# However, it is not available as a view.
not_a_namespace = self.sm.queryMultiAdapter(
(self.context, self.request), Interface, 'oops')
- self.failIf(isinstance(not_a_namespace, OopsNamespace))
+ self.assertFalse(isinstance(not_a_namespace, OopsNamespace))
def test_no_namespaces_are_views(self):
# This tests an abstract superset of test_oops_namespace_not_view.
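Several hunks in this diff keep isinstance() checks wrapped in assertTrue()/assertFalse(). A possible follow-up, not attempted in this branch, would be the dedicated instance assertions, which name the offending type when they fail; a tiny self-contained sketch:

import unittest

class InstanceAssertionDemo(unittest.TestCase):
    def test_instance_assertions(self):
        # Equivalent to assertTrue(isinstance({}, dict)), with better output.
        self.assertIsInstance({}, dict)
        # Equivalent to assertFalse(isinstance([], dict)).
        self.assertNotIsInstance([], dict)

if __name__ == '__main__':
    unittest.main()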
=== modified file 'lib/lp/services/webapp/tests/test_login_account.py'
--- lib/lp/services/webapp/tests/test_login_account.py 2013-04-10 08:36:30 +0000
+++ lib/lp/services/webapp/tests/test_login_account.py 2018-01-02 14:40:48 +0000
@@ -45,7 +45,7 @@
getUtility(IAccountSet).new(
AccountCreationRationale.UNKNOWN, 'Dummy name')
person = self.factory.makePerson('foo.bar@xxxxxxxxxxx')
- self.failIfEqual(person.id, person.account.id)
+ self.assertNotEqual(person.id, person.account.id)
self.principal = LaunchpadPrincipal(
person.account.id, person.displayname,
person.displayname, person)
@@ -63,7 +63,7 @@
session = ISession(self.request)
# logInPrincipal() stores the account ID in a variable named
# 'accountid'.
- self.failUnlessEqual(
+ self.assertEqual(
session['launchpad.authenticateduser']['accountid'],
int(self.principal.id))
@@ -74,13 +74,13 @@
principal = getUtility(IPlacelessAuthUtility).authenticate(
self.request)
- self.failUnlessEqual(self.principal.id, principal.id)
+ self.assertEqual(self.principal.id, principal.id)
logoutPerson(self.request)
principal = getUtility(IPlacelessAuthUtility).authenticate(
self.request)
- self.failUnless(principal is None)
+ self.assertIsNone(principal)
def test_CookieLogoutPage(self):
# This test shows that the CookieLogoutPage redirects as we expect:
@@ -115,7 +115,7 @@
principal = getUtility(IPlacelessAuthUtility).authenticate(
self.request)
- self.failUnless(principal is None)
+ self.assertIsNone(principal)
# The view should have redirected us, with no actual response body.
@@ -162,11 +162,11 @@
principal = getUtility(IPlacelessAuthUtility).authenticate(
self.request)
- self.failUnlessEqual(self.principal.id, principal.id)
- self.failUnlessEqual(self.principal.person, principal.person)
+ self.assertEqual(self.principal.id, principal.id)
+ self.assertEqual(self.principal.person, principal.person)
logoutPerson(self.request)
principal = getUtility(IPlacelessAuthUtility).authenticate(
self.request)
- self.failUnless(principal is None)
+ self.assertIsNone(principal)
=== modified file 'lib/lp/services/webapp/tests/test_pgsession.py'
--- lib/lp/services/webapp/tests/test_pgsession.py 2014-09-01 07:11:52 +0000
+++ lib/lp/services/webapp/tests/test_pgsession.py 2018-01-02 14:40:48 +0000
@@ -55,8 +55,8 @@
def test_sdc_basics(self):
# Make sure we have the correct class and it provides the required
# interface.
- self.failUnless(isinstance(self.sdc, PGSessionDataContainer))
- self.failUnless(ISessionDataContainer.providedBy(self.sdc))
+ self.assertTrue(isinstance(self.sdc, PGSessionDataContainer))
+ self.assertTrue(ISessionDataContainer.providedBy(self.sdc))
client_id = 'Client Id'
@@ -70,8 +70,8 @@
# Once __setitem__ is called, we can access the SessionData
session_data = self.sdc[client_id]
- self.failUnless(isinstance(session_data, PGSessionData))
- self.failUnless(ISessionData.providedBy(session_data))
+ self.assertTrue(isinstance(session_data, PGSessionData))
+ self.assertTrue(ISessionData.providedBy(session_data))
def test_storage(self):
client_id1 = 'Client Id #1'
@@ -87,8 +87,8 @@
# Set some values in the session
session1a['key1'] = 'value1'
session1a['key2'] = PicklingTest('value2')
- self.failUnlessEqual(session1a['key1'], 'value1')
- self.failUnlessEqual(session1a['key2'].value, 'value2')
+ self.assertEqual(session1a['key1'], 'value1')
+ self.assertEqual(session1a['key2'].value, 'value2')
# Make sure no leakage between sessions
session1b = self.sdc[client_id1][product_id2]
@@ -100,26 +100,26 @@
session1a_dupe = self.sdc[client_id1][product_id1]
# This new session should not be the same object
- self.failIf(session1a is session1a_dupe)
+ self.assertIsNot(session1a, session1a_dupe)
# But it should contain copies of the same data, unpickled from the
# database
- self.failUnlessEqual(session1a['key1'], session1a_dupe['key1'])
- self.failUnlessEqual(session1a['key2'], session1a_dupe['key2'])
+ self.assertEqual(session1a['key1'], session1a_dupe['key1'])
+ self.assertEqual(session1a['key2'], session1a_dupe['key2'])
# They must be copies - not the same object
- self.failIf(session1a['key2'] is session1a_dupe['key2'])
+ self.assertIsNot(session1a['key2'], session1a_dupe['key2'])
# Ensure the keys method works as it is suppsed to
- self.failUnlessEqual(sorted(session1a.keys()), ['key1', 'key2'])
- self.failUnlessEqual(session2a.keys(), [])
+ self.assertEqual(sorted(session1a.keys()), ['key1', 'key2'])
+ self.assertEqual(session2a.keys(), [])
# Ensure we can delete and alter things from the session
del session1a['key1']
session1a['key2'] = 'new value2'
self.assertRaises(KeyError, session1a.__getitem__, 'key1')
- self.failUnlessEqual(session1a['key2'], 'new value2')
- self.failUnlessEqual(session1a.keys(), ['key2'])
+ self.assertEqual(session1a['key2'], 'new value2')
+ self.assertEqual(session1a.keys(), ['key2'])
# Note that deleting will not raise a KeyError
del session1a['key1']
@@ -129,8 +129,8 @@
# And ensure that these changes are persistent
session1a_dupe = self.sdc[client_id1][product_id1]
self.assertRaises(KeyError, session1a_dupe.__getitem__, 'key1')
- self.failUnlessEqual(session1a_dupe['key2'], 'new value2')
- self.failUnlessEqual(session1a_dupe.keys(), ['key2'])
+ self.assertEqual(session1a_dupe['key2'], 'new value2')
+ self.assertEqual(session1a_dupe.keys(), ['key2'])
def test_session_only_stored_when_changed(self):
# A record of the session is only stored in the database when
=== modified file 'lib/lp/services/webapp/tests/test_publication.py'
--- lib/lp/services/webapp/tests/test_publication.py 2017-05-08 11:38:20 +0000
+++ lib/lp/services/webapp/tests/test_publication.py 2018-01-02 14:40:48 +0000
@@ -99,7 +99,7 @@
# different.
self.factory.makeAccount('Personless account')
person = self.factory.makePerson()
- self.failIfEqual(person.id, person.account.id)
+ self.assertNotEqual(person.id, person.account.id)
# Create an access token for our new person.
consumer = getUtility(IOAuthConsumerSet).new(u'test-consumer')
@@ -124,7 +124,7 @@
# having the same IDs as their associated person entries to work.
request = self._getRequestForPersonAndAccountWithDifferentIDs()
principal = WebServicePublication(None).getPrincipal(request)
- self.failIf(principal is None)
+ self.assertIsNotNone(principal)
def test_disconnect_logs_oops(self):
# Ensure that OOPS reports are generated for database
=== modified file 'lib/lp/services/webapp/tests/test_servers.py'
--- lib/lp/services/webapp/tests/test_servers.py 2017-12-17 00:56:02 +0000
+++ lib/lp/services/webapp/tests/test_servers.py 2018-01-02 14:40:48 +0000
@@ -255,30 +255,30 @@
getUtility(IWebServiceConfiguration).path_override, 'api',
"Sanity check: The web service path override should be 'api'.")
- self.assert_(
+ self.assertTrue(
self.factory.isWebServicePath('/api'),
"The factory should handle URLs that start with /api.")
- self.assert_(
+ self.assertTrue(
self.factory.isWebServicePath('/api/foo'),
"The factory should handle URLs that start with /api.")
- self.failIf(
+ self.assertFalse(
self.factory.isWebServicePath('/foo'),
"The factory should not handle URLs that do not start with "
"/api.")
- self.failIf(
+ self.assertFalse(
self.factory.isWebServicePath('/'),
"The factory should not handle URLs that do not start with "
"/api.")
- self.failIf(
+ self.assertFalse(
self.factory.isWebServicePath('/apifoo'),
"The factory should not handle URLs that do not start with "
"/api.")
- self.failIf(
+ self.assertFalse(
self.factory.isWebServicePath('/foo/api'),
"The factory should not handle URLs that do not start with "
"/api.")
@@ -319,14 +319,14 @@
request = config.createRequest(data, env)
stack = request.getTraversalStack()
- self.assert_(config.path_override in stack,
+ self.assertIn(config.path_override, stack,
"Sanity check: the API path should show up in the request's "
"traversal stack: %r" % stack)
request.traverse(None)
stack = request.getTraversalStack()
- self.failIf(config.path_override in stack,
+ self.assertNotIn(config.path_override, stack,
"Web service paths should be dropped from the webservice "
"request traversal stack: %r" % stack)
=== modified file 'lib/lp/services/worlddata/tests/test_language.py'
--- lib/lp/services/worlddata/tests/test_language.py 2013-06-20 05:50:00 +0000
+++ lib/lp/services/worlddata/tests/test_language.py 2018-01-02 14:40:48 +0000
@@ -32,7 +32,7 @@
layer = DatabaseFunctionalLayer
def test_translators(self):
- self.failUnless(
+ self.assertTrue(
IDoNotSnapshot.providedBy(ILanguage['translators']),
"ILanguage.translators should not be included in snapshots, "
"see bug 553093.")
=== modified file 'lib/lp/soyuz/browser/tests/test_distributionsourcepackagerelease.py'
--- lib/lp/soyuz/browser/tests/test_distributionsourcepackagerelease.py 2014-12-18 13:05:10 +0000
+++ lib/lp/soyuz/browser/tests/test_distributionsourcepackagerelease.py 2018-01-02 14:40:48 +0000
@@ -82,7 +82,7 @@
# The snippet renders appropriately when there are no files.
view = create_initialized_view(self.source_package_release, "+files")
html = view.__call__()
- self.failUnless('No files available for download.' in html)
+ self.assertIn('No files available for download.', html)
def test_spr_files_one(self):
# The snippet links to the file when present.
@@ -91,7 +91,7 @@
self.source_package_release.addFile(library_file)
view = create_initialized_view(self.source_package_release, "+files")
html = view.__call__()
- self.failUnless('test_file.dsc' in html)
+ self.assertIn('test_file.dsc', html)
def test_spr_files_deleted(self):
# The snippet handles deleted files too.
@@ -101,4 +101,4 @@
removeSecurityProxy(library_file).content = None
view = create_initialized_view(self.source_package_release, "+files")
html = view.__call__()
- self.failUnless('test_file.dsc (deleted)' in html)
+ self.assertIn('test_file.dsc (deleted)', html)
=== modified file 'lib/lp/soyuz/browser/tests/test_distrosourcepackagerelease.py'
--- lib/lp/soyuz/browser/tests/test_distrosourcepackagerelease.py 2013-01-07 02:40:55 +0000
+++ lib/lp/soyuz/browser/tests/test_distrosourcepackagerelease.py 2018-01-02 14:40:48 +0000
@@ -44,11 +44,11 @@
# The snippet links to the file when present.
view = create_initialized_view(self.dspr, "+index")
html = view.__call__()
- self.failUnless('test_file.dsc' in html)
+ self.assertIn('test_file.dsc', html)
def test_spr_files_deleted(self):
# The snippet handles deleted files too.
removeSecurityProxy(self.library_file).content = None
view = create_initialized_view(self.dspr, "+index")
html = view.__call__()
- self.failUnless('test_file.dsc (deleted)' in html)
+ self.assertIn('test_file.dsc (deleted)', html)
=== modified file 'lib/lp/soyuz/browser/tests/test_sourceslistentries.py'
--- lib/lp/soyuz/browser/tests/test_sourceslistentries.py 2013-01-07 02:40:55 +0000
+++ lib/lp/soyuz/browser/tests/test_sourceslistentries.py 2018-01-02 14:40:48 +0000
@@ -156,9 +156,9 @@
def testNoSelectorForOneSeries(self):
# The selector should not be presented when there is only one series
- self.failUnless(self.view.sources_in_more_than_one_series is False)
+ self.assertTrue(self.view.sources_in_more_than_one_series is False)
def testDefaultDistroSeries(self):
# When there is only one distro series it should always be the
# default.
- self.failUnless(self.view.default_series == self.series[0])
+ self.assertTrue(self.view.default_series == self.series[0])
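The two assertions just above keep an 'is False' and an '==' comparison inside assertTrue(), so a failure would still only say "False is not true". A possible further cleanup, outside the scope of this rename, is to use the comparison-specific assertions, which report both operands on failure. A sketch with stand-in values (the literals below are placeholders, not the real view attributes):

import unittest

class ComparisonAssertionDemo(unittest.TestCase):
    def test_comparison_assertions(self):
        # Stand-in values for illustration only.
        sources_in_more_than_one_series = False
        default_series = expected_series = 'hoary'
        # Instead of assertTrue(x is False):
        self.assertIs(sources_in_more_than_one_series, False)
        # Instead of assertTrue(a == b):
        self.assertEqual(expected_series, default_series)
        # Instead of assertTrue(n >= 0) / assertFalse(a <= b) seen earlier:
        self.assertGreaterEqual(3, 0)
        self.assertGreater(5, 3)

if __name__ == '__main__':
    unittest.main()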
=== modified file 'lib/lp/soyuz/tests/test_distroseriesbinarypackage.py'
--- lib/lp/soyuz/tests/test_distroseriesbinarypackage.py 2016-05-18 08:32:36 +0000
+++ lib/lp/soyuz/tests/test_distroseriesbinarypackage.py 2018-01-02 14:40:48 +0000
@@ -68,7 +68,7 @@
self.distroseries, [self.binary_package_name],
distro_archive_2, logger)
- self.failUnlessEqual(
+ self.assertEqual(
'Foo is the best', self.distroseries_binary_package.summary)
def test_none_cache_passed_at_init_counts_as_cached(self):
=== modified file 'lib/lp/soyuz/tests/test_hasbuildrecords.py'
--- lib/lp/soyuz/tests/test_hasbuildrecords.py 2017-06-29 12:02:11 +0000
+++ lib/lp/soyuz/tests/test_hasbuildrecords.py 2018-01-02 14:40:48 +0000
@@ -172,17 +172,17 @@
BuildFarmJobType.RECIPEBRANCHBUILD, archive=self.context)
builds = self.context.getBuildRecords(binary_only=True)
- self.failUnlessEqual(3, builds.count())
+ self.assertEqual(3, builds.count())
builds = self.context.getBuildRecords(binary_only=False)
- self.failUnlessEqual(4, builds.count())
+ self.assertEqual(4, builds.count())
def test_incompatible_arguments(self):
# binary_only=False is incompatible with arch_tag and name.
- self.failUnlessRaises(
+ self.assertRaises(
IncompatibleArguments, self.context.getBuildRecords,
binary_only=False, arch_tag="anything")
- self.failUnlessRaises(
+ self.assertRaises(
IncompatibleArguments, self.context.getBuildRecords,
binary_only=False, name="anything")
@@ -225,10 +225,10 @@
def test_incompatible_arguments(self):
# binary_only=False is incompatible with arch_tag and name.
- self.failUnlessRaises(
+ self.assertRaises(
IncompatibleArguments, self.context.getBuildRecords,
binary_only=False, arch_tag="anything")
- self.failUnlessRaises(
+ self.assertRaises(
IncompatibleArguments, self.context.getBuildRecords,
binary_only=False, name="anything")
=== modified file 'lib/lp/soyuz/tests/test_packageset.py'
--- lib/lp/soyuz/tests/test_packageset.py 2015-10-21 09:37:08 +0000
+++ lib/lp/soyuz/tests/test_packageset.py 2018-01-02 14:40:48 +0000
@@ -53,7 +53,7 @@
packageset = self.ps_set.new(
self.factory.getUniqueUnicode(), self.factory.getUniqueUnicode(),
self.factory.makePerson(), distroseries=experimental_series)
- self.failUnlessEqual(experimental_series, packageset.distroseries)
+ self.assertEqual(experimental_series, packageset.distroseries)
def test_new_creates_new_packageset_group(self):
# Creating a new packageset should also create a new packageset
@@ -63,7 +63,7 @@
packageset = self.ps_set.new(
self.factory.getUniqueUnicode(), self.factory.getUniqueUnicode(),
owner, distroseries=experimental_series)
- self.failUnlessEqual(owner, packageset.packagesetgroup.owner)
+ self.assertEqual(owner, packageset.packagesetgroup.owner)
def test_new_duplicate_name_for_same_distroseries(self):
# Creating a packageset with a duplicate name for the
@@ -279,7 +279,7 @@
u'kernel', u'Contains all OS kernel packages', self.person1,
self.distroseries_current)
- self.failUnlessEqual(packageset.relatedSets().count(), 0)
+ self.assertEqual(packageset.relatedSets().count(), 0)
def test_related_set_found(self):
# Creating a new package set while specifying a `related_set` should
@@ -315,7 +315,7 @@
# Unsurprisingly, the unrelated package set is not associated with any
# other package set.
- self.failUnlessEqual(pset3.relatedSets().count(), 0)
+ self.assertEqual(pset3.relatedSets().count(), 0)
def test_destroy(self):
series = self.factory.makeDistroSeries()
=== modified file 'lib/lp/testing/tests/test_layers_functional.py'
--- lib/lp/testing/tests/test_layers_functional.py 2015-10-13 14:01:25 +0000
+++ lib/lp/testing/tests/test_layers_functional.py 2018-01-02 14:40:48 +0000
@@ -174,28 +174,24 @@
want_rabbitmq = False
def testBaseIsSetUpFlag(self):
- self.failUnlessEqual(BaseLayer.isSetUp, True)
+ self.assertEqual(BaseLayer.isSetUp, True)
def testFunctionalIsSetUp(self):
- self.failUnlessEqual(
- FunctionalLayer.isSetUp, self.want_functional_flag
- )
+ self.assertEqual(FunctionalLayer.isSetUp, self.want_functional_flag)
def testZopelessIsSetUp(self):
- self.failUnlessEqual(
- ZopelessLayer.isSetUp, self.want_zopeless_flag
- )
+ self.assertEqual(ZopelessLayer.isSetUp, self.want_zopeless_flag)
def testComponentArchitecture(self):
try:
getUtility(ILibrarianClient)
except ComponentLookupError:
- self.failIf(
+ self.assertFalse(
self.want_component_architecture,
'Component Architecture should be available.'
)
else:
- self.failUnless(
+ self.assertTrue(
self.want_component_architecture,
'Component Architecture should not be available.'
)
@@ -207,12 +203,12 @@
# not currently available.
try:
urlopen(config.librarian.download_url).read()
- self.failUnless(
+ self.assertTrue(
self.want_librarian_running,
'Librarian should not be running.'
)
except IOError:
- self.failIf(
+ self.assertFalse(
self.want_librarian_running,
'Librarian should be running.'
)
@@ -232,20 +228,20 @@
'foo.txt', len(data), StringIO(data), 'text/plain'
)
except UploadFailed:
- self.failIf(
+ self.assertFalse(
want_librarian_working,
'Librarian should be fully operational'
)
# Since we use IMasterStore that doesn't throw either AttributeError
# or ComponentLookupError.
except TypeError:
- self.failIf(
+ self.assertFalse(
want_librarian_working,
'Librarian not operational as component architecture '
'not loaded'
)
else:
- self.failUnless(
+ self.assertTrue(
want_librarian_working,
'Librarian should not be operational'
)
@@ -357,7 +353,7 @@
self.sample_data, len(self.sample_data),
StringIO(self.sample_data), 'text/plain'
)
- self.failUnlessEqual(
+ self.assertEqual(
urlopen(LibrarianTestCase.url).read(), self.sample_data
)
# Perform the librarian specific between-test code:
@@ -367,7 +363,7 @@
# XXX: StuartBishop 2006-06-30 Bug=51370:
# We should get a DownloadFailed exception here.
data = urlopen(LibrarianTestCase.url).read()
- self.failIfEqual(data, self.sample_data)
+ self.assertNotEqual(data, self.sample_data)
class LibrarianHideTestCase(testtools.TestCase):
@@ -417,7 +413,7 @@
con = DatabaseLayer.connect()
cur = con.cursor()
cur.execute("DELETE FROM Wikiname")
- self.failUnlessEqual(self.getWikinameCount(con), 0)
+ self.assertEqual(self.getWikinameCount(con), 0)
con.commit()
# Run the per-test code for the Database layer.
DatabaseLayer.testTearDown()
@@ -500,8 +496,8 @@
# Test that the app server is up and running.
mainsite = LayerProcessController.appserver_config.vhost.mainsite
home_page = urlopen(mainsite.rooturl).read()
- self.failUnless(
- 'Is your project registered yet?' in home_page,
+ self.assertIn(
+ 'Is your project registered yet?', home_page,
"Home page couldn't be retrieved:\n%s" % home_page)
def testSMTPServerIsAvailable(self):
@@ -543,9 +539,10 @@
LayerProcessController.appserver_config)
LayerProcessController.stopAppServer()
self.assertRaises(OSError, os.kill, pid, 0)
- self.failIf(os.path.exists(pid_file), "PID file wasn't removed")
- self.failUnless(LayerProcessController.appserver is None,
- "appserver class attribute wasn't reset")
+ self.assertFalse(os.path.exists(pid_file), "PID file wasn't removed")
+ self.assertIsNone(
+ LayerProcessController.appserver,
+ "appserver class attribute wasn't reset")
def test_postTestInvariants(self):
# A LayerIsolationError should be raised if the app server dies in the
@@ -573,7 +570,7 @@
layer = BaseLayer
def testTestName(self):
- self.failUnlessEqual(
+ self.assertEqual(
BaseLayer.test_name,
"testTestName "
"(lp.testing.tests.test_layers_functional.TestNameTestCase)")
=== modified file 'lib/lp/testing/tests/test_pages.py'
--- lib/lp/testing/tests/test_pages.py 2011-12-28 17:03:06 +0000
+++ lib/lp/testing/tests/test_pages.py 2018-01-02 14:40:48 +0000
@@ -39,7 +39,7 @@
# The test directory is looked up relative to the calling
# module's path.
suite = PageTestSuite(os.path.basename(self.tempdir))
- self.failUnless(isinstance(suite, unittest.TestSuite))
+ self.assertTrue(isinstance(suite, unittest.TestSuite))
tests = list(iter_suite_tests(suite))
# Each unnumbered file appears as an independent test.
=== modified file 'lib/lp/testing/tests/test_sampledata.py'
--- lib/lp/testing/tests/test_sampledata.py 2012-01-01 02:58:52 +0000
+++ lib/lp/testing/tests/test_sampledata.py 2018-01-02 14:40:48 +0000
@@ -50,4 +50,4 @@
stdin=subprocess.PIPE)
(stdout, stderr) = proc.communicate()
rv = proc.wait()
- self.failUnlessEqual(rv, 0, "Dump/Restore failed: %s" % stdout)
+ self.assertEqual(rv, 0, "Dump/Restore failed: %s" % stdout)
=== modified file 'lib/lp/testing/tests/test_yuixhr.py'
--- lib/lp/testing/tests/test_yuixhr.py 2012-03-14 04:41:36 +0000
+++ lib/lp/testing/tests/test_yuixhr.py 2018-01-02 14:40:48 +0000
@@ -65,7 +65,7 @@
object, view, request = test_traverse(
'http://launchpad.dev/+yuitest/'
'lib/lp/testing/tests/test_yuixhr_fixture.js')
- self.assertEquals(
+ self.assertEqual(
'lib/lp/testing/tests/test_yuixhr_fixture.js',
removeSecurityProxy(view).traversed_path)
@@ -73,13 +73,13 @@
view = create_traversed_view(
path_info='/+yuitest/lp/testing/tests/test_yuixhr_fixture.js')
view.initialize()
- self.assertEquals(view.JAVASCRIPT, view.action)
+ self.assertEqual(view.JAVASCRIPT, view.action)
def test_request_is_html(self):
view = create_traversed_view(
path_info='/+yuitest/lp/testing/tests/test_yuixhr_fixture')
view.initialize()
- self.assertEquals(view.HTML, view.action)
+ self.assertEqual(view.HTML, view.action)
def test_request_is_setup(self):
view = create_traversed_view(
@@ -87,8 +87,8 @@
form={'action': 'setup', 'fixtures': 'base_line'},
method='POST')
view.initialize()
- self.assertEquals(view.SETUP, view.action)
- self.assertEquals(['base_line'], view.fixtures)
+ self.assertEqual(view.SETUP, view.action)
+ self.assertEqual(['base_line'], view.fixtures)
def test_request_is_teardown(self):
view = create_traversed_view(
@@ -96,8 +96,8 @@
form={'action': 'teardown', 'fixtures': 'base_line'},
method='POST')
view.initialize()
- self.assertEquals(view.TEARDOWN, view.action)
- self.assertEquals(['base_line'], view.fixtures)
+ self.assertEqual(view.TEARDOWN, view.action)
+ self.assertEqual(['base_line'], view.fixtures)
def test_page(self):
view = create_traversed_view(
@@ -173,8 +173,7 @@
form={'action': 'setup', 'fixtures': 'baseline'},
method='POST')
view.initialize()
- self.assertEquals(
- test_yuixhr_fixture._fixtures_, view.get_fixtures())
+ self.assertEqual(test_yuixhr_fixture._fixtures_, view.get_fixtures())
def make_example_setup_function_module(self):
module = types.ModuleType(TEST_MODULE_NAME)
@@ -193,7 +192,7 @@
fixture = setup(module.baseline)
self.assertTrue('_fixtures_' in module.__dict__)
self.assertTrue('baseline' in module._fixtures_)
- self.assertEquals(fixture, module._fixtures_['baseline'])
+ self.assertEqual(fixture, module._fixtures_['baseline'])
self.assertTrue(getattr(fixture, 'add_cleanup', None) is not None)
self.assertTrue(getattr(fixture, 'teardown', None) is not None)
self.assertTrue(getattr(fixture, 'extend', None) is not None)
@@ -259,9 +258,9 @@
def my_teardown(request, data):
result.append('foo')
- self.assertEquals(fixture, fixture.add_cleanup(my_teardown))
+ self.assertEqual(fixture, fixture.add_cleanup(my_teardown))
fixture.teardown(None, None)
- self.assertEquals(['foo'], result)
+ self.assertEqual(['foo'], result)
def test_add_cleanup_decorator_twice(self):
fixture = setup(self.make_example_setup_function_module().baseline)
@@ -272,10 +271,10 @@
def my_other_teardown(request, data):
result.append('bar')
- self.assertEquals(fixture, fixture.add_cleanup(my_teardown))
- self.assertEquals(fixture, fixture.add_cleanup(my_other_teardown))
+ self.assertEqual(fixture, fixture.add_cleanup(my_teardown))
+ self.assertEqual(fixture, fixture.add_cleanup(my_other_teardown))
fixture.teardown(None, None)
- self.assertEquals(['bar', 'foo'], result)
+ self.assertEqual(['bar', 'foo'], result)
def test_do_teardown(self):
del test_yuixhr_fixture._received[:]
@@ -392,11 +391,11 @@
third_fixture.add_cleanup(
lambda request, data: called.append('third'))
third_fixture.teardown(None, dict())
- self.assertEquals(['third', 'second', 'original'], called)
+ self.assertEqual(['third', 'second', 'original'], called)
del called[:]
original_fixture.teardown(None, dict())
- self.assertEquals(['original'], called)
+ self.assertEqual(['original'], called)
def test_python_fixture_does_not_reload_by_default(self):
# Even though the dangers of Python's "reload" are subtle and
@@ -413,7 +412,7 @@
'test_yuixhr_fixture')
view.initialize()
view.render()
- self.assertEquals(
+ self.assertEqual(
'hello', test_yuixhr_fixture._fixtures_['baseline'].scribble)
def test_python_fixture_does_not_reload_without_environ_var(self):
@@ -430,7 +429,7 @@
'test_yuixhr_fixture', form=dict(reload='1'))
view.initialize()
view.render()
- self.assertEquals(
+ self.assertEqual(
'hello', test_yuixhr_fixture._fixtures_['baseline'].scribble)
def test_python_fixture_can_reload(self):
@@ -444,12 +443,11 @@
# reloading only happens at render time, so the scribble is
# still there for now.
view.initialize()
- self.assertEquals(
+ self.assertEqual(
'hello', test_yuixhr_fixture._fixtures_['baseline'].scribble)
# After a render of the html view, the module is reloaded.
view.render()
- self.assertEquals(
- None,
+ self.assertIsNone(
getattr(test_yuixhr_fixture._fixtures_['baseline'],
'scribble',
None))
@@ -467,8 +465,7 @@
view.initialize()
# After a render of the html view, the module is reloaded.
view.render()
- self.assertEquals(
- None,
+ self.assertIsNone(
test_yuixhr_fixture._fixtures_.get('extra_scribble'))
def test_python_fixture_reload_in_html(self):
@@ -481,8 +478,7 @@
view.initialize()
# After a render of the html view, the module is reloaded.
view.renderHTML()
- self.assertEquals(
- None,
+ self.assertIsNone(
test_yuixhr_fixture._fixtures_.get('extra_scribble'))
def test_index_page(self):
=== modified file 'lib/lp/testing/tests/test_zope_test_in_subprocess.py'
--- lib/lp/testing/tests/test_zope_test_in_subprocess.py 2012-06-13 17:55:22 +0000
+++ lib/lp/testing/tests/test_zope_test_in_subprocess.py 2018-01-02 14:40:48 +0000
@@ -107,14 +107,14 @@
def setUp(self):
# Runs in the child process.
super(TestZopeTestInSubProcess, self).setUp()
- self.failUnlessEqual(
+ self.assertEqual(
self.layer.pid_in_testSetUp, self.pid_in_setUp,
"setUp() not called in same process as layer.testSetUp().")
@record_pid
def test(self):
# Runs in the child process.
- self.failUnlessEqual(
+ self.assertEqual(
self.pid_in_setUp, self.pid_in_test,
"test method not run in same process as setUp().")
@@ -122,6 +122,6 @@
def tearDown(self):
# Runs in the child process.
super(TestZopeTestInSubProcess, self).tearDown()
- self.failUnlessEqual(
+ self.assertEqual(
self.pid_in_setUp, self.pid_in_tearDown,
"tearDown() not run in same process as setUp().")
=== modified file 'lib/lp/tests/test_no_conflict_marker.py'
--- lib/lp/tests/test_no_conflict_marker.py 2015-10-26 14:54:43 +0000
+++ lib/lp/tests/test_no_conflict_marker.py 2018-01-02 14:40:48 +0000
@@ -36,7 +36,8 @@
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
env=new_env)
out, err = process.communicate()
- self.failIf(len(out), 'Found spurious conflicts marker:\n%s' % out)
+ self.assertFalse(
+ len(out), 'Found spurious conflicts marker:\n%s' % out)
def test_suite():
=== modified file 'lib/lp/translations/tests/test_autoapproval.py'
--- lib/lp/translations/tests/test_autoapproval.py 2017-05-08 11:38:20 +0000
+++ lib/lp/translations/tests/test_autoapproval.py 2018-01-02 14:40:48 +0000
@@ -258,7 +258,7 @@
self.becomeTheGardener()
el_file = entry.getGuessedPOFile()
- self.failIfEqual(el_file, elx_file)
+ self.assertNotEqual(el_file, elx_file)
self.assertEqual(el_file.language.code, 'el')
def test_CustomLanguageCodeRedirectsMatch(self):
=== modified file 'lib/lp/translations/tests/test_pofile.py'
--- lib/lp/translations/tests/test_pofile.py 2014-01-30 15:04:06 +0000
+++ lib/lp/translations/tests/test_pofile.py 2018-01-02 14:40:48 +0000
@@ -2083,7 +2083,7 @@
[self._createMessageSet(msg) for msg in self.TEST_MESSAGES]
for rownum, row in enumerate(
self.pofile.getTranslationRows()):
- self.failUnlessEqual(
+ self.assertEqual(
row.sequence, self.EXPECTED_SEQUENCE[rownum],
"getTranslationRows does not sort obsolete messages "
"(sequence=0) to the end of the file.")
=== modified file 'lib/lp/translations/tests/test_potemplate.py'
--- lib/lp/translations/tests/test_potemplate.py 2014-08-28 01:17:14 +0000
+++ lib/lp/translations/tests/test_potemplate.py 2018-01-02 14:40:48 +0000
@@ -47,7 +47,7 @@
self.potemplate.path = "testdir/messages.pot"
expected = "testdir/testdomain-eo.po"
result = self.potemplate._composePOFilePath(esperanto)
- self.failUnlessEqual(expected, result,
+ self.assertEqual(expected, result,
"_composePOFilePath does not create a correct file name with "
"directory and language code. "
"(Expected: '%s' Got: '%s')" % (expected, result))
@@ -57,7 +57,7 @@
esperanto_variant = self.factory.makeLanguage(
'eo@variant', 'Esperanto Variant')
result = self.potemplate._composePOFilePath(esperanto_variant)
- self.failUnlessEqual(expected, result,
+ self.assertEqual(expected, result,
"_composePOFilePath does not create a correct file name with "
"directory, language code and variant. "
"(Expected: '%s' Got: '%s')" % (expected, result))
@@ -65,7 +65,7 @@
self.potemplate.path = "/messages.pot"
expected = "/testdomain-eo.po"
result = self.potemplate._composePOFilePath(esperanto)
- self.failUnlessEqual(expected, result,
+ self.assertEqual(expected, result,
"_composePOFilePath does not create a correct file name with "
"leading slash and language code. "
"(Expected: '%s' Got: '%s')" % (expected, result))
@@ -73,7 +73,7 @@
self.potemplate.path = "messages.pot"
expected = "testdomain-eo.po"
result = self.potemplate._composePOFilePath(esperanto)
- self.failUnlessEqual(expected, result,
+ self.assertEqual(expected, result,
"_composePOFilePath does not create a correct file name with "
"missing directory and language code. "
"(Expected: '%s' Got: '%s')" % (expected, result))
=== modified file 'lib/lp/translations/tests/test_productserieslanguage.py'
--- lib/lp/translations/tests/test_productserieslanguage.py 2015-10-14 16:23:18 +0000
+++ lib/lp/translations/tests/test_productserieslanguage.py 2018-01-02 14:40:48 +0000
@@ -148,7 +148,7 @@
# With no templates all counts are zero.
psl = self.psl_set.getProductSeriesLanguage(
self.productseries, self.language)
- self.failUnless(verifyObject(IProductSeriesLanguage, psl))
+ self.assertTrue(verifyObject(IProductSeriesLanguage, psl))
self.assertPSLStatistics(psl, (0, 0, 0, 0, 0, 0, None))
# Adding a single template with 10 messages makes the total
=== modified file 'lib/lp/translations/utilities/tests/test_file_importer.py'
--- lib/lp/translations/utilities/tests/test_file_importer.py 2015-01-16 16:44:27 +0000
+++ lib/lp/translations/utilities/tests/test_file_importer.py 2018-01-02 14:40:48 +0000
@@ -188,22 +188,22 @@
def test_FileImporter_importMessage_NotImplemented(self):
importer = self._createFileImporter()
- self.failUnlessRaises(NotImplementedError,
+ self.assertRaises(NotImplementedError,
importer.importMessage, None)
def test_FileImporter_format_exporter(self):
# Test if format_exporter behaves like a singleton
importer = self._createFileImporter()
- self.failUnless(importer._cached_format_exporter is None,
+ self.assertIsNone(importer._cached_format_exporter,
"FileImporter._cached_format_exporter was not None, "
"although it had not been used yet.")
format_exporter1 = importer.format_exporter
- self.failUnless(format_exporter1 is not None,
+ self.assertIsNotNone(format_exporter1,
"FileImporter.format_exporter was not instantiated on demand.")
format_exporter2 = importer.format_exporter
- self.failUnless(format_exporter1 is format_exporter2,
+ self.assertIs(format_exporter1, format_exporter2,
"FileImporter.format_exporter was instantiated multiple time, "
"but should have been cached.")
@@ -218,17 +218,17 @@
pot_importer.potemplate.getPOTMsgSetByMsgIDText(
message.msgid_singular, plural_text=message.msgid_plural,
context=message.context))
- self.failUnless(potmsgset1 is None,
+ self.assertIsNone(potmsgset1,
"IPOTMsgSet object already existed in DB, unable to test "
"FileImporter.getOrCreatePOTMsgSet")
potmsgset1 = pot_importer.getOrCreatePOTMsgSet(message)
- self.failUnless(potmsgset1 is not None,
+ self.assertIsNotNone(potmsgset1,
"FileImporter.getOrCreatePOTMessageSet did not create a new "
"IPOTMsgSet object in the database.")
potmsgset2 = pot_importer.getOrCreatePOTMsgSet(message)
- self.failUnlessEqual(potmsgset1.id, potmsgset2.id,
+ self.assertEqual(potmsgset1.id, potmsgset2.id,
"FileImporter.getOrCreatePOTMessageSet did not get an existing "
"IPOTMsgSet object from the database.")
@@ -311,21 +311,21 @@
def test_FileImporter_init(self):
(pot_importer, po_importer) = self._createImporterForExportedEntries()
# The number of test messages is constant (see above).
- self.failUnlessEqual(
+ self.assertEqual(
len(pot_importer.translation_file.messages),
NUMBER_OF_TEST_MESSAGES,
"FileImporter.__init__ did not parse the template file "
"correctly.")
# Test if POTFileImporter gets initialized correctly.
- self.failUnless(pot_importer.potemplate is not None,
+ self.assertIsNotNone(pot_importer.potemplate,
"POTFileImporter had no reference to an IPOTemplate.")
- self.failUnless(pot_importer.pofile is None or
+ self.assertTrue(pot_importer.pofile is None or
pot_importer.pofile.language == "en",
"POTFileImporter referenced an IPOFile which was not English.")
# Test if POFileImporter gets initialized correctly.
- self.failUnless(po_importer.potemplate is not None,
+ self.assertIsNotNone(po_importer.potemplate,
"POTFileImporter had no reference to an IPOTemplate.")
- self.failUnless(po_importer.pofile is not None,
+ self.assertIsNotNone(po_importer.pofile,
"POFileImporter had no reference to an IPOFile.")
def test_FileImporter_getPersonByEmail(self):
@@ -337,16 +337,16 @@
personset = getUtility(IPersonSet)
# The account we are going to use is not yet in Launchpad.
- self.failUnless(
- personset.getByEmail(test_email) is None,
+ self.assertIsNone(
+ personset.getByEmail(test_email),
'There is already an account for %s' % test_email)
person = po_importer._getPersonByEmail(test_email)
- self.failUnlessEqual(
+ self.assertEqual(
person.creation_rationale.name, 'POFILEIMPORT',
'%s was not created due to a POFile import' % test_email)
- self.failUnlessEqual(
+ self.assertEqual(
person.creation_comment,
'when importing the %s translation of %s' % (
po_importer.pofile.language.displayname,
@@ -373,17 +373,17 @@
# Run the import and see if PotMsgSet and TranslationMessage
# entries are correctly created in the DB.
errors, warnings = pot_importer.importFile()
- self.failUnlessEqual(len(errors), 0,
+ self.assertEqual(len(errors), 0,
"POTFileImporter.importFile returned errors where there "
"should be none.")
potmsgset = pot_importer.potemplate.getPOTMsgSetByMsgIDText(
TEST_MSGID)
- self.failUnless(potmsgset is not None,
+ self.assertIsNotNone(potmsgset,
"POTFileImporter.importFile did not create an IPOTMsgSet "
"object in the database.")
errors, warnings = po_importer.importFile()
- self.failUnlessEqual(len(errors), 0,
+ self.assertEqual(len(errors), 0,
"POFileImporter.importFile returned errors where there "
"should be none.")
potmsgset = po_importer.pofile.potemplate.getPOTMsgSetByMsgIDText(
@@ -391,7 +391,7 @@
message = potmsgset.getCurrentTranslation(
po_importer.potemplate, po_importer.pofile.language,
po_importer.potemplate.translation_side)
- self.failUnless(message is not None,
+ self.assertIsNotNone(message,
"POFileImporter.importFile did not create an "
"ITranslationMessage object in the database.")
@@ -404,12 +404,12 @@
# First import template.
errors, warnings = pot_importer.importFile()
- self.failUnlessEqual(len(errors), 0,
+ self.assertEqual(len(errors), 0,
"POTFileImporter.importFile returned errors where there should "
"be none.")
# Now import translation.
errors, warnings = po_importer.importFile()
- self.failUnlessEqual(len(errors), 0,
+ self.assertEqual(len(errors), 0,
"POFileImporter.importFile returned errors where there should "
"be none.")
self.fake_librarian.pretendCommit()
@@ -421,10 +421,10 @@
po_importer.pofile)
# Try to import this, too.
errors, warnings = po_importer2.importFile()
- self.failUnlessEqual(len(errors), 1,
+ self.assertEqual(len(errors), 1,
"No error detected when importing a pofile with an earlier "
"export timestamp (update conflict).")
- self.failUnless(
+ self.assertTrue(
errors[0]['error-message'].find(
u"updated by someone else after you") != -1,
"importFile() failed to detect a message update conflict.")
@@ -437,14 +437,14 @@
TEST_TEMPLATE_FOR_ERROR,
TEST_TRANSLATION_FILE_WITH_ERROR, False)
errors, warnings = pot_importer.importFile()
- self.failUnlessEqual(len(errors), 0,
+ self.assertEqual(len(errors), 0,
"POTFileImporter.importFile returned errors where there should "
"be none.")
errors, warnings = po_importer.importFile()
- self.failUnlessEqual(len(errors), 1,
+ self.assertEqual(len(errors), 1,
"No error detected when importing a pofile with mismatched "
"format specifiers.")
- self.failUnless(errors[0]['error-message'].find(
+ self.assertTrue(errors[0]['error-message'].find(
u"format specifications in 'msgid' and 'msgstr' "
u"for argument 1 are not the same") != -1,
"importFile() failed to detect mismatched format specifiers "
@@ -455,7 +455,7 @@
unicode(TEST_MSGID_ERROR))
message = potmsgset.getLocalTranslationMessages(
po_importer.potemplate, po_importer.pofile.language)[0]
- self.failUnless(message is not None,
+ self.assertIsNotNone(message,
"POFileImporter.importFile did not create an "
"ITranslationMessage object with format errors in the database.")
=== modified file 'lib/lp/translations/utilities/tests/test_gettext_po_exporter.py'
--- lib/lp/translations/utilities/tests/test_gettext_po_exporter.py 2012-01-01 02:58:52 +0000
+++ lib/lp/translations/utilities/tests/test_gettext_po_exporter.py 2018-01-02 14:40:48 +0000
@@ -63,19 +63,19 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationFormatExporter,
self.translation_exporter),
"GettextPOExporter doesn't follow the interface")
def testSupportedFormats(self):
"""Check that the exporter reports the correct formats."""
- self.failUnlessEqual(
+ self.assertEqual(
self.translation_exporter.format,
TranslationFileFormat.PO,
"Expected GettextPOExporter to provide PO format "
"but got %r instead." % self.translation_exporter.format)
- self.failUnlessEqual(
+ self.assertEqual(
self.translation_exporter.supported_source_formats,
[TranslationFileFormat.PO, TranslationFileFormat.KDEPO],
"Expected GettextPOExporter to support PO and KDEPO source "
=== modified file 'lib/lp/translations/utilities/tests/test_gettext_po_importer.py'
--- lib/lp/translations/utilities/tests/test_gettext_po_importer.py 2017-04-19 11:01:16 +0000
+++ lib/lp/translations/utilities/tests/test_gettext_po_importer.py 2018-01-02 14:40:48 +0000
@@ -83,7 +83,7 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationFormatImporter, self.template_importer),
"GettextPOImporter doesn't conform to ITranslationFormatImporter"
"interface.")
@@ -91,6 +91,6 @@
def testFormat(self):
# GettextPOImporter reports that it handles the PO file format.
format = self.template_importer.getFormat(BytesIO(test_template))
- self.failUnless(
+ self.assertTrue(
format == TranslationFileFormat.PO,
'GettextPOImporter format expected PO but got %s' % format.name)
=== modified file 'lib/lp/translations/utilities/tests/test_gettext_po_parser.py'
--- lib/lp/translations/utilities/tests/test_gettext_po_parser.py 2015-10-14 16:23:18 +0000
+++ lib/lp/translations/utilities/tests/test_gettext_po_parser.py 2018-01-02 14:40:48 +0000
@@ -431,10 +431,8 @@
# When it's the default one in Gettext (FULL NAME <EMAIL@ADDRESS>),
# used in templates, we get a tuple with None values.
name, email = template_file.header.getLastTranslator()
- self.failUnless(name is None,
- "Didn't detect default Last Translator name")
- self.failUnless(email is None,
- "Didn't detect default Last Translator email")
+ self.assertIsNone(name, "Didn't detect default Last Translator name")
+ self.assertIsNone(email, "Didn't detect default Last Translator email")
translation_file = self.parser.parse('''
msgid ""
=== modified file 'lib/lp/translations/utilities/tests/test_gettext_pochanged_exporter.py'
--- lib/lp/translations/utilities/tests/test_gettext_pochanged_exporter.py 2015-10-14 16:23:18 +0000
+++ lib/lp/translations/utilities/tests/test_gettext_pochanged_exporter.py 2018-01-02 14:40:48 +0000
@@ -28,19 +28,19 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationFormatExporter,
self.translation_exporter),
"GettextPOExporter doesn't follow the interface")
def testSupportedFormats(self):
"""Check that the exporter reports the correct formats."""
- self.failUnlessEqual(
+ self.assertEqual(
self.translation_exporter.format,
TranslationFileFormat.POCHANGED,
"Expected GettextPOChangedExporter to provide POCHANGED format "
"but got %r instead." % self.translation_exporter.format)
- self.failUnlessEqual(
+ self.assertEqual(
self.translation_exporter.supported_source_formats,
[],
"Expected GettextPOChangedExporter to support no source formats "
=== modified file 'lib/lp/translations/utilities/tests/test_kde_po_importer.py'
--- lib/lp/translations/utilities/tests/test_kde_po_importer.py 2017-04-19 11:01:16 +0000
+++ lib/lp/translations/utilities/tests/test_kde_po_importer.py 2018-01-02 14:40:48 +0000
@@ -108,7 +108,7 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationFormatImporter, self.template_importer),
"KdePOImporter doesn't conform to ITranslationFormatImporter"
"interface.")
@@ -116,7 +116,7 @@
def testFormat(self):
"""Check whether KdePOImporter can handle the KDEPO file format."""
format = self.template_importer.getFormat(BytesIO(test_kde_template))
- self.failUnless(
+ self.assertTrue(
format == TranslationFileFormat.KDEPO,
'KdePOImporter format expected KDEPO but got %s' % format.name)
@@ -127,7 +127,7 @@
# priority over GettextPOImporter
gettext_importer = GettextPOImporter()
- self.failUnless(
+ self.assertTrue(
self.template_importer.priority > gettext_importer.priority,
'KdePOImporter priority is not higher than priority of '
'GettextPOImporter')
@@ -135,7 +135,7 @@
def testGettextPOFileFormat(self):
"""Check that non-KDE PO files are recognized as regular PO files."""
format = self.gettext_template_entry.format
- self.failUnless(format == TranslationFileFormat.PO,
+ self.assertTrue(format == TranslationFileFormat.PO,
('KdePOImporter format expected PO '
'but got %s for non-KDE PO file.' % format.name))
@@ -144,7 +144,7 @@
message = self.template_file.messages[0]
singular = message.msgid_singular
plural = message.msgid_plural
- self.failUnless(
+ self.assertTrue(
(singular == u'%1 foo' and plural == u'%1 foos'),
"KdePOImporter didn't import KDE plural forms correctly.")
@@ -153,7 +153,7 @@
"""
message = self.translation_file.messages[0]
translations = message.translations
- self.failUnless(
+ self.assertTrue(
(translations[0] == u'1st plural form %1' and
translations[1] == u'2nd plural form %1' and
translations[2] == u'3rd plural form %1'),
@@ -165,7 +165,7 @@
message = self.template_file.messages[1]
singular = message.msgid_singular
context = message.context
- self.failUnless(
+ self.assertTrue(
(singular == u'Message' and context == u'Context'),
"KdePOImporter didn't import KDE context correctly.")
@@ -175,7 +175,7 @@
singular = message.msgid_singular
context = message.context
translations = message.translations
- self.failUnless(
+ self.assertTrue(
(singular == u'Message' and context == u'Context' and
translations[0] == u'Contextual translation'),
"KdePOImporter didn't import translated KDE context correctly.")
=== modified file 'lib/lp/translations/utilities/tests/test_mozilla_xpi_importer.py'
--- lib/lp/translations/utilities/tests/test_mozilla_xpi_importer.py 2017-04-19 11:01:16 +0000
+++ lib/lp/translations/utilities/tests/test_mozilla_xpi_importer.py 2018-01-02 14:40:48 +0000
@@ -30,19 +30,19 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationFormatImporter, self.importer))
def testFormat(self):
"""Check that MozillaXpiImporter handles the XPI file format."""
format = self.importer.getFormat(BytesIO(b''))
- self.failUnless(
+ self.assertTrue(
format == TranslationFileFormat.XPI,
'MozillaXpiImporter format expected XPI but got %s' % format.name)
def testHasAlternativeMsgID(self):
"""Check that MozillaXpiImporter has an alternative msgid."""
- self.failUnless(
+ self.assertTrue(
self.importer.uses_source_string_msgids,
"MozillaXpiImporter format says it's not using alternative msgid"
" when it really does!")
=== modified file 'lib/lp/translations/utilities/tests/test_translation_exporter.py'
--- lib/lp/translations/utilities/tests/test_translation_exporter.py 2011-12-28 17:03:06 +0000
+++ lib/lp/translations/utilities/tests/test_translation_exporter.py 2018-01-02 14:40:48 +0000
@@ -34,10 +34,10 @@
def testInterface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(ITranslationExporter, self.translation_exporter),
"TranslationExporter doesn't follow the interface")
- self.failUnless(
+ self.assertTrue(
verifyObject(
IExportedTranslationFile,
ExportedTranslationFile(StringIO())),
@@ -50,16 +50,16 @@
translation_exporter.getExporterProducingTargetFileFormat(
TranslationFileFormat.PO))
- self.failIf(
- po_format_exporter is None,
+ self.assertIsNotNone(
+ po_format_exporter,
'Expected PO file format exporter was not found')
mo_format_exporter = (
translation_exporter.getExporterProducingTargetFileFormat(
TranslationFileFormat.MO))
- self.failIf(
- mo_format_exporter is None,
+ self.assertIsNotNone(
+ mo_format_exporter,
'Expected MO file format exporter was not found')
def testGetTranslationFormatExportersForFileFormat(self):
=== modified file 'lib/lp/translations/utilities/tests/test_xpi_import.py'
--- lib/lp/translations/utilities/tests/test_xpi_import.py 2017-04-19 11:01:16 +0000
+++ lib/lp/translations/utilities/tests/test_xpi_import.py 2018-01-02 14:40:48 +0000
@@ -83,7 +83,7 @@
"""Check whether invariant part of all messages are correct."""
# msgid and singular_text are always different except for the keyboard
# shortcuts which are the 'accesskey' and 'commandkey' ones.
- self.failIf(
+ self.assertFalse(
(message.msgid_singular.msgid == message.singular_text and
message.msgid_singular.msgid not in (
u'foozilla.menu.accesskey', u'foozilla.menu.commandkey')),
=== modified file 'lib/lp/translations/utilities/tests/test_xpi_manifest.py'
--- lib/lp/translations/utilities/tests/test_xpi_manifest.py 2011-08-12 11:19:40 +0000
+++ lib/lp/translations/utilities/tests/test_xpi_manifest.py 2018-01-02 14:40:48 +0000
@@ -22,7 +22,7 @@
self.assertEqual(len(manifest._locales), 1)
chrome_path, locale = manifest.getChromePathAndLocale(
'directory/file.dtd')
- self.failIf(chrome_path is None, "Failed to match simple path")
+ self.assertIsNotNone(chrome_path, "Failed to match simple path")
self.assertEqual(
chrome_path, "chromepath/file.dtd", "Bad chrome path")
@@ -31,8 +31,8 @@
manifest = XpiManifest("locale chromepath en-US directory/")
chrome_path, locale = manifest.getChromePathAndLocale(
'nonexistent/file')
- self.failIf(chrome_path is not None, "Unexpected path match.")
- self.failIf(locale is not None, "Got locale without a match.")
+ self.assertIsNone(chrome_path, "Unexpected path match.")
+ self.assertIsNone(locale, "Got locale without a match.")
def test_NoUsefulLines(self):
# Parse manifest without useful data. Lines that don't match what
@@ -44,16 +44,16 @@
""".lstrip())
self.assertEqual(len(manifest._locales), 0)
chrome_path, locale = manifest.getChromePathAndLocale('lines')
- self.failIf(chrome_path is not None, "Empty manifest matched a path.")
+ self.assertIsNone(chrome_path, "Empty manifest matched a path.")
chrome_path, locale = manifest.getChromePathAndLocale('')
- self.failIf(chrome_path is not None, "Matched empty path.")
+ self.assertIsNone(chrome_path, "Matched empty path.")
def _checkSortOrder(self, manifest):
"""Verify that manifest is sorted by increasing path length."""
last_entry = None
for entry in manifest._locales:
if last_entry is not None:
- self.failIf(len(entry.path) < len(last_entry.path),
+ self.assertFalse(len(entry.path) < len(last_entry.path),
"Manifest entries not sorted by increasing path length.")
last_entry = entry
@@ -113,7 +113,7 @@
""".lstrip())
self.assertEqual(len(manifest._locales), 1)
chrome_path, locale = manifest.getChromePathAndLocale('foodir/x')
- self.failIf(chrome_path is None, "Garbage lines messed up match.")
+ self.assertIsNotNone(chrome_path, "Garbage lines messed up match.")
self.assertEqual(chrome_path, "okay/x", "Matched wrong line.")
self.assertEqual(locale, "fr", "Inexplicably mismatched locale.")
@@ -129,7 +129,7 @@
"""Helper: look up `path` in `manifest`, expect given output."""
found_chrome_path, found_locale = manifest.getChromePathAndLocale(
path)
- self.failIf(found_chrome_path is None, "No match found for " + path)
+ self.assertIsNotNone(found_chrome_path, "No match found for " + path)
self.assertEqual(found_chrome_path, chrome_path)
self.assertEqual(found_locale, locale)
@@ -258,14 +258,14 @@
# Jar files need to be descended into if any locale line mentions a
# path inside them.
manifest = XpiManifest("locale in my jar:x/foo.jar!/y")
- self.failIf(not manifest.containsLocales("jar:x/foo.jar!/"))
- self.failIf(manifest.containsLocales("jar:zzz/foo.jar!/"))
+ self.assertTrue(manifest.containsLocales("jar:x/foo.jar!/"))
+ self.assertFalse(manifest.containsLocales("jar:zzz/foo.jar!/"))
def test_NormalizeContainsLocales(self):
# "containsLocales" lookup is normalized, just like chrome path
# lookup, so it's not fazed by syntactical misspellings.
manifest = XpiManifest("locale main kh jar:/x/foo.jar!bar.jar!")
- self.failIf(not manifest.containsLocales("x/foo.jar!//bar.jar!/"))
+ self.assertTrue(manifest.containsLocales("x/foo.jar!//bar.jar!/"))
def test_ReverseMapping(self):
# Test "reverse mapping" from chrome path to XPI path.
=== modified file 'lib/lp/translations/utilities/tests/test_xpi_po_exporter.py'
--- lib/lp/translations/utilities/tests/test_xpi_po_exporter.py 2017-04-19 11:01:16 +0000
+++ lib/lp/translations/utilities/tests/test_xpi_po_exporter.py 2018-01-02 14:40:48 +0000
@@ -101,7 +101,7 @@
def test_Interface(self):
"""Check whether the object follows the interface."""
- self.failUnless(
+ self.assertTrue(
verifyObject(
ITranslationFormatExporter, self.translation_exporter),
"XPIPOExporter doesn't follow the interface")