launchpad-reviewers team mailing list archive
Message #26079
[Merge] ~cjwatson/launchpad:py3-hashlib-bytes into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:py3-hashlib-bytes into launchpad:master.
Commit message:
Pass bytes to hashlib
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/396704
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:py3-hashlib-bytes into launchpad:master.
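For context (not part of the proposed diff): on Python 3, hashlib digest constructors accept only bytes-like input, whereas Python 2 accepted str. A minimal sketch of the two fix patterns used throughout this branch, with a hypothetical example value:

    import hashlib

    import six

    text = "refs/heads/master"  # hypothetical example value

    # On Python 3, hashlib.sha1(text) raises
    # TypeError: Unicode-objects must be encoded before hashing.
    # Fix 1: encode explicitly (or use a bytes literal for constants).
    digest = hashlib.sha1(text.encode("UTF-8")).hexdigest()

    # Fix 2: six.ensure_binary() for values that may already be bytes on
    # Python 2 but str on Python 3.
    digest = hashlib.sha1(six.ensure_binary(text)).hexdigest()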
diff --git a/lib/lp/archivepublisher/indices.py b/lib/lp/archivepublisher/indices.py
index 9ea0e9a..d0ca89d 100644
--- a/lib/lp/archivepublisher/indices.py
+++ b/lib/lp/archivepublisher/indices.py
@@ -168,7 +168,7 @@ def build_binary_stanza_fields(bpr, component, section, priority,
# Our formatted description isn't \n-terminated, but apt
# considers the trailing \n to be part of the data to hash.
bin_description_md5 = hashlib.md5(
- description.encode('utf-8') + '\n').hexdigest()
+ description.encode('utf-8') + b'\n').hexdigest()
if separate_long_descriptions:
# If distroseries.include_long_descriptions is False, the
# description should be the summary
@@ -244,7 +244,7 @@ def build_translations_stanza_fields(bpr, packages):
# Our formatted description isn't \n-terminated, but apt
# considers the trailing \n to be part of the data to hash.
bin_description_md5 = hashlib.md5(
- bin_description.encode('utf-8') + '\n').hexdigest()
+ bin_description.encode('utf-8') + b'\n').hexdigest()
if (bpr.name, bin_description_md5) not in packages:
fields = IndexStanzaFields()
fields.append('Package', bpr.name)
diff --git a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
index 06b0e3f..586c80b 100644
--- a/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/tests/test_buildfarmjobbehaviour.py
@@ -13,6 +13,7 @@ import os
import shutil
import tempfile
+import six
from testtools import ExpectedException
from testtools.twistedsupport import AsynchronousDeferredRunTest
from twisted.internet import defer
@@ -71,7 +72,7 @@ class FakeBuildFarmJob:
class FakeLibraryFileContent:
def __init__(self, filename):
- self.sha1 = hashlib.sha1(filename).hexdigest()
+ self.sha1 = hashlib.sha1(six.ensure_binary(filename)).hexdigest()
class FakeLibraryFileAlias:
@@ -184,7 +185,7 @@ class TestDispatchBuildToSlave(StatsMixin, TestCase):
('ensurepresent', 'http://host/bar.tar', 'admin', 'sekrit'),
('ensurepresent', 'http://host/foo.dsc', '', ''),
('build', 'PACKAGEBUILD-1', 'foobuild',
- hashlib.sha1(chroot_filename).hexdigest(),
+ hashlib.sha1(six.ensure_binary(chroot_filename)).hexdigest(),
['foo.dsc', 'bar.tar'],
{'archives': ['http://admin:sekrit@blah/'],
'image_type': image_type,
diff --git a/lib/lp/buildmaster/tests/test_packagebuild.py b/lib/lp/buildmaster/tests/test_packagebuild.py
index a61132b..77d85a6 100644
--- a/lib/lp/buildmaster/tests/test_packagebuild.py
+++ b/lib/lp/buildmaster/tests/test_packagebuild.py
@@ -76,7 +76,7 @@ class TestPackageBuildMixin(TestCaseWithFactory):
self.package_build.storeUploadLog("Some content")
self.assertIsNotNone(self.package_build.upload_log)
self.assertEqual(
- hashlib.sha1("Some content").hexdigest(),
+ hashlib.sha1(b"Some content").hexdigest(),
self.package_build.upload_log.content.sha1)
def test_storeUploadLog_private(self):
diff --git a/lib/lp/code/browser/tests/test_gitref.py b/lib/lp/code/browser/tests/test_gitref.py
index 28f3512..97ef0a3 100644
--- a/lib/lp/code/browser/tests/test_gitref.py
+++ b/lib/lp/code/browser/tests/test_gitref.py
@@ -432,7 +432,8 @@ class TestGitRefView(BrowserTestCase):
datetime(2015, 1, day + 1, tzinfo=pytz.UTC) for day in range(5)]
return [
{
- "sha1": six.ensure_text(hashlib.sha1(str(i)).hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(
+ str(i).encode("ASCII")).hexdigest()),
"message": "Commit %d" % i,
"author": {
"name": authors[i].display_name,
@@ -445,8 +446,8 @@ class TestGitRefView(BrowserTestCase):
"time": int(seconds_since_epoch(dates[i])),
},
"parents": [six.ensure_text(
- hashlib.sha1(str(i - 1)).hexdigest())],
- "tree": six.ensure_text(hashlib.sha1("").hexdigest()),
+ hashlib.sha1(str(i - 1).encode("ASCII")).hexdigest())],
+ "tree": six.ensure_text(hashlib.sha1(b"").hexdigest()),
}
for i in range(5)]
@@ -494,7 +495,7 @@ class TestGitRefView(BrowserTestCase):
mp = self.factory.makeBranchMergeProposalForGit(target_ref=ref)
merged_tip = dict(log[-1])
merged_tip["sha1"] = six.ensure_text(
- hashlib.sha1("merged").hexdigest())
+ hashlib.sha1(b"merged").hexdigest())
self.scanRef(mp.merge_source, merged_tip)
mp.markAsMerged(merged_revision_id=log[0]["sha1"])
view = create_initialized_view(ref, "+index")
@@ -524,7 +525,7 @@ class TestGitRefView(BrowserTestCase):
mp = self.factory.makeBranchMergeProposalForGit(target_ref=ref)
merged_tip = dict(log[-1])
merged_tip["sha1"] = six.ensure_text(
- hashlib.sha1("merged").hexdigest())
+ hashlib.sha1(b"merged").hexdigest())
self.scanRef(mp.merge_source, merged_tip)
mp.markAsMerged(merged_revision_id=log[0]["sha1"])
mp.source_git_repository.removeRefs([mp.source_git_path])
diff --git a/lib/lp/code/model/tests/test_branchmergeproposal.py b/lib/lp/code/model/tests/test_branchmergeproposal.py
index 81b94b3..2f6fa07 100644
--- a/lib/lp/code/model/tests/test_branchmergeproposal.py
+++ b/lib/lp/code/model/tests/test_branchmergeproposal.py
@@ -1592,16 +1592,16 @@ class TestBranchMergeProposalBugs(WithVCSScenarios, TestCaseWithFactory):
"message": "Commit 1\n\nLP: #%d" % bugs[0].id,
},
{
- "sha1": six.ensure_text(hashlib.sha1("1").hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(b"1").hexdigest()),
# Will not be matched.
"message": "Commit 2; see LP #%d" % bugs[1].id,
},
{
- "sha1": six.ensure_text(hashlib.sha1("2").hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(b"2").hexdigest()),
"message": "Commit 3; LP: #%d" % bugs[2].id,
},
{
- "sha1": six.ensure_text(hashlib.sha1("3").hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(b"3").hexdigest()),
# Non-existent bug ID will not be returned.
"message": "Non-existent bug; LP: #%d" % (bugs[2].id + 100),
},
@@ -1621,7 +1621,8 @@ class TestBranchMergeProposalBugs(WithVCSScenarios, TestCaseWithFactory):
"""Set up a fake log response referring to the given bugs."""
self.hosting_fixture.getLog.result = [
{
- "sha1": six.ensure_text(hashlib.sha1(str(i)).hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(
+ str(i).encode("ASCII")).hexdigest()),
"message": "LP: #%d" % bug.id,
}
for i, bug in enumerate(bugs)]
diff --git a/lib/lp/code/model/tests/test_branchmergeproposaljobs.py b/lib/lp/code/model/tests/test_branchmergeproposaljobs.py
index cf5aa10..c2bed7f 100644
--- a/lib/lp/code/model/tests/test_branchmergeproposaljobs.py
+++ b/lib/lp/code/model/tests/test_branchmergeproposaljobs.py
@@ -280,7 +280,7 @@ class TestUpdatePreviewDiffJob(DiffTestCase):
committer = self.factory.makePerson()
self.hosting_fixture.getLog.result = [
{
- "sha1": six.ensure_text(hashlib.sha1("tip").hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(b"tip").hexdigest()),
"message": "Fix upside-down messages\n\nLP: #%d" % bug.id,
"committer": {
"name": committer.display_name,
diff --git a/lib/lp/code/model/tests/test_gitjob.py b/lib/lp/code/model/tests/test_gitjob.py
index 3f606c0..18c91c9 100644
--- a/lib/lp/code/model/tests/test_gitjob.py
+++ b/lib/lp/code/model/tests/test_gitjob.py
@@ -104,7 +104,7 @@ class TestGitRefScanJob(TestCaseWithFactory):
def makeFakeRefs(paths):
return {
path: {"object": {
- "sha1": hashlib.sha1(path).hexdigest(),
+ "sha1": hashlib.sha1(path.encode("UTF-8")).hexdigest(),
"type": "commit",
}}
for path in paths}
@@ -113,7 +113,8 @@ class TestGitRefScanJob(TestCaseWithFactory):
def makeFakeCommits(author, author_date_gen, paths):
dates = {path: next(author_date_gen) for path in paths}
return [{
- "sha1": six.ensure_text(hashlib.sha1(path).hexdigest()),
+ "sha1": six.ensure_text(hashlib.sha1(
+ path.encode("UTF-8")).hexdigest()),
"message": "tip of %s" % path,
"author": {
"name": author.displayname,
@@ -126,7 +127,7 @@ class TestGitRefScanJob(TestCaseWithFactory):
"time": int(seconds_since_epoch(dates[path])),
},
"parents": [],
- "tree": six.ensure_text(hashlib.sha1("").hexdigest()),
+ "tree": six.ensure_text(hashlib.sha1(b"").hexdigest()),
} for path in paths]
def assertRefsMatch(self, refs, repository, paths):
@@ -134,7 +135,8 @@ class TestGitRefScanJob(TestCaseWithFactory):
MatchesStructure.byEquality(
repository=repository,
path=path,
- commit_sha1=six.ensure_text(hashlib.sha1(path).hexdigest()),
+ commit_sha1=six.ensure_text(hashlib.sha1(
+ path.encode("UTF-8")).hexdigest()),
object_type=GitObjectType.COMMIT)
for path in paths]
self.assertThat(refs, MatchesSetwise(*matchers))
@@ -200,11 +202,11 @@ class TestGitRefScanJob(TestCaseWithFactory):
'git_repository_path': Equals(repository.unique_name),
'ref_changes': Equals({
'refs/tags/1.0': {
- 'old': {'commit_sha1': sha1('refs/tags/1.0')},
+ 'old': {'commit_sha1': sha1(b'refs/tags/1.0')},
'new': None},
'refs/tags/2.0': {
'old': None,
- 'new': {'commit_sha1': sha1('refs/tags/2.0')}},
+ 'new': {'commit_sha1': sha1(b'refs/tags/2.0')}},
})})
self.assertThat(
delivery,
@@ -276,10 +278,10 @@ class TestGitRefScanJob(TestCaseWithFactory):
sha1 = lambda s: hashlib.sha1(s).hexdigest()
new_refs = {
'refs/heads/master': {
- 'sha1': sha1('master-ng'),
+ 'sha1': sha1(b'master-ng'),
'type': 'commit'},
'refs/tags/2.0': {
- 'sha1': sha1('2.0'),
+ 'sha1': sha1(b'2.0'),
'type': 'commit'},
}
removed_refs = ['refs/tags/1.0']
@@ -292,14 +294,14 @@ class TestGitRefScanJob(TestCaseWithFactory):
'git_repository_path': repository.unique_name,
'ref_changes': {
'refs/heads/master': {
- 'old': {'commit_sha1': sha1('refs/heads/master')},
- 'new': {'commit_sha1': sha1('master-ng')}},
+ 'old': {'commit_sha1': sha1(b'refs/heads/master')},
+ 'new': {'commit_sha1': sha1(b'master-ng')}},
'refs/tags/1.0': {
- 'old': {'commit_sha1': sha1('refs/tags/1.0')},
+ 'old': {'commit_sha1': sha1(b'refs/tags/1.0')},
'new': None},
'refs/tags/2.0': {
'old': None,
- 'new': {'commit_sha1': sha1('2.0')}}}},
+ 'new': {'commit_sha1': sha1(b'2.0')}}}},
payload)
diff --git a/lib/lp/code/model/tests/test_gitref.py b/lib/lp/code/model/tests/test_gitref.py
index 715bbb5..b7140ce 100644
--- a/lib/lp/code/model/tests/test_gitref.py
+++ b/lib/lp/code/model/tests/test_gitref.py
@@ -147,8 +147,8 @@ class TestGitRefGetCommits(TestCaseWithFactory):
datetime(2015, 1, 1, 0, 0, 0, tzinfo=pytz.UTC),
datetime(2015, 1, 2, 0, 0, 0, tzinfo=pytz.UTC),
]
- self.sha1_tip = six.ensure_text(hashlib.sha1("tip").hexdigest())
- self.sha1_root = six.ensure_text(hashlib.sha1("root").hexdigest())
+ self.sha1_tip = six.ensure_text(hashlib.sha1(b"tip").hexdigest())
+ self.sha1_root = six.ensure_text(hashlib.sha1(b"root").hexdigest())
self.log = [
{
"sha1": self.sha1_tip,
@@ -164,7 +164,7 @@ class TestGitRefGetCommits(TestCaseWithFactory):
"time": int(seconds_since_epoch(self.dates[1])),
},
"parents": [self.sha1_root],
- "tree": six.ensure_text(hashlib.sha1("").hexdigest()),
+ "tree": six.ensure_text(hashlib.sha1(b"").hexdigest()),
},
{
"sha1": self.sha1_root,
@@ -180,7 +180,7 @@ class TestGitRefGetCommits(TestCaseWithFactory):
"time": int(seconds_since_epoch(self.dates[0])),
},
"parents": [],
- "tree": six.ensure_text(hashlib.sha1("").hexdigest()),
+ "tree": six.ensure_text(hashlib.sha1(b"").hexdigest()),
},
]
self.hosting_fixture = self.useFixture(GitHostingFixture(log=self.log))
@@ -752,7 +752,7 @@ class TestGitRefWebservice(TestCaseWithFactory):
self.assertThat(result["repository_link"], EndsWith(repository_url))
self.assertEqual("refs/heads/master", result["path"])
self.assertEqual(
- six.ensure_text(hashlib.sha1("refs/heads/master").hexdigest()),
+ six.ensure_text(hashlib.sha1(b"refs/heads/master").hexdigest()),
result["commit_sha1"])
def test_landing_candidates(self):
diff --git a/lib/lp/code/model/tests/test_gitrepository.py b/lib/lp/code/model/tests/test_gitrepository.py
index eb5f819..a1ccd02 100644
--- a/lib/lp/code/model/tests/test_gitrepository.py
+++ b/lib/lp/code/model/tests/test_gitrepository.py
@@ -1523,7 +1523,7 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
def test__convertRefInfo(self):
# _convertRefInfo converts a valid info dictionary.
- sha1 = six.ensure_text(hashlib.sha1("").hexdigest())
+ sha1 = six.ensure_text(hashlib.sha1(b"").hexdigest())
info = {"object": {"sha1": sha1, "type": "commit"}}
expected_info = {"sha1": sha1, "type": GitObjectType.COMMIT}
self.assertEqual(expected_info, GitRepository._convertRefInfo(info))
@@ -1568,7 +1568,8 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
MatchesStructure.byEquality(
repository=repository,
path=path,
- commit_sha1=six.ensure_text(hashlib.sha1(path).hexdigest()),
+ commit_sha1=six.ensure_text(hashlib.sha1(
+ path.encode("UTF-8")).hexdigest()),
object_type=GitObjectType.COMMIT)
for path in paths]
self.assertThat(refs, MatchesSetwise(*matchers))
@@ -1717,12 +1718,12 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
},
"refs/heads/foo": {
"sha1": six.ensure_text(
- hashlib.sha1("refs/heads/foo").hexdigest()),
+ hashlib.sha1(b"refs/heads/foo").hexdigest()),
"type": GitObjectType.COMMIT,
},
"refs/tags/1.0": {
"sha1": six.ensure_text(
- hashlib.sha1("refs/heads/master").hexdigest()),
+ hashlib.sha1(b"refs/heads/master").hexdigest()),
"type": GitObjectType.COMMIT,
},
}
@@ -1734,7 +1735,7 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
# non-commits.
repository = self.factory.makeGitRepository()
blob_sha1 = six.ensure_text(
- hashlib.sha1("refs/heads/blob").hexdigest())
+ hashlib.sha1(b"refs/heads/blob").hexdigest())
refs_info = {
"refs/heads/blob": {
"sha1": blob_sha1,
@@ -1808,8 +1809,8 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
# fetchRefCommits fetches detailed tip commit metadata for the
# requested refs.
master_sha1 = six.ensure_text(
- hashlib.sha1("refs/heads/master").hexdigest())
- foo_sha1 = six.ensure_text(hashlib.sha1("refs/heads/foo").hexdigest())
+ hashlib.sha1(b"refs/heads/master").hexdigest())
+ foo_sha1 = six.ensure_text(hashlib.sha1(b"refs/heads/foo").hexdigest())
author = self.factory.makePerson()
with person_logged_in(author):
author_email = author.preferredemail.email
@@ -1830,7 +1831,7 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
"time": int(seconds_since_epoch(committer_date)),
},
"parents": [],
- "tree": six.ensure_text(hashlib.sha1("").hexdigest()),
+ "tree": six.ensure_text(hashlib.sha1(b"").hexdigest()),
}]))
refs = {
"refs/heads/master": {
@@ -1906,9 +1907,9 @@ class TestGitRepositoryRefs(TestCaseWithFactory):
expected_sha1s = [
("refs/heads/master", "1111111111111111111111111111111111111111"),
("refs/heads/foo",
- six.ensure_text(hashlib.sha1("refs/heads/foo").hexdigest())),
+ six.ensure_text(hashlib.sha1(b"refs/heads/foo").hexdigest())),
("refs/tags/1.0",
- six.ensure_text(hashlib.sha1("refs/heads/master").hexdigest())),
+ six.ensure_text(hashlib.sha1(b"refs/heads/master").hexdigest())),
]
matchers = [
MatchesStructure.byEquality(
diff --git a/lib/lp/code/xmlrpc/tests/test_git.py b/lib/lp/code/xmlrpc/tests/test_git.py
index dcbcee6..ce3a0a6 100644
--- a/lib/lp/code/xmlrpc/tests/test_git.py
+++ b/lib/lp/code/xmlrpc/tests/test_git.py
@@ -299,13 +299,13 @@ class TestGitAPIMixin:
GitHostingFixture(refs={
'refs/heads/master': {
"object": {
- "sha1": sha1('master-branch'),
+ "sha1": sha1(b'master-branch'),
"type": "commit",
},
},
'refs/heads/foo': {
"object": {
- "sha1": sha1('foo-branch'),
+ "sha1": sha1(b'foo-branch'),
"type": "commit",
},
}}))
@@ -336,7 +336,7 @@ class TestGitAPIMixin:
{'refs/heads/foo', 'refs/heads/master'},
{i.path for i in git_repository.refs})
self.assertEqual(
- {sha1('foo-branch'), sha1('master-branch')},
+ {sha1(b'foo-branch'), sha1(b'master-branch')},
{i.commit_sha1 for i in git_repository.refs})
def assertConfirmRepoCreationFails(
diff --git a/lib/lp/scripts/tests/test_garbo.py b/lib/lp/scripts/tests/test_garbo.py
index 2fbc80d..ffc5bbf 100644
--- a/lib/lp/scripts/tests/test_garbo.py
+++ b/lib/lp/scripts/tests/test_garbo.py
@@ -1181,9 +1181,9 @@ class TestGarbo(FakeAdapterMixin, TestCaseWithFactory):
path="sample path"))))
self.runDaily()
self.assertEqual(0, len(list(store.find(TimeLimitedToken,
- path="sample path", token=hashlib.sha256("foo").hexdigest()))))
+ path="sample path", token=hashlib.sha256(b"foo").hexdigest()))))
self.assertEqual(1, len(list(store.find(TimeLimitedToken,
- path="sample path", token=hashlib.sha256("bar").hexdigest()))))
+ path="sample path", token=hashlib.sha256(b"bar").hexdigest()))))
def test_CacheSuggestivePOTemplates(self):
switch_dbuser('testadmin')
diff --git a/lib/lp/services/identity/model/emailaddress.py b/lib/lp/services/identity/model/emailaddress.py
index 00697f9..a866a0a 100644
--- a/lib/lp/services/identity/model/emailaddress.py
+++ b/lib/lp/services/identity/model/emailaddress.py
@@ -90,7 +90,8 @@ class EmailAddress(SQLBase, HasOwnerMixin):
@property
def rdf_sha1(self):
"""See `IEmailAddress`."""
- return hashlib.sha1('mailto:' + self.email).hexdigest().upper()
+ return hashlib.sha1(
+ ('mailto:' + self.email).encode('UTF-8')).hexdigest().upper()
@implementer(IEmailAddressSet)
diff --git a/lib/lp/services/librarian/doc/librarian.txt b/lib/lp/services/librarian/doc/librarian.txt
index 6bb7a87..5f64a74 100644
--- a/lib/lp/services/librarian/doc/librarian.txt
+++ b/lib/lp/services/librarian/doc/librarian.txt
@@ -344,7 +344,7 @@ provide such a token.
>>> private_path = TimeLimitedToken.url_to_token_path(
... file_alias.private_url)
- >>> url_token = token_url.split('=')[1]
+ >>> url_token = token_url.split('=')[1].encode('ASCII')
>>> hashlib.sha256(url_token).hexdigest() == session_store().find(
... TimeLimitedToken, path=private_path).any().token
True
diff --git a/lib/lp/services/librarian/utils.py b/lib/lp/services/librarian/utils.py
index 7457fb3..ca8411a 100644
--- a/lib/lp/services/librarian/utils.py
+++ b/lib/lp/services/librarian/utils.py
@@ -39,13 +39,10 @@ def copy_and_close(from_file, to_file):
def sha1_from_path(path):
"""Return the hexdigest SHA1 for the contents of the path."""
- the_file = open(path)
- the_hash = hashlib.sha1()
-
- for chunk in filechunks(the_file):
- the_hash.update(chunk)
-
- the_file.close()
+ with open(path, 'rb') as the_file:
+ the_hash = hashlib.sha1()
+ for chunk in filechunks(the_file):
+ the_hash.update(chunk)
return the_hash.hexdigest()
diff --git a/lib/lp/services/oauth/tests/test_tokens.py b/lib/lp/services/oauth/tests/test_tokens.py
index b35ee25..c683786 100644
--- a/lib/lp/services/oauth/tests/test_tokens.py
+++ b/lib/lp/services/oauth/tests/test_tokens.py
@@ -111,7 +111,7 @@ class TestRequestTokens(TestOAuth):
self.assertIsInstance(secret, six.text_type)
self.assertEqual(
removeSecurityProxy(request_token)._secret,
- hashlib.sha256(secret).hexdigest())
+ hashlib.sha256(secret.encode('ASCII')).hexdigest())
def test_key_and_secret_automatically_generated(self):
request_token, secret = self.consumer.newRequestToken()
@@ -285,7 +285,7 @@ class TestAccessTokens(TestOAuth):
self.assertIsInstance(access_secret, six.text_type)
self.assertEqual(
removeSecurityProxy(access_token)._secret,
- hashlib.sha256(access_secret).hexdigest())
+ hashlib.sha256(access_secret.encode('ASCII')).hexdigest())
def test_access_token_inherits_data_fields_from_request_token(self):
request_token, access_token, _ = (
diff --git a/lib/lp/services/verification/model/logintoken.py b/lib/lp/services/verification/model/logintoken.py
index cad1dac..4190340 100644
--- a/lib/lp/services/verification/model/logintoken.py
+++ b/lib/lp/services/verification/model/logintoken.py
@@ -78,7 +78,8 @@ class LoginToken(SQLBase):
token = kwargs.pop('token', None)
if token is not None:
self._plaintext_token = token
- kwargs['_token'] = hashlib.sha256(token).hexdigest()
+ kwargs['_token'] = hashlib.sha256(
+ token.encode('UTF-8')).hexdigest()
super(LoginToken, self).__init__(*args, **kwargs)
_plaintext_token = None
@@ -346,7 +347,8 @@ class LoginTokenSet:
def __getitem__(self, tokentext):
"""See ILoginTokenSet."""
token = IStore(LoginToken).find(
- LoginToken, _token=hashlib.sha256(tokentext).hexdigest()).one()
+ LoginToken,
+ _token=hashlib.sha256(tokentext.encode('UTF-8')).hexdigest()).one()
if token is None:
raise NotFoundError(tokentext)
token._plaintext_token = tokentext
diff --git a/lib/lp/services/webapp/tests/test_pgsession.py b/lib/lp/services/webapp/tests/test_pgsession.py
index 034da88..4cfa50a 100644
--- a/lib/lp/services/webapp/tests/test_pgsession.py
+++ b/lib/lp/services/webapp/tests/test_pgsession.py
@@ -158,7 +158,9 @@ class TestPgSession(TestCase):
result = store.execute(
"SELECT client_id FROM SessionData ORDER BY client_id")
client_ids = [row[0] for row in result]
- self.assertEqual(client_ids, [hashlib.sha256(client_id).hexdigest()])
+ self.assertEqual(
+ client_ids,
+ [hashlib.sha256(client_id.encode('ASCII')).hexdigest()])
# The session cookie also is now set, via the same "trigger".
self.assertNotEqual(
diff --git a/lib/lp/snappy/tests/test_snapstoreclient.py b/lib/lp/snappy/tests/test_snapstoreclient.py
index 512cf54..51b0039 100644
--- a/lib/lp/snappy/tests/test_snapstoreclient.py
+++ b/lib/lp/snappy/tests/test_snapstoreclient.py
@@ -93,9 +93,9 @@ class TestMacaroonAuth(TestCase):
def test_good(self):
r = Request()
- root_key = hashlib.sha256("root").hexdigest()
+ root_key = hashlib.sha256(b"root").hexdigest()
root_macaroon = Macaroon(key=root_key)
- discharge_key = hashlib.sha256("discharge").hexdigest()
+ discharge_key = hashlib.sha256(b"discharge").hexdigest()
discharge_caveat_id = '{"secret": "thing"}'
root_macaroon.add_third_party_caveat(
"sso.example", discharge_key, discharge_caveat_id)
@@ -113,7 +113,7 @@ class TestMacaroonAuth(TestCase):
def test_good_no_discharge(self):
r = Request()
- root_key = hashlib.sha256("root").hexdigest()
+ root_key = hashlib.sha256(b"root").hexdigest()
root_macaroon = Macaroon(key=root_key)
MacaroonAuth(root_macaroon.serialize())(r)
auth_value = r.headers["Authorization"]
@@ -137,9 +137,9 @@ class TestMacaroonAuth(TestCase):
def test_logging(self):
r = Request()
- root_key = hashlib.sha256("root").hexdigest()
+ root_key = hashlib.sha256(b"root").hexdigest()
root_macaroon = Macaroon(key=root_key)
- discharge_key = hashlib.sha256("discharge").hexdigest()
+ discharge_key = hashlib.sha256(b"discharge").hexdigest()
discharge_caveat_id = '{"secret": "thing"}'
root_macaroon.add_third_party_caveat(
"sso.example", discharge_key, discharge_caveat_id)
@@ -234,10 +234,10 @@ class TestSnapStoreClient(TestCaseWithFactory):
def _make_store_secrets(self, encrypted=False):
self.root_key = hashlib.sha256(
- self.factory.getUniqueString()).hexdigest()
+ self.factory.getUniqueBytes()).hexdigest()
root_macaroon = Macaroon(key=self.root_key)
self.discharge_key = hashlib.sha256(
- self.factory.getUniqueString()).hexdigest()
+ self.factory.getUniqueBytes()).hexdigest()
self.discharge_caveat_id = self.factory.getUniqueString()
root_macaroon.add_third_party_caveat(
"sso.example", self.discharge_key, self.discharge_caveat_id)
@@ -431,7 +431,7 @@ class TestSnapStoreClient(TestCaseWithFactory):
@responses.activate
def test_upload_no_discharge(self):
- root_key = hashlib.sha256(self.factory.getUniqueString()).hexdigest()
+ root_key = hashlib.sha256(self.factory.getUniqueBytes()).hexdigest()
root_macaroon = Macaroon(key=root_key)
snapbuild = self.makeUploadableSnapBuild(
store_secrets={"root": root_macaroon.serialize()})
diff --git a/lib/lp/soyuz/browser/tests/test_distroarchseries_webservice.py b/lib/lp/soyuz/browser/tests/test_distroarchseries_webservice.py
index c9e6db4..8766959 100644
--- a/lib/lp/soyuz/browser/tests/test_distroarchseries_webservice.py
+++ b/lib/lp/soyuz/browser/tests/test_distroarchseries_webservice.py
@@ -126,8 +126,8 @@ class TestDistroArchSeriesWebservice(TestCaseWithFactory):
user = das.distroseries.distribution.main_archive.owner
webservice = launchpadlib_for("testing", user)
ws_das = ws_object(webservice, das)
- sha1 = hashlib.sha1('abcxyz').hexdigest()
- sha256 = hashlib.sha256('abcxyz').hexdigest()
+ sha1 = hashlib.sha1(b'abcxyz').hexdigest()
+ sha256 = hashlib.sha256(b'abcxyz').hexdigest()
ws_das.setChroot(data=b'abcxyz', sha1sum=sha1)
self.assertThat(
das.getChrootHash(
@@ -142,7 +142,7 @@ class TestDistroArchSeriesWebservice(TestCaseWithFactory):
das.architecturetag)
webservice = launchpadlib_for("testing", user)
ws_das = ws_object(webservice, das)
- sha1 = hashlib.sha1('abcxyz').hexdigest()
+ sha1 = hashlib.sha1(b'abcxyz').hexdigest()
ws_das.setChroot(data=b'abcxyz', sha1sum=sha1)
self.assertTrue(ws_das.chroot_url.endswith(expected_file))
ws_das.removeChroot()
@@ -155,9 +155,9 @@ class TestDistroArchSeriesWebservice(TestCaseWithFactory):
user = das.distroseries.distribution.main_archive.owner
webservice = launchpadlib_for("testing", user)
ws_das = ws_object(webservice, das)
- sha1_1 = hashlib.sha1('abcxyz').hexdigest()
+ sha1_1 = hashlib.sha1(b'abcxyz').hexdigest()
ws_das.setChroot(data=b'abcxyz', sha1sum=sha1_1)
- sha1_2 = hashlib.sha1('123456').hexdigest()
+ sha1_2 = hashlib.sha1(b'123456').hexdigest()
ws_das.setChroot(data=b'123456', sha1sum=sha1_2, pocket='Updates')
release_chroot = das.getChroot(pocket=PackagePublishingPocket.RELEASE)
self.assertEqual(sha1_1, release_chroot.content.sha1)
@@ -196,9 +196,9 @@ class TestDistroArchSeriesWebservice(TestCaseWithFactory):
user = das.distroseries.distribution.main_archive.owner
webservice = launchpadlib_for("testing", user)
ws_das = ws_object(webservice, das)
- sha1_1 = hashlib.sha1('abcxyz').hexdigest()
+ sha1_1 = hashlib.sha1(b'abcxyz').hexdigest()
ws_das.setChroot(data=b'abcxyz', sha1sum=sha1_1)
- sha1_2 = hashlib.sha1('123456').hexdigest()
+ sha1_2 = hashlib.sha1(b'123456').hexdigest()
ws_das.setChroot(data=b'123456', sha1sum=sha1_2, image_type='LXD image')
chroot_image = das.getChroot(image_type=BuildBaseImageType.CHROOT)
self.assertEqual(sha1_1, chroot_image.content.sha1)
diff --git a/lib/lp/soyuz/doc/soyuz-upload.txt b/lib/lp/soyuz/doc/soyuz-upload.txt
index ba6cca2..5c203f9 100644
--- a/lib/lp/soyuz/doc/soyuz-upload.txt
+++ b/lib/lp/soyuz/doc/soyuz-upload.txt
@@ -97,7 +97,8 @@ files match the uploaded ones.
>>> import hashlib
>>> def get_md5(filename):
- ... return hashlib.md5(open(filename).read()).digest()
+ ... with open(filename, 'rb') as f:
+ ... return hashlib.md5(f.read()).digest()
>>> import scandir
>>> def get_upload_dir(num, dir=incoming_dir):