launchpad-reviewers team mailing list archive
-
launchpad-reviewers team
-
Mailing list archive
-
Message #30743
[Merge] ~cjwatson/launchpad:pyupgrade-py38 into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:pyupgrade-py38 into launchpad:master.
Commit message:
Run "pyupgrade --py38-plus"
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/455988
This drops support for running on xenial.
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:pyupgrade-py38 into launchpad:master.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 55d7cf5..8f1b599 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
rev: v3.15.0
hooks:
- id: pyupgrade
- args: [--keep-percent-format]
+ args: [--keep-percent-format, --py38-plus]
exclude: |
(?x)^(
lib/contrib/.*
diff --git a/charm/launchpad-postgresql-extras/files/push-backups b/charm/launchpad-postgresql-extras/files/push-backups
index 8a48574..68c3b46 100755
--- a/charm/launchpad-postgresql-extras/files/push-backups
+++ b/charm/launchpad-postgresql-extras/files/push-backups
@@ -71,7 +71,7 @@ def main():
command.extend([f"{args.backups_path}/", destination])
if args.dry_run:
- print("Would run:", " ".join(shlex.quote(arg) for arg in command))
+ print("Would run:", shlex.join(command))
else:
subprocess.run(command, check=True)
diff --git a/database/schema/upgrade.py b/database/schema/upgrade.py
index ee21fb1..a265588 100755
--- a/database/schema/upgrade.py
+++ b/database/schema/upgrade.py
@@ -291,12 +291,12 @@ def get_vcs_details():
branch_nick = subprocess.check_output(
["git", "rev-parse", "--abbrev-ref", "HEAD"],
cwd=SCHEMA_DIR,
- universal_newlines=True,
+ text=True,
).rstrip("\n")
revision_id = subprocess.check_output(
["git", "rev-parse", "HEAD"],
cwd=SCHEMA_DIR,
- universal_newlines=True,
+ text=True,
).rstrip("\n")
else:
branch_nick, revision_id = None, None
diff --git a/lib/lp/app/browser/tests/test_stringformatter.py b/lib/lp/app/browser/tests/test_stringformatter.py
index 78a332f..4df6954 100644
--- a/lib/lp/app/browser/tests/test_stringformatter.py
+++ b/lib/lp/app/browser/tests/test_stringformatter.py
@@ -829,7 +829,7 @@ class MarksDownAs(Matcher):
self.expected_html = expected_html
def __str__(self):
- return "MarksDownAs({!r})".format(self.expected_html)
+ return f"MarksDownAs({self.expected_html!r})"
def match(self, input_string):
return Equals(self.expected_html).match(
diff --git a/lib/lp/archivepublisher/model/ftparchive.py b/lib/lp/archivepublisher/model/ftparchive.py
index 529118e..94885ea 100644
--- a/lib/lp/archivepublisher/model/ftparchive.py
+++ b/lib/lp/archivepublisher/model/ftparchive.py
@@ -550,10 +550,10 @@ class FTPArchiveHandler:
self._config.overrideroot,
"override.%s.extra.%s" % (suite, component),
)
- ef_override_new = "{}.new".format(ef_override)
+ ef_override_new = f"{ef_override}.new"
# Create the files as .new and then move into place to prevent
# race conditions with other processes handling these files
- main_override_new = "{}.new".format(main_override)
+ main_override_new = f"{main_override}.new"
source_override = os.path.join(
self._config.overrideroot,
"override.%s.%s.src" % (suite, component),
@@ -632,7 +632,7 @@ class FTPArchiveHandler:
def _outputSimpleOverrides(filename, overrides):
# Write to a different file, then move into place
- filename_new = "{}.new".format(filename)
+ filename_new = f"{filename}.new"
sf = open(filename_new, "w")
for tup in overrides:
sf.write("\t".join(tup))
@@ -850,7 +850,7 @@ class FTPArchiveHandler:
# Prevent race conditions with other processes handling these
# files, create as .new and then move into place
new_path = os.path.join(
- self._config.overrideroot, "{}.new".format(filename)
+ self._config.overrideroot, f"{filename}.new"
)
final_path = os.path.join(self._config.overrideroot, filename)
with open(new_path, "w") as f:
diff --git a/lib/lp/archivepublisher/tests/test_publisher.py b/lib/lp/archivepublisher/tests/test_publisher.py
index 354ca4a..99b2a61 100644
--- a/lib/lp/archivepublisher/tests/test_publisher.py
+++ b/lib/lp/archivepublisher/tests/test_publisher.py
@@ -522,7 +522,7 @@ class ByHashHasContents(Matcher):
)
def __str__(self):
- return "ByHashHasContents({})".format(self.contents)
+ return f"ByHashHasContents({self.contents})"
def match(self, by_hash_path):
mismatch = DirContains(self.expected_hashes.keys()).match(by_hash_path)
@@ -562,7 +562,7 @@ class ByHashesHaveContents(Matcher):
self.path_contents = path_contents
def __str__(self):
- return "ByHashesHaveContents({})".format(self.path_contents)
+ return f"ByHashesHaveContents({self.path_contents})"
def match(self, root):
children = set()
diff --git a/lib/lp/archivepublisher/tests/test_signing.py b/lib/lp/archivepublisher/tests/test_signing.py
index 4318cb8..c352f3d 100644
--- a/lib/lp/archivepublisher/tests/test_signing.py
+++ b/lib/lp/archivepublisher/tests/test_signing.py
@@ -67,7 +67,7 @@ class SignedMatches(Matcher):
self.expected = expected
def __str__(self):
- return "SignedMatches({})".format(self.expected)
+ return f"SignedMatches({self.expected})"
def match(self, base):
content = []
diff --git a/lib/lp/archiveuploader/ocirecipeupload.py b/lib/lp/archiveuploader/ocirecipeupload.py
index b25d5a9..3db2993 100644
--- a/lib/lp/archiveuploader/ocirecipeupload.py
+++ b/lib/lp/archiveuploader/ocirecipeupload.py
@@ -45,7 +45,7 @@ class OCIRecipeUpload:
continue
# Open the digest file
digest_path = os.path.join(dirpath, "digests.json")
- self.logger.debug("Digest path: {}".format(digest_path))
+ self.logger.debug(f"Digest path: {digest_path}")
with open(digest_path) as digest_fp:
digests = json.load(digest_fp)
@@ -54,10 +54,8 @@ class OCIRecipeUpload:
for data in single_digest.values():
digest = data["digest"]
layer_id = data["layer_id"]
- layer_path = os.path.join(
- dirpath, "{}.tar.gz".format(layer_id)
- )
- self.logger.debug("Layer path: {}".format(layer_path))
+ layer_path = os.path.join(dirpath, f"{layer_id}.tar.gz")
+ self.logger.debug(f"Layer path: {layer_path}")
# If the file is already in the librarian,
# we can just reuse it.
existing_file = getUtility(IOCIFileSet).getByLayerDigest(
@@ -72,9 +70,7 @@ class OCIRecipeUpload:
)
continue
if not os.path.exists(layer_path):
- raise UploadError(
- "Missing layer file: {}.".format(layer_id)
- )
+ raise UploadError(f"Missing layer file: {layer_id}.")
# Upload layer
libraryfile = self.librarian.create(
os.path.basename(layer_path),
@@ -88,7 +84,7 @@ class OCIRecipeUpload:
for filename in filenames:
if filename.endswith(".json"):
file_path = os.path.join(dirpath, filename)
- self.logger.debug("JSON file: {}".format(file_path))
+ self.logger.debug(f"JSON file: {file_path}")
libraryfile = self.librarian.create(
os.path.basename(file_path),
os.stat(file_path).st_size,
diff --git a/lib/lp/archiveuploader/uploadprocessor.py b/lib/lp/archiveuploader/uploadprocessor.py
index 7cda7bc..562f7a4 100644
--- a/lib/lp/archiveuploader/uploadprocessor.py
+++ b/lib/lp/archiveuploader/uploadprocessor.py
@@ -730,9 +730,7 @@ class BuildUploadHandler(UploadHandler):
if logger is None:
logger = self.processor.log
try:
- logger.info(
- "Processing OCI Image upload {}".format(self.upload_path)
- )
+ logger.info(f"Processing OCI Image upload {self.upload_path}")
OCIRecipeUpload(self.upload_path, logger).process(self.build)
if self.processor.dry_run:
diff --git a/lib/lp/blueprints/browser/person_upcomingwork.py b/lib/lp/blueprints/browser/person_upcomingwork.py
index e2cbf3e..0861a52 100644
--- a/lib/lp/blueprints/browser/person_upcomingwork.py
+++ b/lib/lp/blueprints/browser/person_upcomingwork.py
@@ -57,7 +57,7 @@ class PersonUpcomingWorkView(LaunchpadView):
if total_items > 0:
done_or_postponed = total_done + total_postponed
percent_done = 100.0 * done_or_postponed / total_items
- self.progress_per_date[date] = "{:.0f}".format(percent_done)
+ self.progress_per_date[date] = f"{percent_done:.0f}"
@property
def label(self):
@@ -123,7 +123,7 @@ class WorkItemContainer:
self.postponed_items
)
percent_done = 100.0 * done_or_postponed / len(self._items)
- return "{:.0f}".format(percent_done)
+ return f"{percent_done:.0f}"
@property
def has_incomplete_work(self):
diff --git a/lib/lp/bugs/adapters/bugchange.py b/lib/lp/bugs/adapters/bugchange.py
index e97d6e8..1532bfc 100644
--- a/lib/lp/bugs/adapters/bugchange.py
+++ b/lib/lp/bugs/adapters/bugchange.py
@@ -804,7 +804,7 @@ class BugLocked(BugChangeBase):
"""See `IBugChange`."""
text = "** Bug metadata locked and limited to project staff"
if self.reason:
- text = "{}: {}".format(text, self.reason)
+ text = f"{text}: {self.reason}"
return {"text": text}
@@ -850,7 +850,7 @@ class BugLockReasonSet(BugChangeBase):
if self.new_reason is None:
text = "Bug lock reason unset"
else:
- text = "** Bug lock reason changed: {}".format(self.new_reason)
+ text = f"** Bug lock reason changed: {self.new_reason}"
return {"text": text}
diff --git a/lib/lp/bugs/browser/bug.py b/lib/lp/bugs/browser/bug.py
index 190ba78..3805a15 100644
--- a/lib/lp/bugs/browser/bug.py
+++ b/lib/lp/bugs/browser/bug.py
@@ -1288,13 +1288,13 @@ class BugTextView(LaunchpadView):
"""Return a text representation of a bug attachment."""
if attachment.url:
if attachment.title != attachment.url:
- return "{}: {}".format(attachment.title, attachment.url)
+ return f"{attachment.title}: {attachment.url}"
return attachment.url
elif attachment.libraryfile:
mime_type = normalize_mime_type.sub(
" ", attachment.libraryfile.mimetype
)
- return "{} {}".format(attachment.displayed_url, mime_type)
+ return f"{attachment.displayed_url} {mime_type}"
raise AssertionError()
def comment_text(self):
diff --git a/lib/lp/bugs/browser/bugtask.py b/lib/lp/bugs/browser/bugtask.py
index 32dd881..2b2aaed 100644
--- a/lib/lp/bugs/browser/bugtask.py
+++ b/lib/lp/bugs/browser/bugtask.py
@@ -2807,7 +2807,7 @@ class BugActivityItem:
"Metadata changes locked{}and " "limited to project staff"
)
if self.message:
- reason = " ({}) ".format(html_escape(self.message))
+ reason = f" ({html_escape(self.message)}) "
return detail.format(reason)
else:
return "Metadata changes unlocked"
@@ -2819,7 +2819,7 @@ class BugActivityItem:
)
pass
elif self.newvalue != "unset":
- return "{}".format(self.newvalue)
+ return f"{self.newvalue}"
else:
return "Unset"
elif attribute == "milestone":
diff --git a/lib/lp/bugs/browser/cvereport.py b/lib/lp/bugs/browser/cvereport.py
index 2c5ae2d..6d6b16f 100644
--- a/lib/lp/bugs/browser/cvereport.py
+++ b/lib/lp/bugs/browser/cvereport.py
@@ -39,13 +39,9 @@ class BugTaskCve:
return self.bugtasks[0].bug
-BugTaskCves = NamedTuple(
- "BugTaskCves",
- (
- ("open", List[BugTaskCve]),
- ("resolved", List[BugTaskCve]),
- ),
-)
+class BugTaskCves(NamedTuple):
+ open: List[BugTaskCve]
+ resolved: List[BugTaskCve]
def get_cve_display_data(cve):
diff --git a/lib/lp/bugs/browser/tests/test_edit_bug_lock_status.py b/lib/lp/bugs/browser/tests/test_edit_bug_lock_status.py
index c57b5b9..82b63b0 100644
--- a/lib/lp/bugs/browser/tests/test_edit_bug_lock_status.py
+++ b/lib/lp/bugs/browser/tests/test_edit_bug_lock_status.py
@@ -314,7 +314,7 @@ class TestBugLockFeatures(BrowserTestCase):
text="Change lock status",
attrs={
"class": "edit",
- "href": "{}/+lock-status".format(bugtask_url),
+ "href": f"{bugtask_url}/+lock-status",
},
)
)
@@ -338,7 +338,7 @@ class TestBugLockFeatures(BrowserTestCase):
text="Change lock status",
attrs={
"class": "edit",
- "href": "{}/+lock-status".format(bugtask_url),
+ "href": f"{bugtask_url}/+lock-status",
},
)
),
diff --git a/lib/lp/bugs/browser/tests/test_vulnerability.py b/lib/lp/bugs/browser/tests/test_vulnerability.py
index 169cf05..898f9e3 100644
--- a/lib/lp/bugs/browser/tests/test_vulnerability.py
+++ b/lib/lp/bugs/browser/tests/test_vulnerability.py
@@ -34,7 +34,7 @@ class TestVulnerabilityPage(BrowserTestCase):
def get_vulnerability_field_tag(self, name, text):
return Within(
Tag(
- "{} dl".format(name),
+ f"{name} dl",
"dl",
attrs={"id": "-".join(name.lower().split())},
text=text,
@@ -108,9 +108,7 @@ class TestVulnerabilityPage(BrowserTestCase):
)
matchers = []
for field in fields:
- matchers.append(
- HTMLContains(Tag(field, "dt", text="{}:".format(field)))
- )
+ matchers.append(HTMLContains(Tag(field, "dt", text=f"{field}:")))
self.assertThat(browser.contents, MatchesAll(*matchers))
def test_vulnerability_page_default_values(self):
@@ -246,22 +244,22 @@ class TestVulnerabilityPage(BrowserTestCase):
HTMLContains(
Tag("Related bugs", "div", attrs={"id": "related-bugs"}),
Tag(
- "Bug #{}".format(bug1.id),
+ f"Bug #{bug1.id}",
"a",
attrs={
"class": "sprite bug",
"href": canonical_url(bug1, force_local_path=True),
},
- text="Bug #{}: {}".format(bug1.id, bug1.title),
+ text=f"Bug #{bug1.id}: {bug1.title}",
),
Tag(
- "Bug #{}".format(bug2.id),
+ f"Bug #{bug2.id}",
"a",
attrs={
"class": "sprite bug",
"href": canonical_url(bug2, force_local_path=True),
},
- text="Bug #{}: {}".format(bug2.id, bug2.title),
+ text=f"Bug #{bug2.id}: {bug2.title}",
),
),
)
@@ -397,7 +395,7 @@ class TestVulnerabilityListingPage(BrowserTestCase):
"vulnerability div",
"div",
attrs={
- "id": "vulnerability-{}".format(vulnerability.id),
+ "id": f"vulnerability-{vulnerability.id}",
},
)
vulnerability_img = Tag(
diff --git a/lib/lp/bugs/browser/vulnerability.py b/lib/lp/bugs/browser/vulnerability.py
index 2071463..64a5845 100644
--- a/lib/lp/bugs/browser/vulnerability.py
+++ b/lib/lp/bugs/browser/vulnerability.py
@@ -36,7 +36,7 @@ class VulnerabilityIndexView(BugLinksListingView):
class VulnerabilitySetIndexView(LaunchpadView):
@property
def label(self):
- return "{} vulnerabilities".format(self.context.displayname)
+ return f"{self.context.displayname} vulnerabilities"
@property
def page_title(self):
diff --git a/lib/lp/bugs/model/tests/test_bugtask.py b/lib/lp/bugs/model/tests/test_bugtask.py
index 0df9c0b..81e242c 100644
--- a/lib/lp/bugs/model/tests/test_bugtask.py
+++ b/lib/lp/bugs/model/tests/test_bugtask.py
@@ -580,7 +580,7 @@ class TestEditingBugTask(TestCaseWithFactory):
user = self.factory.makePerson()
bug = self.factory.makeBug(owner=user)
task = bug.default_bugtask
- whatchanged = "{}: importance explanation".format(task.bugtargetname)
+ whatchanged = f"{task.bugtargetname}: importance explanation"
self.assertEqual(1, bug.activity.count())
@@ -632,7 +632,7 @@ class TestEditingBugTask(TestCaseWithFactory):
user = self.factory.makePerson()
bug = self.factory.makeBug(owner=user)
task = bug.default_bugtask
- whatchanged = "{}: status explanation".format(task.bugtargetname)
+ whatchanged = f"{task.bugtargetname}: status explanation"
self.assertEqual(1, bug.activity.count())
diff --git a/lib/lp/bugs/model/tests/test_bugtasksearch.py b/lib/lp/bugs/model/tests/test_bugtasksearch.py
index e16a56b..3dc70a7 100644
--- a/lib/lp/bugs/model/tests/test_bugtasksearch.py
+++ b/lib/lp/bugs/model/tests/test_bugtasksearch.py
@@ -1936,7 +1936,7 @@ class TestBugTaskSetStatusSearchClauses(TestCase):
# in a "NOT".
status = BugTaskStatus.INCOMPLETE
base_query = self.searchClause(status)
- expected_negative_query = "NOT ({})".format(base_query)
+ expected_negative_query = f"NOT ({base_query})"
self.assertEqual(
expected_negative_query, self.searchClause(not_equals(status))
)
diff --git a/lib/lp/bugs/model/vulnerability.py b/lib/lp/bugs/model/vulnerability.py
index 772cb9f..d147171 100644
--- a/lib/lp/bugs/model/vulnerability.py
+++ b/lib/lp/bugs/model/vulnerability.py
@@ -157,8 +157,8 @@ class Vulnerability(StormBase, BugLinkTargetMixin, InformationTypeMixin):
if self.cve:
displayname = self.cve.displayname
else:
- displayname = "#{}".format(self.id)
- return "Vulnerability {}".format(displayname)
+ displayname = f"#{self.id}"
+ return f"Vulnerability {displayname}"
@property
def bugs(self):
diff --git a/lib/lp/bugs/scripts/uct/models.py b/lib/lp/bugs/scripts/uct/models.py
index 71f6c74..91cb076 100644
--- a/lib/lp/bugs/scripts/uct/models.py
+++ b/lib/lp/bugs/scripts/uct/models.py
@@ -54,13 +54,9 @@ __all__ = [
logger = logging.getLogger(__name__)
-CVSS = NamedTuple(
- "CVSS",
- (
- ("authority", str),
- ("vector_string", str),
- ),
-)
+class CVSS(NamedTuple):
+ authority: str
+ vector_string: str
class UCTRecord:
@@ -88,34 +84,22 @@ class UCTRecord:
NEEDED = "needed"
PENDING = "pending"
- SeriesPackageStatus = NamedTuple(
- "SeriesPackageStatus",
- (
- ("series", str),
- ("status", PackageStatus),
- ("reason", str),
- ("priority", Optional[Priority]),
- ),
- )
-
- Patch = NamedTuple(
- "Patch",
- (
- ("patch_type", str),
- ("entry", str),
- ),
- )
-
- Package = NamedTuple(
- "Package",
- (
- ("name", str),
- ("statuses", List[SeriesPackageStatus]),
- ("priority", Optional[Priority]),
- ("tags", Set[str]),
- ("patches", List[Patch]),
- ),
- )
+ class SeriesPackageStatus(NamedTuple):
+ series: str
+ status: "UCTRecord.PackageStatus"
+ reason: str
+ priority: Optional["UCTRecord.Priority"]
+
+ class Patch(NamedTuple):
+ patch_type: str
+ entry: str
+
+ class Package(NamedTuple):
+ name: str
+ statuses: List["UCTRecord.SeriesPackageStatus"]
+ priority: Optional["UCTRecord.Priority"]
+ tags: Set[str]
+ patches: List["UCTRecord.Patch"]
def __init__(
self,
@@ -205,7 +189,7 @@ class UCTRecord:
)
package_priority = cls._pop_cve_property(
cve_data,
- "Priority_{package}".format(package=package),
+ f"Priority_{package}",
required=False,
)
packages.append(
@@ -285,9 +269,7 @@ class UCTRecord:
# make sure all fields are consumed
if cve_data:
- raise AssertionError(
- "not all fields are consumed: {}".format(cve_data)
- )
+ raise AssertionError(f"not all fields are consumed: {cve_data}")
return entry
@@ -341,17 +323,15 @@ class UCTRecord:
for package in self.packages:
output.write("\n")
patches = [
- "{}: {}".format(patch.patch_type, patch.entry)
+ f"{patch.patch_type}: {patch.entry}"
for patch in package.patches
]
- self._write_field(
- "Patches_{}".format(package.name), patches, output
- )
+ self._write_field(f"Patches_{package.name}", patches, output)
for status in package.statuses:
self._write_field(
- "{}_{}".format(status.series, package.name),
+ f"{status.series}_{package.name}",
(
- "{} ({})".format(status.status.value, status.reason)
+ f"{status.status.value} ({status.reason})"
if status.reason
else status.status.value
),
@@ -359,21 +339,21 @@ class UCTRecord:
)
if package.priority:
self._write_field(
- "Priority_{}".format(package.name),
+ f"Priority_{package.name}",
package.priority.value,
output,
)
for status in package.statuses:
if status.priority:
self._write_field(
- "Priority_{}_{}".format(package.name, status.series),
+ f"Priority_{package.name}_{status.series}",
status.priority.value,
output,
)
if package.tags:
self._write_field(
- "Tags_{}".format(package.name),
+ f"Tags_{package.name}",
" ".join(package.tags),
output,
)
@@ -399,13 +379,13 @@ class UCTRecord:
) -> None:
if isinstance(value, str):
if value:
- output.write("{}: {}\n".format(name, value))
+ output.write(f"{name}: {value}\n")
else:
- output.write("{}:\n".format(name))
+ output.write(f"{name}:\n")
elif isinstance(value, list):
- output.write("{}:\n".format(name))
+ output.write(f"{name}:\n")
for line in value:
- output.write(" {}\n".format(line))
+ output.write(f" {line}\n")
else:
raise AssertionError()
@@ -418,7 +398,7 @@ class UCTRecord:
lines = []
for author, text in notes:
note_lines = text.split("\n")
- lines.append("{}> {}".format(author, note_lines[0]))
+ lines.append(f"{author}> {note_lines[0]}")
for line in note_lines[1:]:
lines.append(" " + line)
return "\n".join(lines)
@@ -431,46 +411,30 @@ class CVE:
Do not confuse this with `Cve` database model.
"""
- DistroPackage = NamedTuple(
- "DistroPackage",
- (
- ("target", DistributionSourcePackage),
- ("package_name", SourcePackageName),
- ("importance", Optional[BugTaskImportance]),
- ),
- )
-
- SeriesPackage = NamedTuple(
- "SeriesPackage",
- (
- ("target", SourcePackage),
- ("package_name", SourcePackageName),
- ("importance", Optional[BugTaskImportance]),
- ("status", BugTaskStatus),
- ("status_explanation", str),
- ),
- )
-
- UpstreamPackage = NamedTuple(
- "UpstreamPackage",
- (
- ("target", Product),
- ("package_name", SourcePackageName),
- ("importance", Optional[BugTaskImportance]),
- ("status", BugTaskStatus),
- ("status_explanation", str),
- ),
- )
-
- PatchURL = NamedTuple(
- "PatchURL",
- (
- ("package_name", SourcePackageName),
- ("type", str),
- ("url", str),
- ("notes", Optional[str]),
- ),
- )
+ class DistroPackage(NamedTuple):
+ target: DistributionSourcePackage
+ package_name: SourcePackageName
+ importance: Optional[BugTaskImportance]
+
+ class SeriesPackage(NamedTuple):
+ target: SourcePackage
+ package_name: SourcePackageName
+ importance: Optional[BugTaskImportance]
+ status: BugTaskStatus
+ status_explanation: str
+
+ class UpstreamPackage(NamedTuple):
+ target: Product
+ package_name: SourcePackageName
+ importance: Optional[BugTaskImportance]
+ status: BugTaskStatus
+ status_explanation: str
+
+ class PatchURL(NamedTuple):
+ package_name: SourcePackageName
+ type: str
+ url: str
+ notes: Optional[str]
# Example:
# https://github.com/389ds/389-ds-base/commit/123 (1.4.4)
@@ -731,7 +695,7 @@ class CVE:
if distro_name != "ubuntu":
if distro_name == "ubuntu-esm":
distro_name = "esm"
- series_name = "{}/{}".format(series_name, distro_name)
+ series_name = f"{series_name}/{distro_name}"
statuses.append(
UCTRecord.SeriesPackageStatus(
series=series_name,
@@ -789,7 +753,7 @@ class CVE:
for patch_url in self.patch_urls:
entry = patch_url.url
if patch_url.notes:
- entry = "{} ({})".format(entry, patch_url.notes)
+ entry = f"{entry} ({patch_url.notes})"
packages_by_name[patch_url.package_name.name].patches.append(
UCTRecord.Patch(
patch_type=patch_url.type,
diff --git a/lib/lp/bugs/scripts/uct/uctexport.py b/lib/lp/bugs/scripts/uct/uctexport.py
index 8a1dd27..e91fba5 100644
--- a/lib/lp/bugs/scripts/uct/uctexport.py
+++ b/lib/lp/bugs/scripts/uct/uctexport.py
@@ -38,13 +38,9 @@ class UCTExporter:
UCT CVE files.
"""
- ParsedDescription = NamedTuple(
- "ParsedDescription",
- (
- ("description", str),
- ("references", List[str]),
- ),
- )
+ class ParsedDescription(NamedTuple):
+ description: str
+ references: List[str]
# Example:
# linux/upstream
@@ -93,7 +89,7 @@ class UCTExporter:
vulnerabilities = list(bug.vulnerabilities)
if not vulnerabilities:
raise ValueError(
- "Bug with ID: {} does not have vulnerabilities".format(bug.id)
+ f"Bug with ID: {bug.id} does not have vulnerabilities"
)
vulnerability = vulnerabilities[0] # type: Vulnerability
if not vulnerability.cve:
@@ -227,7 +223,7 @@ class UCTExporter:
)
return CVE(
- sequence="CVE-{}".format(lp_cve.sequence),
+ sequence=f"CVE-{lp_cve.sequence}",
date_made_public=vulnerability.date_made_public,
date_notice_issued=vulnerability.date_notice_issued,
date_coordinated_release=vulnerability.date_coordinated_release,
diff --git a/lib/lp/bugs/scripts/uct/uctimport.py b/lib/lp/bugs/scripts/uct/uctimport.py
index 5bc8411..17a4ed7 100644
--- a/lib/lp/bugs/scripts/uct/uctimport.py
+++ b/lib/lp/bugs/scripts/uct/uctimport.py
@@ -190,7 +190,7 @@ class UCTImporter:
datechanged=UTC_NOW,
person=self.bug_importer,
whatchanged="bug",
- message="UCT CVE entry {}".format(cve.sequence),
+ message=f"UCT CVE entry {cve.sequence}",
)
# Create the Vulnerabilities
@@ -435,9 +435,9 @@ class UCTImporter:
def _update_patches(self, bug: BugModel, patch_urls: List[CVE.PatchURL]):
attachments_by_url = {a.url: a for a in bug.attachments if a.url}
for patch_url in patch_urls:
- title = "{}/{}".format(patch_url.package_name.name, patch_url.type)
+ title = f"{patch_url.package_name.name}/{patch_url.type}"
if patch_url.notes:
- title = "{}/{}".format(title, patch_url.notes)
+ title = f"{title}/{patch_url.notes}"
if patch_url in attachments_by_url:
attachment = removeSecurityProxy(
attachments_by_url[patch_url.url]
diff --git a/lib/lp/bugs/subscribers/webhooks.py b/lib/lp/bugs/subscribers/webhooks.py
index 7f5b16c..dc06dc8 100644
--- a/lib/lp/bugs/subscribers/webhooks.py
+++ b/lib/lp/bugs/subscribers/webhooks.py
@@ -133,7 +133,7 @@ def bug_modified(bug: IBug, event: IObjectCreatedEvent):
previous_state = event.object_before_modification
for field in changed_fields:
- action = "{}-changed".format(field)
+ action = f"{field}-changed"
for bugtask in bug.bugtasks:
_trigger_bugtask_webhook(action, bugtask, previous_state)
@@ -149,7 +149,7 @@ def bugtask_modified(bugtask: IBugTask, event: IObjectModifiedEvent):
previous_state = event.object_before_modification
for field in changed_fields:
- action = "{}-changed".format(field)
+ action = f"{field}-changed"
_trigger_bugtask_webhook(action, bugtask, previous_state)
diff --git a/lib/lp/bugs/tests/test_bugtaskflat_triggers.py b/lib/lp/bugs/tests/test_bugtaskflat_triggers.py
index c62f7eb..7f91c86 100644
--- a/lib/lp/bugs/tests/test_bugtaskflat_triggers.py
+++ b/lib/lp/bugs/tests/test_bugtaskflat_triggers.py
@@ -22,35 +22,32 @@ from lp.testing import TestCaseWithFactory, login_person, person_logged_in
from lp.testing.dbuser import dbuser
from lp.testing.layers import DatabaseFunctionalLayer
-BugTaskFlat = NamedTuple(
- "BugTaskFlat",
- (
- ("bugtask", Any),
- ("bug", Any),
- ("datecreated", Any),
- ("latest_patch_uploaded", Any),
- ("date_closed", Any),
- ("date_last_updated", Any),
- ("duplicateof", Any),
- ("bug_owner", Any),
- ("fti", Any),
- ("information_type", Any),
- ("heat", Any),
- ("product", Any),
- ("productseries", Any),
- ("distribution", Any),
- ("distroseries", Any),
- ("sourcepackagename", Any),
- ("status", Any),
- ("importance", Any),
- ("assignee", Any),
- ("milestone", Any),
- ("owner", Any),
- ("active", Any),
- ("access_policies", Any),
- ("access_grants", Any),
- ),
-)
+
+class BugTaskFlat(NamedTuple):
+ bugtask: Any
+ bug: Any
+ datecreated: Any
+ latest_patch_uploaded: Any
+ date_closed: Any
+ date_last_updated: Any
+ duplicateof: Any
+ bug_owner: Any
+ fti: Any
+ information_type: Any
+ heat: Any
+ product: Any
+ productseries: Any
+ distribution: Any
+ distroseries: Any
+ sourcepackagename: Any
+ status: Any
+ importance: Any
+ assignee: Any
+ milestone: Any
+ owner: Any
+ active: Any
+ access_policies: Any
+ access_grants: Any
class BugTaskFlatTestMixin(TestCaseWithFactory):
diff --git a/lib/lp/buildmaster/browser/tests/test_builder.py b/lib/lp/buildmaster/browser/tests/test_builder.py
index a582dc7..ea90ae3 100644
--- a/lib/lp/buildmaster/browser/tests/test_builder.py
+++ b/lib/lp/buildmaster/browser/tests/test_builder.py
@@ -236,7 +236,7 @@ class TestBuildersHomepage(TestCaseWithFactory, BuildCreationMixin):
content = builders_homepage_render()
# We don't show a duration for a builder that has only been cleaning
# for a short time.
- expected_text = ["{}\nCleaning".format(builders[0].name)]
+ expected_text = [f"{builders[0].name}\nCleaning"]
# We show durations for builders that have been cleaning for more
# than ten minutes.
expected_text.extend(
diff --git a/lib/lp/buildmaster/builderproxy.py b/lib/lp/buildmaster/builderproxy.py
index 2182194..cdc5c36 100644
--- a/lib/lp/buildmaster/builderproxy.py
+++ b/lib/lp/buildmaster/builderproxy.py
@@ -76,7 +76,7 @@ class BuilderProxyMixin:
proxy_username = "{build_id}-{timestamp}".format(
build_id=self.build.build_cookie, timestamp=timestamp
)
- auth_string = "{}:{}".format(admin_username, secret).strip()
+ auth_string = f"{admin_username}:{secret}".strip()
auth_header = b"Basic " + base64.b64encode(auth_string.encode("ASCII"))
token = yield self._worker.process_pool.doWork(
diff --git a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
index bdb07b6..6a808b3 100644
--- a/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
+++ b/lib/lp/buildmaster/interfaces/buildfarmjobbehaviour.py
@@ -13,6 +13,7 @@ from typing import Any, Dict, Generator, List, Sequence, Union
from typing_extensions import TypedDict
from zope.interface import Attribute, Interface
+
# XXX cjwatson 2023-01-04: This should ultimately end up as a protocol
# specification maintained in launchpad-buildd as (probably) pydantic
# models, but this is difficult while Launchpad runs on Python < 3.7.
@@ -23,129 +24,124 @@ from zope.interface import Attribute, Interface
# on Python 3.5. In the meantime, job type constraints are noted in the
# comments with the type name (i.e. `IBuildFarmJobBehaviour.builder_type`)
# in brackets.
-BuildArgs = TypedDict(
- "BuildArgs",
- {
- # True if this build should build architecture-independent packages
- # as well as architecture-dependent packages [binarypackage].
- "arch_indep": bool,
- # The architecture tag to build for.
- "arch_tag": str,
- # Whether this is a build in a private archive. (This causes URLs
- # in the build log to be sanitized.)
- "archive_private": bool,
- # The name of the target archive's purpose, e.g. PRIMARY or PPA
- # [binarypackage; required for sourcepackagerecipe].
- "archive_purpose": str,
- # A list of sources.list lines to use for this build.
- "archives": List[str],
- # The email address of the person who requested the recipe build
- # [required for sourcepackagerecipe].
- "author_email": str,
- # The name of the person who requested the recipe build [required
- # for sourcepackagerecipe].
- "author_name": str,
- # The URL of the Bazaar branch to build from [charm, ci, oci, snap,
- # translation-templates].
- "branch": str,
- # The URL of the Bazaar branch to build from
- # [translation-templates]. Deprecated alias for branch.
- "branch_url": str,
- # ARG variables to pass when building this OCI recipe [oci].
- "build_args": Dict[str, str],
- # If True, this build should also build debug symbol packages
- # [binarypackage].
- "build_debug_symbols": bool,
- # The relative path to the build file within this recipe's branch
- # [oci].
- "build_file": str,
- # The subdirectory within this recipe's branch containing the build
- # file [charm, oci].
- "build_path": str,
- # The ID of the build request that prompted this build [snap].
- "build_request_id": int,
- # The RFC3339-formatted time when the build request that prompted
- # this build was made [snap].
- "build_request_timestamp": str,
- # If True, also build a tarball containing all source code [snap].
- "build_source_tarball": bool,
- # The URL of this build.
- "build_url": str,
- # Builder resource tags required by this build farm job.
- "builder_constraints": Sequence[str],
- # Source snap channels to use for this build [charm, ci, snap].
- "channels": Dict[str, str],
- # The date stamp to set in the built image [livefs].
- "datestamp": str,
- # A dictionary of additional environment variables to pass to the CI
- # build runner [ci].
- "environment_variables": Dict[str, str],
- # If True, this build is running in an ephemeral environment; skip
- # final cleanup steps.
- "fast_cleanup": bool,
- # True if this build is for a Git-based source package recipe,
- # otherwise False [sourcepackagerecipe].
- "git": bool,
- # The Git branch path to build from [charm, ci, oci, snap,
- # translation-templates].
- "git_path": str,
- # The URL of the Git repository to build from [charm, ci, oci, snap,
- # translation-templates].
- "git_repository": str,
- # A list of stages in this build's configured pipeline [required for
- # ci].
- "jobs": List[str],
- # Dictionary of additional metadata to pass to the build [oci].
- # XXX cjwatson 2023-01-04: This doesn't appear to be used by
- # launchpad-buildd at the moment.
- "metadata": Dict[str, Any],
- # The name of the recipe [required for charm, oci, snap].
- "name": str,
- # The name of the component to build for [required for
- # binarypackage, sourcepackagerecipe]. This argument has a strange
- # name due to a historical in-joke: because components form a sort
- # of layered structure where "outer" components like universe
- # include "inner" components like main, the component structure was
- # at one point referred to as the "ogre model" (from the movie
- # "Shrek": "Ogres have layers. Onions have layers. You get it? We
- # both have layers.").
- "ogrecomponent": str,
- # A list of sources.list lines for the CI build runner to use [ci].
- "package_repositories": List[str],
- # A dictionary of plugin settings to pass to the CI build runner
- # [ci].
- "plugin_settings": Dict[str, str],
- # The lower-cased name of the pocket to build from [required for
- # livefs].
- "pocket": str,
- # If True, the source of this build is private [snap; also passed
- # for charm and ci but currently unused there].
- "private": bool,
- # The URL of the proxy for internet access [charm, ci, oci, snap].
- "proxy_url": str,
- # The text of the recipe to build [required for
- # sourcepackagerecipe].
- "recipe_text": str,
- # The URL for revoking proxy authorization tokens [charm, ci, oci,
- # snap].
- "revocation_endpoint": str,
- # If True, scan job output for malware [ci].
- "scan_malware": bool,
- # A dictionary of secrets to pass to the CI build runner [ci].
- "secrets": Dict[str, str],
- # The name of the series to build for [required for all types].
- "series": str,
- # The name of the suite to build for [required for binarypackage,
- # sourcepackagerecipe].
- "suite": str,
- # A list of target architecture tags to build for [snap].
- "target_architectures": List[str],
- # A list of base64-encoded public keys for apt archives used by this
- # build.
- "trusted_keys": List[str],
- },
- total=False,
-)
+class BuildArgs(TypedDict, total=False):
+ # True if this build should build architecture-independent packages
+ # as well as architecture-dependent packages [binarypackage].
+ arch_indep: bool
+ # The architecture tag to build for.
+ arch_tag: str
+ # Whether this is a build in a private archive. (This causes URLs
+ # in the build log to be sanitized.)
+ archive_private: bool
+ # The name of the target archive's purpose, e.g. PRIMARY or PPA
+ # [binarypackage; required for sourcepackagerecipe].
+ archive_purpose: str
+ # A list of sources.list lines to use for this build.
+ archives: List[str]
+ # The email address of the person who requested the recipe build
+ # [required for sourcepackagerecipe].
+ author_email: str
+ # The name of the person who requested the recipe build [required
+ # for sourcepackagerecipe].
+ author_name: str
+ # The URL of the Bazaar branch to build from [charm, ci, oci, snap,
+ # translation-templates].
+ branch: str
+ # The URL of the Bazaar branch to build from
+ # [translation-templates]. Deprecated alias for branch.
+ branch_url: str
+ # ARG variables to pass when building this OCI recipe [oci].
+ build_args: Dict[str, str]
+ # If True, this build should also build debug symbol packages
+ # [binarypackage].
+ build_debug_symbols: bool
+ # The relative path to the build file within this recipe's branch
+ # [oci].
+ build_file: str
+ # The subdirectory within this recipe's branch containing the build
+ # file [charm, oci].
+ build_path: str
+ # The ID of the build request that prompted this build [snap].
+ build_request_id: int
+ # The RFC3339-formatted time when the build request that prompted
+ # this build was made [snap].
+ build_request_timestamp: str
+ # If True, also build a tarball containing all source code [snap].
+ build_source_tarball: bool
+ # The URL of this build.
+ build_url: str
+ # Builder resource tags required by this build farm job.
+ builder_constraints: Sequence[str]
+ # Source snap channels to use for this build [charm, ci, snap].
+ channels: Dict[str, str]
+ # The date stamp to set in the built image [livefs].
+ datestamp: str
+ # A dictionary of additional environment variables to pass to the CI
+ # build runner [ci].
+ environment_variables: Dict[str, str]
+ # If True, this build is running in an ephemeral environment; skip
+ # final cleanup steps.
+ fast_cleanup: bool
+ # True if this build is for a Git-based source package recipe,
+ # otherwise False [sourcepackagerecipe].
+ git: bool
+ # The Git branch path to build from [charm, ci, oci, snap,
+ # translation-templates].
+ git_path: str
+ # The URL of the Git repository to build from [charm, ci, oci, snap,
+ # translation-templates].
+ git_repository: str
+ # A list of stages in this build's configured pipeline [required for
+ # ci].
+ jobs: List[str]
+ # Dictionary of additional metadata to pass to the build [oci].
+ # XXX cjwatson 2023-01-04: This doesn't appear to be used by
+ # launchpad-buildd at the moment.
+ metadata: Dict[str, Any]
+ # The name of the recipe [required for charm, oci, snap].
+ name: str
+ # The name of the component to build for [required for
+ # binarypackage, sourcepackagerecipe]. This argument has a strange
+ # name due to a historical in-joke: because components form a sort
+ # of layered structure where "outer" components like universe
+ # include "inner" components like main, the component structure was
+ # at one point referred to as the "ogre model" (from the movie
+ # "Shrek": "Ogres have layers. Onions have layers. You get it? We
+ # both have layers.").
+ ogrecomponent: str
+ # A list of sources.list lines for the CI build runner to use [ci].
+ package_repositories: List[str]
+ # A dictionary of plugin settings to pass to the CI build runner
+ # [ci].
+ plugin_settings: Dict[str, str]
+ # The lower-cased name of the pocket to build from [required for
+ # livefs].
+ pocket: str
+ # If True, the source of this build is private [snap; also passed
+ # for charm and ci but currently unused there].
+ private: bool
+ # The URL of the proxy for internet access [charm, ci, oci, snap].
+ proxy_url: str
+ # The text of the recipe to build [required for
+ # sourcepackagerecipe].
+ recipe_text: str
+ # The URL for revoking proxy authorization tokens [charm, ci, oci,
+ # snap].
+ revocation_endpoint: str
+ # If True, scan job output for malware [ci].
+ scan_malware: bool
+ # A dictionary of secrets to pass to the CI build runner [ci].
+ secrets: Dict[str, str]
+ # The name of the series to build for [required for all types].
+ series: str
+ # The name of the suite to build for [required for binarypackage,
+ # sourcepackagerecipe].
+ suite: str
+ # A list of target architecture tags to build for [snap].
+ target_architectures: List[str]
+ # A list of base64-encoded public keys for apt archives used by this
+ # build.
+ trusted_keys: List[str]
class IBuildFarmJobBehaviour(Interface):
diff --git a/lib/lp/buildmaster/tests/builderproxy.py b/lib/lp/buildmaster/tests/builderproxy.py
index 3f7fc4f..a891b6c 100644
--- a/lib/lp/buildmaster/tests/builderproxy.py
+++ b/lib/lp/buildmaster/tests/builderproxy.py
@@ -98,7 +98,7 @@ class ProxyURLMatcher(MatchesStructure):
def __init__(self, job, now):
super().__init__(
scheme=Equals("http"),
- username=Equals("{}-{}".format(job.build.build_cookie, int(now))),
+ username=Equals(f"{job.build.build_cookie}-{int(now)}"),
password=HasLength(32),
hostname=Equals(config.builddmaster.builder_proxy_host),
port=Equals(config.builddmaster.builder_proxy_port),
diff --git a/lib/lp/charms/adapters/buildarch.py b/lib/lp/charms/adapters/buildarch.py
index 615a9f0..2579ad8 100644
--- a/lib/lp/charms/adapters/buildarch.py
+++ b/lib/lp/charms/adapters/buildarch.py
@@ -20,7 +20,7 @@ class MissingPropertyError(CharmBasesParserError):
def __init__(self, prop):
super().__init__(
- "Base specification is missing the {!r} property".format(prop)
+ f"Base specification is missing the {prop!r} property"
)
self.property = prop
diff --git a/lib/lp/charms/browser/tests/test_charmrecipe.py b/lib/lp/charms/browser/tests/test_charmrecipe.py
index 997b711..427b96f 100644
--- a/lib/lp/charms/browser/tests/test_charmrecipe.py
+++ b/lib/lp/charms/browser/tests/test_charmrecipe.py
@@ -1097,9 +1097,7 @@ class TestCharmRecipeAuthorizeView(BaseTestCharmRecipeView):
lambda b: json.loads(b.decode()),
Equals(
{
- "description": (
- "{} for launchpad.test".format(store_name)
- ),
+ "description": (f"{store_name} for launchpad.test"),
"packages": [
{
"type": "charm",
diff --git a/lib/lp/charms/model/charmhubclient.py b/lib/lp/charms/model/charmhubclient.py
index cfc9567..ad11dd0 100644
--- a/lib/lp/charms/model/charmhubclient.py
+++ b/lib/lp/charms/model/charmhubclient.py
@@ -43,15 +43,13 @@ def _get_macaroon(recipe):
macaroon_raw = store_secrets.get("exchanged_encrypted")
if macaroon_raw is None:
raise UnauthorizedUploadResponse(
- "{} is not authorized for upload to Charmhub".format(recipe)
+ f"{recipe} is not authorized for upload to Charmhub"
)
container = getUtility(IEncryptedContainer, "charmhub-secrets")
try:
return container.decrypt(macaroon_raw).decode()
except CryptoError as e:
- raise UnauthorizedUploadResponse(
- "Failed to decrypt macaroon: {}".format(e)
- )
+ raise UnauthorizedUploadResponse(f"Failed to decrypt macaroon: {e}")
@implementer(ICharmhubClient)
@@ -213,7 +211,7 @@ class CharmhubClient:
assert recipe.can_upload_to_store
push_url = urlappend(
config.charms.charmhub_url,
- "v1/charm/{}/revisions".format(quote(recipe.store_name)),
+ f"v1/charm/{quote(recipe.store_name)}/revisions",
)
macaroon_raw = _get_macaroon(recipe)
data = {"upload-id": upload_id}
@@ -225,7 +223,7 @@ class CharmhubClient:
response = urlfetch(
push_url,
method="POST",
- headers={"Authorization": "Macaroon {}".format(macaroon_raw)},
+ headers={"Authorization": f"Macaroon {macaroon_raw}"},
json=data,
)
response_data = response.json()
@@ -252,7 +250,7 @@ class CharmhubClient:
try:
response = urlfetch(
status_url,
- headers={"Authorization": "Macaroon {}".format(macaroon_raw)},
+ headers={"Authorization": f"Macaroon {macaroon_raw}"},
)
response_data = response.json()
# We're asking for a single upload ID, so the response should
@@ -288,7 +286,7 @@ class CharmhubClient:
assert recipe.store_channels
release_url = urlappend(
config.charms.charmhub_url,
- "v1/charm/{}/releases".format(quote(recipe.store_name)),
+ f"v1/charm/{quote(recipe.store_name)}/releases",
)
macaroon_raw = _get_macaroon(recipe)
data = [
@@ -303,7 +301,7 @@ class CharmhubClient:
urlfetch(
release_url,
method="POST",
- headers={"Authorization": "Macaroon {}".format(macaroon_raw)},
+ headers={"Authorization": f"Macaroon {macaroon_raw}"},
json=data,
)
except requests.HTTPError as e:
diff --git a/lib/lp/charms/tests/test_charmhubclient.py b/lib/lp/charms/tests/test_charmhubclient.py
index c33a955..1d5c3bf 100644
--- a/lib/lp/charms/tests/test_charmhubclient.py
+++ b/lib/lp/charms/tests/test_charmhubclient.py
@@ -59,7 +59,7 @@ class MacaroonVerifies(Matcher):
self.key = key
def __str__(self):
- return "MacaroonVerifies({!r})".format(self.key)
+ return f"MacaroonVerifies({self.key!r})"
def match(self, macaroon_raw):
macaroon = Macaroon.deserialize(macaroon_raw)
@@ -210,7 +210,7 @@ class TestCharmhubClient(TestCaseWithFactory):
def _addCharmReleaseResponse(self, name):
responses.add(
"POST",
- "http://charmhub.example/v1/charm/{}/releases".format(quote(name)),
+ f"http://charmhub.example/v1/charm/{quote(name)}/releases",
json={},
)
diff --git a/lib/lp/charms/tests/test_charmrecipe.py b/lib/lp/charms/tests/test_charmrecipe.py
index b4922dc..5fad120 100644
--- a/lib/lp/charms/tests/test_charmrecipe.py
+++ b/lib/lp/charms/tests/test_charmrecipe.py
@@ -1049,7 +1049,7 @@ class TestCharmRecipeAuthorization(TestCaseWithFactory):
Equals(
{
"description": (
- "{} for launchpad.test".format(recipe.store_name)
+ f"{recipe.store_name} for launchpad.test"
),
"packages": [
{
@@ -2158,9 +2158,7 @@ class TestCharmRecipeWebservice(TestCaseWithFactory):
)
with person_logged_in(self.person):
expected_body = {
- "description": (
- "{} for launchpad.test".format(recipe.store_name)
- ),
+ "description": (f"{recipe.store_name} for launchpad.test"),
"packages": [{"type": "charm", "name": recipe.store_name}],
"permissions": [
"package-manage-releases",
diff --git a/lib/lp/code/browser/branchmergeproposal.py b/lib/lp/code/browser/branchmergeproposal.py
index 1e6bda0..ecf1251 100644
--- a/lib/lp/code/browser/branchmergeproposal.py
+++ b/lib/lp/code/browser/branchmergeproposal.py
@@ -670,7 +670,7 @@ class BranchMergeProposalView(
adjusted for this user."""
base_url = urlsplit(self.context.source_git_repository.git_ssh_url)
url = list(base_url)
- url[1] = "{}@{}".format(self.user.name, base_url.hostname)
+ url[1] = f"{self.user.name}@{base_url.hostname}"
return urlunsplit(url)
@cachedproperty
diff --git a/lib/lp/code/browser/gitref.py b/lib/lp/code/browser/gitref.py
index 3400ff1..9b6b000 100644
--- a/lib/lp/code/browser/gitref.py
+++ b/lib/lp/code/browser/gitref.py
@@ -120,7 +120,7 @@ class GitRefView(
"""The git+ssh:// URL for this branch, adjusted for this user."""
base_url = urlsplit(self.context.repository.git_ssh_url)
url = list(base_url)
- url[1] = "{}@{}".format(self.user.name, base_url.hostname)
+ url[1] = f"{self.user.name}@{base_url.hostname}"
return urlunsplit(url)
@property
@@ -138,7 +138,7 @@ class GitRefView(
config.codehosting.git_ssh_root, contributor.shortened_path
)
url = list(urlsplit(base_url))
- url[1] = "{}@{}".format(self.user.name, url[1])
+ url[1] = f"{self.user.name}@{url[1]}"
return urlunsplit(url)
@property
@@ -461,7 +461,7 @@ class GitRefRegisterMergeProposalView(LaunchpadFormView):
self.addError(str(error))
def _validateRef(self, data, name):
- ref = data["{}_git_ref".format(name)]
+ ref = data[f"{name}_git_ref"]
if ref == self.context:
self.setFieldError(
"%s_git_ref" % name,
diff --git a/lib/lp/code/browser/gitrepository.py b/lib/lp/code/browser/gitrepository.py
index 359ed2c..d6da0d3 100644
--- a/lib/lp/code/browser/gitrepository.py
+++ b/lib/lp/code/browser/gitrepository.py
@@ -429,7 +429,7 @@ class GitRepositoryView(
"""The git+ssh:// URL for this repository, adjusted for this user."""
base_url = urlsplit(self.context.git_ssh_url)
url = list(base_url)
- url[1] = "{}@{}".format(self.user.name, base_url.hostname)
+ url[1] = f"{self.user.name}@{base_url.hostname}"
return urlunsplit(url)
@property
@@ -446,7 +446,7 @@ class GitRepositoryView(
config.codehosting.git_ssh_root, contributor.shortened_path
)
url = list(urlsplit(base_url))
- url[1] = "{}@{}".format(self.user.name, url[1])
+ url[1] = f"{self.user.name}@{url[1]}"
return urlunsplit(url)
@property
diff --git a/lib/lp/code/browser/tests/test_branch.py b/lib/lp/code/browser/tests/test_branch.py
index 3935dc8..4987a31 100644
--- a/lib/lp/code/browser/tests/test_branch.py
+++ b/lib/lp/code/browser/tests/test_branch.py
@@ -716,13 +716,11 @@ class TestBranchDeletionView(BrowserTestCase):
def test_owner_can_delete(self):
browser = self.getUserBrowser(self.branch_url, user=self.branch.owner)
browser.open(self.branch_url)
- self.assertIn(
- "Delete branch {}".format(self.branch_name), browser.contents
- )
+ self.assertIn(f"Delete branch {self.branch_name}", browser.contents)
browser.getControl("Delete").click()
self.assertIn(
- "Branch {} deleted.".format(self.branch_unique_name),
+ f"Branch {self.branch_unique_name} deleted.",
browser.contents,
)
@@ -730,12 +728,10 @@ class TestBranchDeletionView(BrowserTestCase):
expert = self.factory.makeRegistryExpert()
browser = self.getUserBrowser(self.branch_url, user=expert)
browser.open(self.branch_url)
- self.assertIn(
- "Delete branch {}".format(self.branch_name), browser.contents
- )
+ self.assertIn(f"Delete branch {self.branch_name}", browser.contents)
browser.getControl("Delete").click()
self.assertIn(
- "Branch {} deleted.".format(self.branch_unique_name),
+ f"Branch {self.branch_unique_name} deleted.",
browser.contents,
)
@@ -743,12 +739,10 @@ class TestBranchDeletionView(BrowserTestCase):
commercial_admin = self.factory.makeCommercialAdmin()
browser = self.getUserBrowser(self.branch_url, user=commercial_admin)
browser.open(self.branch_url)
- self.assertIn(
- "Delete branch {}".format(self.branch_name), browser.contents
- )
+ self.assertIn(f"Delete branch {self.branch_name}", browser.contents)
browser.getControl("Delete").click()
self.assertIn(
- "Branch {} deleted.".format(self.branch_unique_name),
+ f"Branch {self.branch_unique_name} deleted.",
browser.contents,
)
diff --git a/lib/lp/code/browser/widgets/tests/test_gitrefwidget.py b/lib/lp/code/browser/widgets/tests/test_gitrefwidget.py
index 1cf4478..49c4eb8 100644
--- a/lib/lp/code/browser/widgets/tests/test_gitrefwidget.py
+++ b/lib/lp/code/browser/widgets/tests/test_gitrefwidget.py
@@ -262,7 +262,7 @@ class TestGitRefWidget(WithScenarios, TestCaseWithFactory):
getUtility(IGitRepositorySet).setDefaultRepositoryForOwner(
owner, target, repo, owner
)
- short_url = "~{}/{}".format(owner.name, target.name)
+ short_url = f"~{owner.name}/{target.name}"
form = {
"field.git_ref.repository": short_url,
"field.git_ref.path": ref.path,
diff --git a/lib/lp/code/model/diff.py b/lib/lp/code/model/diff.py
index 27f9192..da25c0c 100644
--- a/lib/lp/code/model/diff.py
+++ b/lib/lp/code/model/diff.py
@@ -433,14 +433,14 @@ class PreviewDiff(StormBase):
revision_id=self.source_revision_id
)
if source_revision and source_revision.sequence:
- source_rev = "r{}".format(source_revision.sequence)
+ source_rev = f"r{source_revision.sequence}"
else:
source_rev = self.source_revision_id
target_revision = bmp.target_branch.getBranchRevision(
revision_id=self.target_revision_id
)
if target_revision and target_revision.sequence:
- target_rev = "r{}".format(target_revision.sequence)
+ target_rev = f"r{target_revision.sequence}"
else:
target_rev = self.target_revision_id
else:
@@ -451,7 +451,7 @@ class PreviewDiff(StormBase):
source_rev = self.source_revision_id[:7]
target_rev = self.target_revision_id[:7]
- return "{} into {}".format(source_rev, target_rev)
+ return f"{source_rev} into {target_rev}"
@property
def has_conflicts(self):
diff --git a/lib/lp/code/model/lpci.py b/lib/lp/code/model/lpci.py
index 1aa04e2..5a59758 100644
--- a/lib/lp/code/model/lpci.py
+++ b/lib/lp/code/model/lpci.py
@@ -55,7 +55,7 @@ def load_configuration(configuration_file):
for required_key in "pipeline", "jobs":
if required_key not in content:
raise LPCIConfigurationError(
- "Configuration file does not declare '{}'".format(required_key)
+ f"Configuration file does not declare '{required_key}'"
)
# normalize each element of `pipeline` into a list
expanded_values = content.copy()
diff --git a/lib/lp/codehosting/scripts/sync_branches.py b/lib/lp/codehosting/scripts/sync_branches.py
index 8bbd875..b0c8132 100644
--- a/lib/lp/codehosting/scripts/sync_branches.py
+++ b/lib/lp/codehosting/scripts/sync_branches.py
@@ -48,7 +48,7 @@ class SyncBranchesScript(LaunchpadScript):
"%s/" % branch_dir,
]
try:
- subprocess.check_output(args, universal_newlines=True)
+ subprocess.check_output(args, text=True)
except subprocess.CalledProcessError as e:
if "No such file or directory" in e.output:
self.logger.warning(
diff --git a/lib/lp/oci/browser/ocirecipe.py b/lib/lp/oci/browser/ocirecipe.py
index 6b04623..ca574a7 100644
--- a/lib/lp/oci/browser/ocirecipe.py
+++ b/lib/lp/oci/browser/ocirecipe.py
@@ -334,7 +334,7 @@ class OCIRecipeView(LaunchpadView):
status["date_estimated"] = build_job.estimate
return {
"builds": [build_job],
- "job_id": "build{}".format(build_job.id),
+ "job_id": f"build{build_job.id}",
"date_created": build_job.date_created,
"date_finished": build_job.date_finished,
"build_status": status,
diff --git a/lib/lp/oci/browser/tests/test_ocirecipe.py b/lib/lp/oci/browser/tests/test_ocirecipe.py
index edeed49..bb11c3c 100644
--- a/lib/lp/oci/browser/tests/test_ocirecipe.py
+++ b/lib/lp/oci/browser/tests/test_ocirecipe.py
@@ -347,7 +347,7 @@ class TestOCIRecipeAddView(OCIConfigHelperMixin, BaseTestOCIRecipeView):
browser.getControl("Create OCI recipe").click()
content = find_main_content(browser.contents)
self.assertThat(
- "Registry image name:\n{}".format(image_name),
+ f"Registry image name:\n{image_name}",
MatchesTagText(content, "image-name"),
)
@@ -1051,7 +1051,7 @@ class TestOCIRecipeEditView(OCIConfigHelperMixin, BaseTestOCIRecipeView):
browser.getControl("Update OCI recipe").click()
content = find_main_content(browser.contents)
self.assertThat(
- "Registry image name:\n{}".format(image_name),
+ f"Registry image name:\n{image_name}",
MatchesTagText(content, "image-name"),
)
diff --git a/lib/lp/oci/model/ociregistryclient.py b/lib/lp/oci/model/ociregistryclient.py
index 4316efb..04061a7 100644
--- a/lib/lp/oci/model/ociregistryclient.py
+++ b/lib/lp/oci/model/ociregistryclient.py
@@ -111,10 +111,10 @@ class OCIRegistryClient:
# Check if it already exists
try:
head_response = http_client.requestPath(
- "/blobs/{}".format(digest), method="HEAD"
+ f"/blobs/{digest}", method="HEAD"
)
if head_response.status_code == 200:
- log.info("{} already found".format(digest))
+ log.info(f"{digest} already found")
return
except HTTPError as http_error:
# A 404 is fine, we're about to upload the layer anyway
@@ -222,7 +222,7 @@ class OCIRegistryClient:
"config": {
"mediaType": "application/vnd.docker.container.image.v1+json",
"size": len(config_json),
- "digest": "sha256:{}".format(config_sha),
+ "digest": f"sha256:{config_sha}",
},
"layers": [],
}
@@ -305,7 +305,7 @@ class OCIRegistryClient:
"""Get the current manifest for the given push rule. If manifest
doesn't exist, raises HTTPError.
"""
- url = "/manifests/{}".format(tag)
+ url = f"/manifests/{tag}"
accept = "application/vnd.docker.distribution.manifest.list.v2+json"
response = http_client.requestPath(
url, method="GET", headers={"Accept": accept}
@@ -330,14 +330,14 @@ class OCIRegistryClient:
digest = None
data = json.dumps(registry_manifest).encode("UTF-8")
if tag is None:
- tag = "sha256:{}".format(hashlib.sha256(data).hexdigest())
+ tag = f"sha256:{hashlib.sha256(data).hexdigest()}"
size = len(data)
content_type = registry_manifest.get(
"mediaType", "application/vnd.docker.distribution.manifest.v2+json"
)
try:
manifest_response = http_client.requestPath(
- "/manifests/{}".format(tag),
+ f"/manifests/{tag}",
data=data,
headers={"Content-Type": content_type},
method="PUT",
@@ -430,7 +430,7 @@ class OCIRegistryClient:
config_json = json.dumps(config).encode("UTF-8")
config_sha = hashlib.sha256(config_json).hexdigest()
cls._upload(
- "sha256:{}".format(config_sha),
+ f"sha256:{config_sha}",
push_rule,
BytesIO(config_json),
len(config_json),
@@ -578,11 +578,9 @@ class OCIRegistryClient:
for build in uploaded_builds:
build_manifest = build_request.uploaded_manifests.get(build.id)
if not build_manifest:
- log.info(
- "No build manifest found for build {}".format(build.id)
- )
+ log.info(f"No build manifest found for build {build.id}")
continue
- log.info("Build manifest found for build {}".format(build.id))
+ log.info(f"Build manifest found for build {build.id}")
digest = build_manifest["digest"]
size = build_manifest["size"]
arch = build.distro_arch_series.architecturetag
@@ -697,7 +695,7 @@ class RegistryHTTPClient:
def api_url(self):
"""Returns the base API URL for this registry."""
push_rule = self.push_rule
- return "{}/v2/{}".format(push_rule.registry_url, push_rule.image_name)
+ return f"{push_rule.registry_url}/v2/{push_rule.image_name}"
def request(self, url, *args, **request_kwargs):
username, password = self.credentials
@@ -707,7 +705,7 @@ class RegistryHTTPClient:
def requestPath(self, path, *args, **request_kwargs):
"""Shortcut to do a request to {self.api_url}/{path}."""
- url = "{}{}".format(self.api_url, path)
+ url = f"{self.api_url}{path}"
return self.request(url, *args, **request_kwargs)
@classmethod
@@ -720,7 +718,7 @@ class RegistryHTTPClient:
if domain.endswith(".amazonaws.com"):
return AWSRegistryHTTPClient(push_rule)
try:
- proxy_urlfetch("{}/v2/".format(push_rule.registry_url))
+ proxy_urlfetch(f"{push_rule.registry_url}/v2/")
# No authorization error? Just return the basic RegistryHTTPClient.
return RegistryHTTPClient(push_rule)
except HTTPError as e:
diff --git a/lib/lp/oci/tests/test_ocirecipe.py b/lib/lp/oci/tests/test_ocirecipe.py
index 1850458..432be38 100644
--- a/lib/lp/oci/tests/test_ocirecipe.py
+++ b/lib/lp/oci/tests/test_ocirecipe.py
@@ -1450,7 +1450,7 @@ class TestOCIRecipeProcessors(TestCaseWithFactory):
def test_valid_branch_format_invalid_uses_risk(self):
for risk in ["stable", "candidate", "beta", "edge"]:
recipe = self.factory.makeOCIRecipe()
- path = "refs/heads/{}-20.04".format(risk)
+ path = f"refs/heads/{risk}-20.04"
[git_ref] = self.factory.makeGitRefs(paths=[path])
recipe.git_ref = git_ref
self.assertFalse(recipe.is_valid_branch_format)
@@ -1611,7 +1611,7 @@ class TestOCIRecipeSet(TestCaseWithFactory):
[ref] = self.factory.makeGitRefs(
repository=repository,
# Needs a unique path, otherwise we can't search for it.
- paths=["refs/heads/v1.{}-20.04".format(str(i))],
+ paths=[f"refs/heads/v1.{str(i)}-20.04"],
)
oci_recipes.append(self.factory.makeOCIRecipe(git_ref=ref))
oci_recipe_set = getUtility(IOCIRecipeSet)
diff --git a/lib/lp/oci/tests/test_ociregistryclient.py b/lib/lp/oci/tests/test_ociregistryclient.py
index 5128bcd..197bca9 100644
--- a/lib/lp/oci/tests/test_ociregistryclient.py
+++ b/lib/lp/oci/tests/test_ociregistryclient.py
@@ -737,7 +737,7 @@ class TestOCIRegistryClient(
push_rule = self.build.recipe.push_rules[0]
http_client = RegistryHTTPClient(push_rule)
blobs_url = "{}/blobs/{}".format(http_client.api_url, "test-digest")
- uploads_url = "{}/blobs/uploads/".format(http_client.api_url)
+ uploads_url = f"{http_client.api_url}/blobs/uploads/"
upload_url = "{}/blobs/uploads/{}".format(
http_client.api_url, uuid.uuid4()
)
@@ -785,7 +785,7 @@ class TestOCIRegistryClient(
push_rule = self.build.recipe.push_rules[0]
http_client = RegistryHTTPClient(push_rule)
blobs_url = "{}/blobs/{}".format(http_client.api_url, "test-digest")
- uploads_url = "{}/blobs/uploads/".format(http_client.api_url)
+ uploads_url = f"{http_client.api_url}/blobs/uploads/"
upload_url = "{}/blobs/uploads/{}".format(
http_client.api_url, uuid.uuid4()
)
@@ -836,9 +836,7 @@ class TestOCIRegistryClient(
)
push_rule = self.build.recipe.push_rules[0]
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
put_errors = [
{
@@ -884,9 +882,7 @@ class TestOCIRegistryClient(
)
push_rule = self.build.recipe.push_rules[0]
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=200)
@@ -917,7 +913,7 @@ class TestOCIRegistryClient(
push_rule = self.build.recipe.push_rules[0]
http_client = RegistryHTTPClient(push_rule)
blobs_url = "{}/blobs/{}".format(http_client.api_url, "test-digest")
- uploads_url = "{}/blobs/uploads/".format(http_client.api_url)
+ uploads_url = f"{http_client.api_url}/blobs/uploads/"
upload_url = "{}/blobs/uploads/{}".format(
http_client.api_url, uuid.uuid4()
)
@@ -1055,9 +1051,7 @@ class TestOCIRegistryClient(
)
self.addManifestResponses(push_rule, status_code=201)
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=201)
# Let's try to generate the manifest for just 2 of the 3 builds:
@@ -1168,9 +1162,7 @@ class TestOCIRegistryClient(
)
self.addManifestResponses(push_rule, status_code=201)
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=201)
self.client.uploadManifestList(build_request, [build1, build2])
@@ -1256,9 +1248,7 @@ class TestOCIRegistryClient(
)
self.addManifestResponses(push_rule, status_code=201)
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=201)
self.client.uploadManifestList(build_request, [build1])
@@ -1313,9 +1303,7 @@ class TestOCIRegistryClient(
status=503,
)
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=201)
self.assertRaises(
@@ -1336,7 +1324,7 @@ class TestOCIRegistryClient(
push_rule = self.build.recipe.push_rules[0]
http_client = RegistryHTTPClient(push_rule)
blobs_url = "{}/blobs/{}".format(http_client.api_url, "test-digest")
- uploads_url = "{}/blobs/uploads/".format(http_client.api_url)
+ uploads_url = f"{http_client.api_url}/blobs/uploads/"
upload_url = "{}/blobs/uploads/{}".format(
http_client.api_url, uuid.uuid4()
)
@@ -1431,9 +1419,7 @@ class TestOCIRegistryClient(
)
self.addManifestResponses(push_rule, status_code=201)
- responses.add(
- "GET", "{}/v2/".format(push_rule.registry_url), status=200
- )
+ responses.add("GET", f"{push_rule.registry_url}/v2/", status=200)
self.addManifestResponses(push_rule, status_code=201)
self.client.uploadManifestList(build_request, [build1])
diff --git a/lib/lp/registry/browser/ociproject.py b/lib/lp/registry/browser/ociproject.py
index 43e277a..3c4016a 100644
--- a/lib/lp/registry/browser/ociproject.py
+++ b/lib/lp/registry/browser/ociproject.py
@@ -271,7 +271,7 @@ class OCIProjectIndexView(LaunchpadView):
)
)
url = list(base_url)
- url[1] = "{}@{}".format(self.user.name, base_url.hostname)
+ url[1] = f"{self.user.name}@{base_url.hostname}"
return urlunsplit(url)
@property
diff --git a/lib/lp/registry/browser/product.py b/lib/lp/registry/browser/product.py
index c6dc562..3d55e4b 100644
--- a/lib/lp/registry/browser/product.py
+++ b/lib/lp/registry/browser/product.py
@@ -1993,7 +1993,7 @@ class ProductSetBranchView(
urlutils.join(config.codehosting.git_ssh_root, self.context.name)
)
url = list(base_url)
- url[1] = "{}@{}".format(self.user.name, base_url.hostname)
+ url[1] = f"{self.user.name}@{base_url.hostname}"
return urlunsplit(url)
@property
diff --git a/lib/lp/registry/browser/tests/test_codeofconduct.py b/lib/lp/registry/browser/tests/test_codeofconduct.py
index 7315a26..6dc7dfb 100644
--- a/lib/lp/registry/browser/tests/test_codeofconduct.py
+++ b/lib/lp/registry/browser/tests/test_codeofconduct.py
@@ -182,7 +182,7 @@ class TestAffirmCodeOfConductView(BrowserTestCase):
browser.getControl("I agree to this Code of Conduct").click()
browser.getControl("Affirm").click()
self.assertEqual(
- "http://launchpad.test/~{}/+codesofconduct".format(name),
+ f"http://launchpad.test/~{name}/+codesofconduct",
browser.url,
)
- self.assertIn("affirmed by {}".format(displayname), browser.contents)
+ self.assertIn(f"affirmed by {displayname}", browser.contents)
diff --git a/lib/lp/registry/browser/widgets/ocicredentialswidget.py b/lib/lp/registry/browser/widgets/ocicredentialswidget.py
index 8e01c54..05ed223 100644
--- a/lib/lp/registry/browser/widgets/ocicredentialswidget.py
+++ b/lib/lp/registry/browser/widgets/ocicredentialswidget.py
@@ -80,12 +80,12 @@ class OCICredentialsWidget(BrowserWidget, InputWidget):
def hasInput(self):
"""See `IInputWidget`."""
field_names = [
- "{}.url".format(self.name),
- "{}.region".format(self.name),
- "{}.username".format(self.name),
- "{}.password".format(self.name),
- "{}.confirm_password".format(self.name),
- "{}.delete".format(self.name),
+ f"{self.name}.url",
+ f"{self.name}.region",
+ f"{self.name}.username",
+ f"{self.name}.password",
+ f"{self.name}.confirm_password",
+ f"{self.name}.delete",
]
return any(self.request.form.get(x) for x in field_names)
diff --git a/lib/lp/registry/interfaces/distroseries.py b/lib/lp/registry/interfaces/distroseries.py
index bcb5a24..615ee50 100644
--- a/lib/lp/registry/interfaces/distroseries.py
+++ b/lib/lp/registry/interfaces/distroseries.py
@@ -169,28 +169,24 @@ class DistroSeriesVersionField(UniqueField):
raise LaunchpadValidationError("'%s': %s" % (version, error))
-DistroSeriesTranslationTemplateStatistics = TypedDict(
- "DistroSeriesTranslationTemplateStatistics",
- {
- # The name of the source package that uses the template.
- "sourcepackage": str,
- # The translation domain for the template.
- "translation_domain": str,
- # The name of the template.
- "template_name": str,
- # The number of translation messages for the template.
- "total": int,
- # Whether the template is active.
- "enabled": bool,
- # Whether the template is part of a language pack.
- "languagepack": bool,
- # A number that describes how important this template is; templates
- # with higher priorities should be translated first.
- "priority": int,
- # When the template was last updated.
- "date_last_updated": datetime,
- },
-)
+class DistroSeriesTranslationTemplateStatistics(TypedDict):
+ # The name of the source package that uses the template.
+ sourcepackage: str
+ # The translation domain for the template.
+ translation_domain: str
+ # The name of the template.
+ template_name: str
+ # The number of translation messages for the template.
+ total: int
+ # Whether the template is active.
+ enabled: bool
+ # Whether the template is part of a language pack.
+ languagepack: bool
+ # A number that describes how important this template is; templates
+ # with higher priorities should be translated first.
+ priority: int
+ # When the template was last updated.
+ date_last_updated: datetime
class IDistroSeriesPublic(
diff --git a/lib/lp/registry/model/distributionsourcepackage.py b/lib/lp/registry/model/distributionsourcepackage.py
index bc80a4f..5d00db5 100644
--- a/lib/lp/registry/model/distributionsourcepackage.py
+++ b/lib/lp/registry/model/distributionsourcepackage.py
@@ -115,7 +115,7 @@ class DistributionSourcePackage(
self.sourcepackagename = sourcepackagename
def __repr__(self):
- return "<{} '{}'>".format(self.__class__.__name__, self.display_name)
+ return f"<{self.__class__.__name__} '{self.display_name}'>"
@property
def name(self):
diff --git a/lib/lp/registry/model/person.py b/lib/lp/registry/model/person.py
index 4281303..779616d 100644
--- a/lib/lp/registry/model/person.py
+++ b/lib/lp/registry/model/person.py
@@ -4890,7 +4890,7 @@ class PersonSet:
# We should only have one result
if email_results.count() > 1:
- raise ValueError("Multiple records for {}".format(email))
+ raise ValueError(f"Multiple records for {email}")
# If we don't have any results at all, we have no data!
if email_results.is_empty():
@@ -5055,7 +5055,7 @@ class PersonSet:
query_arguments = {
"fingerprint": "on",
"op": "index",
- "search": "0x{}".format(key.fingerprint),
+ "search": f"0x{key.fingerprint}",
}
req.prepare_url(keyserver_url, query_arguments)
urls.append(req.url)
diff --git a/lib/lp/registry/scripts/closeaccount.py b/lib/lp/registry/scripts/closeaccount.py
index 7df78d0..6f92c73 100644
--- a/lib/lp/registry/scripts/closeaccount.py
+++ b/lib/lp/registry/scripts/closeaccount.py
@@ -469,7 +469,7 @@ def close_account(username, log):
count = result.get_one()[0]
if count:
referenced_ppa_ids.add(ppa_id)
- reference = "{}.{}".format(src_tab, src_col)
+ reference = f"{src_tab}.{src_col}"
log.warning(
"PPA %d is still referenced by %d %s values"
% (ppa_id, count, reference)
@@ -504,7 +504,7 @@ def close_account(username, log):
)
count = result.get_one()[0]
if count:
- reference_counts.append(("product.{}".format(col), count))
+ reference_counts.append((f"product.{col}", count))
skip.add(("product", col))
for col in "driver", "owner":
count = store.find(
@@ -514,7 +514,7 @@ def close_account(username, log):
getattr(ProductSeries, col) == person,
).count()
if count:
- reference_counts.append(("productseries.{}".format(col), count))
+ reference_counts.append((f"productseries.{col}", count))
skip.add(("productseries", col))
# Check announcements, skipping the ones
@@ -596,7 +596,7 @@ def close_account(username, log):
.count()
)
if count:
- reference_counts.append(("branch.{}".format(col_name), count))
+ reference_counts.append((f"branch.{col_name}", count))
skip.add(("branch", col_name))
# Check Specification, skipping the ones
@@ -636,7 +636,7 @@ def close_account(username, log):
)
count = result.get_one()[0]
if count:
- reference_counts.append(("{}.{}".format(src_tab, src_col), count))
+ reference_counts.append((f"{src_tab}.{src_col}", count))
if reference_counts:
for reference, count in reference_counts:
diff --git a/lib/lp/registry/scripts/createbotaccount.py b/lib/lp/registry/scripts/createbotaccount.py
index c53484d..03b0520 100644
--- a/lib/lp/registry/scripts/createbotaccount.py
+++ b/lib/lp/registry/scripts/createbotaccount.py
@@ -77,15 +77,15 @@ class CreateBotAccountScript(LaunchpadScript):
openid_suffix = six.ensure_text(self.options.openid)
if "/" in openid_suffix:
raise LaunchpadScriptFailure(
- "Invalid OpenID suffix {}".format(openid_suffix)
+ f"Invalid OpenID suffix {openid_suffix}"
)
- displayname = "\U0001f916 {}".format(username) # U+1f916==ROBOT FACE
+ displayname = f"\U0001f916 {username}" # U+1f916==ROBOT FACE
if self.options.email:
emailaddress = six.ensure_text(self.options.email)
else:
- emailaddress = "webops+{}@canonical.com".format(username)
+ emailaddress = f"webops+{username}@canonical.com"
if self.options.teams:
teamnames = [
@@ -107,16 +107,12 @@ class CreateBotAccountScript(LaunchpadScript):
person = person_set.getByName(username)
if person is None:
- raise LaunchpadScriptFailure(
- "Account {} does not exist".format(username)
- )
+ raise LaunchpadScriptFailure(f"Account {username} does not exist")
if person.account is None:
- raise LaunchpadScriptFailure(
- "Person {} has no Account".format(username)
- )
+ raise LaunchpadScriptFailure(f"Person {username} has no Account")
if person.account.openid_identifiers.count() != 1:
raise LaunchpadScriptFailure(
- "Account {} has invalid OpenID identifiers".format(username)
+ f"Account {username} has invalid OpenID identifiers"
)
openid_identifier = person.account.openid_identifiers.one()
@@ -142,9 +138,7 @@ class CreateBotAccountScript(LaunchpadScript):
for teamname in teamnames:
team = person_set.getByName(teamname)
if team is None or not team.is_team:
- raise LaunchpadScriptFailure(
- "{} is not a team".format(teamname)
- )
+ raise LaunchpadScriptFailure(f"{teamname} is not a team")
team.addMember(person, person)
# Add ssh key
@@ -154,5 +148,5 @@ class CreateBotAccountScript(LaunchpadScript):
):
sshkey_set.new(person, sshkey_text, send_notification=False)
- self.logger.info("Created or updated {}".format(canonical_url(person)))
+ self.logger.info(f"Created or updated {canonical_url(person)}")
self.txn.commit()
diff --git a/lib/lp/registry/scripts/suspendbotaccount.py b/lib/lp/registry/scripts/suspendbotaccount.py
index 2842656..d7bfac9 100644
--- a/lib/lp/registry/scripts/suspendbotaccount.py
+++ b/lib/lp/registry/scripts/suspendbotaccount.py
@@ -47,5 +47,5 @@ class SuspendBotAccountScript(LaunchpadScript):
"Suspended by suspend-bot-account.py",
)
- self.logger.info("Suspended {}".format(canonical_url(person)))
+ self.logger.info(f"Suspended {canonical_url(person)}")
self.txn.commit()
diff --git a/lib/lp/services/auth/model.py b/lib/lp/services/auth/model.py
index 2b4fe2b..1bfba44 100644
--- a/lib/lp/services/auth/model.py
+++ b/lib/lp/services/auth/model.py
@@ -83,7 +83,7 @@ class AccessToken(StormBase):
elif IProduct.providedBy(target):
self.project = target
else:
- raise TypeError("Unsupported target: {!r}".format(target))
+ raise TypeError(f"Unsupported target: {target!r}")
self.scopes = scopes
self.date_created = UTC_NOW
self.date_expires = date_expires
@@ -194,7 +194,7 @@ class AccessTokenSet:
elif IProduct.providedBy(target):
clauses.append(AccessToken.project == target)
else:
- raise TypeError("Unsupported target: {!r}".format(target))
+ raise TypeError(f"Unsupported target: {target!r}")
if visible_by_user is not None:
# Evaluate if user owns the target (directly or indirectly).
diff --git a/lib/lp/services/fields/__init__.py b/lib/lp/services/fields/__init__.py
index dff799d..4c76094 100644
--- a/lib/lp/services/fields/__init__.py
+++ b/lib/lp/services/fields/__init__.py
@@ -1030,10 +1030,7 @@ class SnapBuildChannelsField(Dict):
else:
description = description_prefix + "\n"
description += "Supported snap names: {}".format(
- ", ".join(
- "'{}'".format(snap_name)
- for snap_name in sorted(snap_names)
- )
+ ", ".join(f"'{snap_name}'" for snap_name in sorted(snap_names))
)
# TODO: custom validation that validates `_byarch` values
# as dictionaries mapping architecture names to the current
diff --git a/lib/lp/services/librarianserver/librariangc.py b/lib/lp/services/librarianserver/librariangc.py
index 1cec619..bf2c4cf 100644
--- a/lib/lp/services/librarianserver/librariangc.py
+++ b/lib/lp/services/librarianserver/librariangc.py
@@ -599,9 +599,7 @@ class UnreferencedContentPruner:
swift.quiet_swiftclient(
swift_connection.delete_object, container, name
)
- removed.append(
- "Swift {}".format(connection_pool.os_auth_url)
- )
+ removed.append(f"Swift {connection_pool.os_auth_url}")
except swiftclient.ClientException as x:
if x.http_status != 404:
log.exception(
diff --git a/lib/lp/services/librarianserver/storage.py b/lib/lp/services/librarianserver/storage.py
index cabded8..52d6d20 100644
--- a/lib/lp/services/librarianserver/storage.py
+++ b/lib/lp/services/librarianserver/storage.py
@@ -336,6 +336,6 @@ def _relFileLocation(file_id):
file_id = int(file_id)
assert (
file_id <= 4294967295
- ), "file id {!r} has exceeded filesystem db maximum".format(file_id)
+ ), f"file id {file_id!r} has exceeded filesystem db maximum"
h = "%08x" % file_id
return "%s/%s/%s/%s" % (h[:2], h[2:4], h[4:6], h[6:])
diff --git a/lib/lp/services/librarianserver/swift.py b/lib/lp/services/librarianserver/swift.py
index acd55e2..54107bc 100644
--- a/lib/lp/services/librarianserver/swift.py
+++ b/lib/lp/services/librarianserver/swift.py
@@ -112,7 +112,7 @@ def to_swift(
# an aborted job.
dirnames.sort()
- log.debug("Scanning {} for matching files".format(dirpath))
+ log.debug(f"Scanning {dirpath} for matching files")
_filename_re = re.compile("^[0-9a-f]{2}$")
@@ -135,14 +135,12 @@ def to_swift(
rel_fs_path = fs_path[len(fs_root) + 1 :]
hex_lfc = "".join(rel_fs_path.split("/"))
if len(hex_lfc) != 8:
- log.warning(
- "Filename length fail, skipping {}".format(fs_path)
- )
+ log.warning(f"Filename length fail, skipping {fs_path}")
continue
try:
lfc = int(hex_lfc, 16)
except ValueError:
- log.warning("Invalid hex fail, skipping {}".format(fs_path))
+ log.warning(f"Invalid hex fail, skipping {fs_path}")
continue
if instance_id is not None and num_instances is not None:
if (lfc % num_instances) != instance_id:
@@ -154,13 +152,13 @@ def to_swift(
log.debug("Skipping recent upload %s" % fs_path)
continue
- log.debug("Found {} ({})".format(lfc, filename))
+            log.debug(f"Found {lfc} ({filename})")
if (
IStandbyStore(LibraryFileContent).get(LibraryFileContent, lfc)
is None
):
- log.info("{} exists on disk but not in the db".format(lfc))
+ log.info(f"{lfc} exists on disk but not in the db")
continue
_to_swift_file(log, swift_connection, lfc, fs_path)
@@ -182,11 +180,11 @@ def _to_swift_file(log, swift_connection, lfc_id, fs_path):
try:
quiet_swiftclient(swift_connection.head_container, container)
- log.debug2("{} container already exists".format(container))
+ log.debug2(f"{container} container already exists")
except swiftclient.ClientException as x:
if x.http_status != 404:
raise
- log.info("Creating {} container".format(container))
+ log.info(f"Creating {container} container")
swift_connection.put_container(container)
try:
@@ -289,7 +287,7 @@ def _put(log, swift_connection, lfc_id, container, obj_name, fs_path):
)
raise AssertionError("md5 mismatch")
- manifest = "{}/{}/".format(quote(container), quote(obj_name))
+ manifest = f"{quote(container)}/{quote(obj_name)}/"
manifest_headers = {"X-Object-Manifest": manifest}
swift_connection.put_object(
container, obj_name, b"", 0, headers=manifest_headers
diff --git a/lib/lp/services/librarianserver/tests/test_gc.py b/lib/lp/services/librarianserver/tests/test_gc.py
index 90f2039..23a7eac 100644
--- a/lib/lp/services/librarianserver/tests/test_gc.py
+++ b/lib/lp/services/librarianserver/tests/test_gc.py
@@ -79,7 +79,7 @@ class TestLibrarianGarbageCollectionBase:
if not os.path.exists(path):
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
- content_bytes = "{} content".format(content.id).encode("UTF-8")
+ content_bytes = f"{content.id} content".encode()
with open(path, "wb") as f:
f.write(content_bytes)
os.utime(path, (0, 0)) # Ancient past, never considered new.
diff --git a/lib/lp/services/librarianserver/tests/test_swift.py b/lib/lp/services/librarianserver/tests/test_swift.py
index 673bd52..e3ce72d 100644
--- a/lib/lp/services/librarianserver/tests/test_swift.py
+++ b/lib/lp/services/librarianserver/tests/test_swift.py
@@ -46,7 +46,7 @@ class TestFeedSwift(TestCase):
self.librarian_client = LibrarianClient()
self.contents = [str(i).encode("ASCII") * i for i in range(1, 5)]
self.lfa_ids = [
- self.add_file("file_{}".format(i), content, when=the_past)
+ self.add_file(f"file_{i}", content, when=the_past)
for i, content in enumerate(self.contents)
]
self.lfas = [
@@ -319,15 +319,15 @@ class TestFeedSwift(TestCase):
self.assertEqual(obj, b"")
# The segments we expect are all in their expected locations.
- _, obj1 = swift_client.get_object(container, "{}/0000".format(name))
- _, obj2 = swift_client.get_object(container, "{}/0001".format(name))
- _, obj3 = swift_client.get_object(container, "{}/0002".format(name))
+ _, obj1 = swift_client.get_object(container, f"{name}/0000")
+ _, obj2 = swift_client.get_object(container, f"{name}/0001")
+ _, obj3 = swift_client.get_object(container, f"{name}/0002")
self.assertRaises(
swiftclient.ClientException,
swift.quiet_swiftclient,
swift_client.get_object,
container,
- "{}/0003".format(name),
+ f"{name}/0003",
)
# Our object round tripped
diff --git a/lib/lp/services/openid/fetcher.py b/lib/lp/services/openid/fetcher.py
index d74aca1..eadf129 100644
--- a/lib/lp/services/openid/fetcher.py
+++ b/lib/lp/services/openid/fetcher.py
@@ -38,7 +38,7 @@ def set_default_openid_fetcher():
instance_name = "development"
else:
instance_name = config.instance_name
- cert_path = "configs/{}/launchpad.crt".format(instance_name)
+ cert_path = f"configs/{instance_name}/launchpad.crt"
cafile = os.path.join(config.root, cert_path)
fetcher.urlopen = partial(urlopen, cafile=cafile)
setDefaultFetcher(fetcher)
diff --git a/lib/lp/services/tests/test_timeout.py b/lib/lp/services/tests/test_timeout.py
index 017aa1c..0c68401 100644
--- a/lib/lp/services/tests/test_timeout.py
+++ b/lib/lp/services/tests/test_timeout.py
@@ -425,7 +425,7 @@ class TestTimeout(TestCase):
url = "ftp://localhost/"
e = self.assertRaises(InvalidSchema, urlfetch, url)
self.assertEqual(
- "No connection adapters were found for {!r}".format(url), str(e)
+ f"No connection adapters were found for {url!r}", str(e)
)
def test_urlfetch_supports_ftp_urls_if_allow_ftp(self):
@@ -471,7 +471,7 @@ class TestTimeout(TestCase):
url = "file://" + test_path
e = self.assertRaises(InvalidSchema, urlfetch, url)
self.assertEqual(
- "No connection adapters were found for {!r}".format(url), str(e)
+ f"No connection adapters were found for {url!r}", str(e)
)
def test_urlfetch_supports_file_urls_if_allow_file(self):
diff --git a/lib/lp/services/twistedsupport/features.py b/lib/lp/services/twistedsupport/features.py
index 8094409..c809139 100644
--- a/lib/lp/services/twistedsupport/features.py
+++ b/lib/lp/services/twistedsupport/features.py
@@ -52,13 +52,13 @@ def _install_and_reschedule(controller, script_name):
try:
refresh = float(refresh)
except ValueError:
- log.msg("Invalid value {!r} for twisted.flags.refresh".format(refresh))
+ log.msg(f"Invalid value {refresh!r} for twisted.flags.refresh")
refresh = 60.0
global _last_refresh
if refresh != _last_refresh:
if _last_refresh is not None:
- log.msg("twisted.flags.refresh changed to {}".format(refresh))
+ log.msg(f"twisted.flags.refresh changed to {refresh}")
_last_refresh = refresh
reactor.callLater(refresh, update, script_name)
diff --git a/lib/lp/services/utils.py b/lib/lp/services/utils.py
index 89d3e3d..ab3878a 100644
--- a/lib/lp/services/utils.py
+++ b/lib/lp/services/utils.py
@@ -59,7 +59,7 @@ class AutoDecorateMetaClass(type):
def __new__(mcs, class_name, bases, class_dict):
class_dict = dict(class_dict)
- decorators = class_dict.pop("_{}__decorators".format(class_name), None)
+ decorators = class_dict.pop(f"_{class_name}__decorators", None)
if decorators is not None:
for name, value in class_dict.items():
if type(value) == FunctionType:
diff --git a/lib/lp/services/webapp/tests/test_candid.py b/lib/lp/services/webapp/tests/test_candid.py
index ae05d57..ea1898b 100644
--- a/lib/lp/services/webapp/tests/test_candid.py
+++ b/lib/lp/services/webapp/tests/test_candid.py
@@ -345,10 +345,10 @@ class TestCandidCallbackView(TestCaseWithFactory):
browser = self.getUserBrowser(user=person)
browser.addHeader(
"Cookie",
- "{}={}".format(config.launchpad_session.cookie, session.client_id),
+ f"{config.launchpad_session.cookie}={session.client_id}",
)
browser.open(
- "http://launchpad.test/+candid-callback?{}".format(urlencode(form))
+ f"http://launchpad.test/+candid-callback?{urlencode(form)}"
)
return request, browser
diff --git a/lib/lp/services/webapp/tests/test_error.py b/lib/lp/services/webapp/tests/test_error.py
index dfa0290..0ebc506 100644
--- a/lib/lp/services/webapp/tests/test_error.py
+++ b/lib/lp/services/webapp/tests/test_error.py
@@ -109,7 +109,7 @@ class TestDatabaseErrorViews(TestCase):
else:
self.add_retry_failure_details(bouncer)
raise TimeoutException(
- "Launchpad did not come up after {} attempts.".format(retries)
+ f"Launchpad did not come up after {retries} attempts."
)
def test_disconnectionerror_view_integration(self):
diff --git a/lib/lp/snappy/tests/test_snapbuildbehaviour.py b/lib/lp/snappy/tests/test_snapbuildbehaviour.py
index 1729666..5ec5804 100644
--- a/lib/lp/snappy/tests/test_snapbuildbehaviour.py
+++ b/lib/lp/snappy/tests/test_snapbuildbehaviour.py
@@ -372,9 +372,7 @@ class TestAsyncSnapBuildBehaviour(StatsMixin, TestSnapBuildBehaviourBase):
with dbuser(config.builddmaster.dbuser):
args = yield job.extraBuildArgs()
for key, value in args.items():
- self.assertFalse(
- isProxy(value), "{} is a security proxy".format(key)
- )
+ self.assertFalse(isProxy(value), f"{key} is a security proxy")
@defer.inlineCallbacks
def test_extraBuildArgs_bzr(self):
diff --git a/lib/lp/snappy/tests/test_snapstoreclient.py b/lib/lp/snappy/tests/test_snapstoreclient.py
index 86dbc4a..d6f75bd 100644
--- a/lib/lp/snappy/tests/test_snapstoreclient.py
+++ b/lib/lp/snappy/tests/test_snapstoreclient.py
@@ -62,7 +62,7 @@ class MacaroonsVerify(Matcher):
self.key = key
def __str__(self):
- return "MacaroonsVerify({!r})".format(self.key)
+ return f"MacaroonsVerify({self.key!r})"
def match(self, macaroons):
mismatch = Contains("root").match(macaroons)
diff --git a/lib/lp/soyuz/model/packagediffjob.py b/lib/lp/soyuz/model/packagediffjob.py
index 794dc99..616e0c1 100644
--- a/lib/lp/soyuz/model/packagediffjob.py
+++ b/lib/lp/soyuz/model/packagediffjob.py
@@ -68,9 +68,7 @@ class PackageDiffJob(PackageDiffJobDerived):
)
]
if diff.requester is not None:
- parts.append(
- " for {requester}".format(requester=diff.requester.name)
- )
+ parts.append(f" for {diff.requester.name}")
return "<{repr}>".format(repr="".join(parts))
@property
diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py
index 68134c5..0456d75 100644
--- a/lib/lp/soyuz/tests/test_archive.py
+++ b/lib/lp/soyuz/tests/test_archive.py
@@ -3875,7 +3875,7 @@ class TestGetPublishedSourcesWebService(TestCaseWithFactory):
# via a wrapper to improving performance (by reducing the
# number of queries issued)
ppa = self.createTestingPPA()
- ppa_url = "/~{}/+archive/ubuntu/ppa".format(ppa.owner.name)
+ ppa_url = f"/~{ppa.owner.name}/+archive/ubuntu/ppa"
webservice = webservice_for_person(
ppa.owner, permission=OAuthPermission.READ_PRIVATE
)
diff --git a/lib/lp/soyuz/tests/test_packagediff.py b/lib/lp/soyuz/tests/test_packagediff.py
index c5db107..4ca0dc7 100644
--- a/lib/lp/soyuz/tests/test_packagediff.py
+++ b/lib/lp/soyuz/tests/test_packagediff.py
@@ -311,7 +311,7 @@ class TestPackageDiffsView(BrowserTestCase):
"""Verify that expected text exists in the packages diffs view"""
login_person(self.user)
browser = self.getViewBrowser(self.to_archive, "+packages")
- expander_id = "pub{spph_id}-expander".format(spph_id=self.spph_id)
+ expander_id = f"pub{self.spph_id}-expander"
browser.getLink(id=expander_id).click()
self.assertIn(expected_text, extract_text(browser.contents))
return browser
@@ -355,7 +355,7 @@ class TestPackageDiffsView(BrowserTestCase):
)
# There is no link while diff is pending
- expected_text = "Available diffs\n{} (pending)".format(expected_title)
+ expected_text = f"Available diffs\n{expected_title} (pending)"
browser = self.assert_text_in_diffs_view(expected_text)
self.assertRaises(LinkNotFoundError, browser.getLink, expected_title)
@@ -363,7 +363,7 @@ class TestPackageDiffsView(BrowserTestCase):
login_person(self.user)
self.perform_fake_diff(diff, "biscuit_1.0-3_1.0-4.diff.gz")
transaction.commit()
- expected_text = "Available diffs\n{} (3 bytes)".format(expected_title)
+ expected_text = f"Available diffs\n{expected_title} (3 bytes)"
browser = self.assert_text_in_diffs_view(expected_text)
url = browser.getLink(expected_title).url
self.assertIn("/+files/biscuit_1.0-3_1.0-4.diff.gz", url)
diff --git a/lib/lp/testing/factory.py b/lib/lp/testing/factory.py
index 0e9236e..cc882f1 100644
--- a/lib/lp/testing/factory.py
+++ b/lib/lp/testing/factory.py
@@ -1867,7 +1867,7 @@ class LaunchpadObjectFactory(ObjectFactory):
def makeDiff(self, size="small"):
diff_path = os.path.join(
- os.path.dirname(__file__), "data/{}.diff".format(size)
+ os.path.dirname(__file__), f"data/{size}.diff"
)
with open(os.path.join(diff_path), "rb") as diff:
diff_text = diff.read()
@@ -6555,7 +6555,7 @@ class LaunchpadObjectFactory(ObjectFactory):
oci_project = self.makeOCIProject()
if git_ref is None:
component = self.getUniqueUnicode()
- paths = ["refs/heads/{}-20.04".format(component)]
+ paths = [f"refs/heads/{component}-20.04"]
[git_ref] = self.makeGitRefs(paths=paths)
if build_file is None:
build_file = self.getUniqueUnicode("build_file_for")
diff --git a/lib/lp/testing/tests/test_html5browser.py b/lib/lp/testing/tests/test_html5browser.py
index d6ebc11..a9d7dd9 100644
--- a/lib/lp/testing/tests/test_html5browser.py
+++ b/lib/lp/testing/tests/test_html5browser.py
@@ -51,7 +51,7 @@ class TestBrowser(TestCase):
"""
)
self.file.flush()
- self.file_uri = "file://{}".format(self.file.name)
+ self.file_uri = f"file://{self.file.name}"
self.addCleanup(self.file.close)
def test_load_test_results(self):
diff --git a/lib/lp/translations/browser/tests/test_sharing_details.py b/lib/lp/translations/browser/tests/test_sharing_details.py
index d3126a5..04f0175 100644
--- a/lib/lp/translations/browser/tests/test_sharing_details.py
+++ b/lib/lp/translations/browser/tests/test_sharing_details.py
@@ -647,9 +647,7 @@ class TestSourcePackageTranslationSharingDetailsViewPackagingLinks(
elif self.user_type == "package_owner":
self.user = self.sourcepackage.owner
else:
- raise AssertionError(
- "Unknown user type: {}".format(self.user_type)
- )
+ raise AssertionError(f"Unknown user type: {self.user_type}")
def _getExpectedPackagingLink(self, id, url, icon, text, visible):
url = "%s/%s" % (canonical_url(self.sourcepackage), url)
diff --git a/lib/lp/translations/browser/tests/test_translationmessage_view.py b/lib/lp/translations/browser/tests/test_translationmessage_view.py
index 746b0a6..6c4b0c8 100644
--- a/lib/lp/translations/browser/tests/test_translationmessage_view.py
+++ b/lib/lp/translations/browser/tests/test_translationmessage_view.py
@@ -313,7 +313,7 @@ class TestCurrentTranslationMessageView(TestCaseWithFactory):
external_pofile,
external_potmsgset,
translator=self.owner,
- translations=["rejected message {}".format(i)],
+ translations=[f"rejected message {i}"],
date_created=externally_used_message.date_reviewed
- timedelta(days=1 + i),
)
diff --git a/lib/lp/translations/interfaces/currenttranslations.py b/lib/lp/translations/interfaces/currenttranslations.py
index bd27e2f..401d6ac 100644
--- a/lib/lp/translations/interfaces/currenttranslations.py
+++ b/lib/lp/translations/interfaces/currenttranslations.py
@@ -9,15 +9,12 @@ __all__ = [
"ICurrentTranslations",
]
-CurrentTranslationKey = NamedTuple(
- "CurrentTranslationKey",
- (
- ("potmsgset_id", int),
- ("potemplate_id", Optional[int]),
- ("language_id", int),
- ("side", int),
- ),
-)
+
+class CurrentTranslationKey(NamedTuple):
+ potmsgset_id: int
+ potemplate_id: Optional[int]
+ language_id: int
+ side: int
class ICurrentTranslations(Interface):
diff --git a/scripts/migrate-librarian-content-md5.py b/scripts/migrate-librarian-content-md5.py
index 90846d8..e6c889f 100755
--- a/scripts/migrate-librarian-content-md5.py
+++ b/scripts/migrate-librarian-content-md5.py
@@ -29,7 +29,7 @@ def main(path, minimumID=0):
continue
filename = os.path.join(dirpath, filename)
md5sum = subprocess.check_output(
- ["md5sum", filename], universal_newlines=True
+ ["md5sum", filename], text=True
).split(" ", 1)[0]
yield databaseID, md5sum
diff --git a/test_on_merge.py b/test_on_merge.py
index 540a28c..6179b35 100755
--- a/test_on_merge.py
+++ b/test_on_merge.py
@@ -146,7 +146,7 @@ def run_test_process():
"--server-args=-screen 0 1024x768x24",
os.path.join(HERE, "bin", "test"),
] + sys.argv[1:]
- print("Running command:", " ".join(shlex.quote(arg) for arg in cmd))
+ print("Running command:", shlex.join(cmd))
# Run the test suite. Make the suite the leader of a new process group
# so that we can signal the group without signaling ourselves.
diff --git a/utilities/link-external-sourcecode b/utilities/link-external-sourcecode
index 06af63f..50c275b 100755
--- a/utilities/link-external-sourcecode
+++ b/utilities/link-external-sourcecode
@@ -13,7 +13,7 @@ from sys import stderr, stdout
def get_main_worktree(branch_dir):
"""Return the main worktree directory, otherwise `None`."""
worktree_list = subprocess.check_output(
- ["git", "worktree", "list", "--porcelain"], universal_newlines=True
+ ["git", "worktree", "list", "--porcelain"], text=True
)
main_worktree = worktree_list.splitlines()[0].split(" ", 1)[1]
if realpath(main_worktree) != realpath(branch_dir):
diff --git a/utilities/massage-bug-import-xml b/utilities/massage-bug-import-xml
index 8142896..cb4c0a2 100755
--- a/utilities/massage-bug-import-xml
+++ b/utilities/massage-bug-import-xml
@@ -31,15 +31,15 @@ def truncate(text, message=None):
def problem(message):
- sys.stderr.write("{}\n".format(message))
+ sys.stderr.write(f"{message}\n")
def problem_detail(message):
- sys.stderr.write(" {}\n".format(message))
+ sys.stderr.write(f" {message}\n")
def problem_resolution(message):
- sys.stderr.write(" --> {}\n".format(message))
+ sys.stderr.write(f" --> {message}\n")
def problem_resolved():
diff --git a/utilities/publish-to-swift b/utilities/publish-to-swift
index 566d111..4577279 100755
--- a/utilities/publish-to-swift
+++ b/utilities/publish-to-swift
@@ -119,12 +119,12 @@ def prune_old_files_from_swift(options, container_name, object_dir, suffix):
deployed_mtime = None
for stats in swift.list(
container=container_name,
- options={"prefix": "{}/".format(object_dir)},
+ options={"prefix": f"{object_dir}/"},
):
if not stats["success"]:
raise stats["error"]
for item in stats["listing"]:
- if not item["name"].startswith("{}/".format(object_dir)):
+ if not item["name"].startswith(f"{object_dir}/"):
continue
# Only consider pruning builds that have the given suffix.
# (For example, if we're publishing a build with the suffix
@@ -202,7 +202,7 @@ def main():
if key.startswith("OS_"):
if key not in safe_keys:
value = "<redacted>"
- print("{}: {}".format(key, value))
+ print(f"{key}: {value}")
options = vars(args)
process_options(options)