Message #28878
[Merge] ~cjwatson/launchpad:black-cronscripts-scripts into launchpad:master
Colin Watson has proposed merging ~cjwatson/launchpad:black-cronscripts-scripts into launchpad:master.
Commit message:
{cronscripts,scripts}: Apply black
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/+git/launchpad/+merge/427292
--
Your team Launchpad code reviewers is requested to review the proposed merge of ~cjwatson/launchpad:black-cronscripts-scripts into launchpad:master.
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 6ac57c2..2c740b8 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -96,3 +96,5 @@ c7dedd4d8a3c642f77570e525be0af9714703eba
b56a741985ca580c281f142bea589b1ef05d3e93
# apply black to lp.xmlrpc
0bf877facbd96ece340dd26a7429ebbf0fb9b65a
+# apply black to {cronscripts,scripts}
+86d834967ddae3eecd13deda5ac9eefea538195d
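The two lines added above register this branch's reformatting commit in .git-blame-ignore-revs. Configuring git with "git config blame.ignoreRevsFile .git-blame-ignore-revs" (or passing --ignore-revs-file to git blame directly) makes blame skip such mechanical commits, so lines stay attributed to their last substantive change rather than to the black run.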
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ffb69fd..4cc7030 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -40,29 +40,33 @@ repos:
hooks:
- id: black
files: |
- (?x)^lib/lp/(
- answers
- |app
- |archivepublisher
- |archiveuploader
- |blueprints
- |bugs
- |buildmaster
- |charms
- |code
- |codehosting
- |coop
- |oci
- |registry
+ (?x)^(
+ cronscripts
+ |lib/lp/(
+ answers
+ |app
+ |archivepublisher
+ |archiveuploader
+ |blueprints
+ |bugs
+ |buildmaster
+ |charms
+ |code
+ |codehosting
+ |coop
+ |oci
+ |registry
+ |scripts
+ |services
+ |snappy
+ |soyuz
+ |testing
+ |testopenid
+ |tests
+ |translations
+ |xmlrpc
+ )
|scripts
- |services
- |snappy
- |soyuz
- |testing
- |testopenid
- |tests
- |translations
- |xmlrpc
)/
- repo: https://github.com/PyCQA/isort
rev: 5.9.2
@@ -79,58 +83,66 @@ repos:
- --multi-line=8
- --dont-order-by-type
exclude: |
- (?x)^lib/lp/(
- answers
- |app
- |archivepublisher
- |archiveuploader
- |blueprints
- |bugs
- |buildmaster
- |charms
- |code
- |codehosting
- |coop
- |oci
- |registry
+ (?x)^(
+ cronscripts
+ |lib/lp/(
+ answers
+ |app
+ |archivepublisher
+ |archiveuploader
+ |blueprints
+ |bugs
+ |buildmaster
+ |charms
+ |code
+ |codehosting
+ |coop
+ |oci
+ |registry
+ |scripts
+ |services
+ |snappy
+ |soyuz
+ |testing
+ |testopenid
+ |tests
+ |translations
+ |xmlrpc
+ )
|scripts
- |services
- |snappy
- |soyuz
- |testing
- |testopenid
- |tests
- |translations
- |xmlrpc
)/
- id: isort
alias: isort-black
name: isort (black)
args: [--profile, black]
files: |
- (?x)^lib/lp/(
- answers
- |app
- |archivepublisher
- |archiveuploader
- |blueprints
- |bugs
- |buildmaster
- |charms
- |code
- |codehosting
- |coop
- |oci
- |registry
+ (?x)^(
+ cronscripts
+ |lib/lp/(
+ answers
+ |app
+ |archivepublisher
+ |archiveuploader
+ |blueprints
+ |bugs
+ |buildmaster
+ |charms
+ |code
+ |codehosting
+ |coop
+ |oci
+ |registry
+ |scripts
+ |services
+ |snappy
+ |soyuz
+ |testing
+ |testopenid
+ |tests
+ |translations
+ |xmlrpc
+ )
|scripts
- |services
- |snappy
- |soyuz
- |testing
- |testopenid
- |tests
- |translations
- |xmlrpc
)/
- repo: https://github.com/PyCQA/flake8
rev: 3.9.2
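The files and exclude values above are interpreted by pre-commit as Python regular expressions in verbose mode ((?x)), where newlines and indentation are ignored. The rewrite widens the anchor from lib/lp/ to a top-level alternation covering cronscripts/, scripts/, and the lib/lp/ subtrees; note that "scripts" now appears both inside the lib/lp/ group and at the top level, so lib/lp/scripts/ and the top-level scripts/ directory both match. A minimal sketch of how the widened pattern behaves, abbreviated to two lib/lp components and using hypothetical file names:

    import re

    # Abbreviated form of the new pre-commit pattern (a sketch, not the
    # full component list from the diff above).
    pattern = re.compile(
        r"""(?x)^(
        cronscripts
        |lib/lp/(
            answers
            |app
        )
        |scripts
        )/"""
    )

    assert pattern.match("cronscripts/checkwatches.py")
    assert pattern.match("lib/lp/app/browser/root.py")
    assert pattern.match("scripts/some-script.py")
    # No alternative matches paths outside the listed trees:
    assert not pattern.match("utilities/foo.py")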
diff --git a/cronscripts/allocate-revision-karma.py b/cronscripts/allocate-revision-karma.py
index d4ed075..34f40c0 100755
--- a/cronscripts/allocate-revision-karma.py
+++ b/cronscripts/allocate-revision-karma.py
@@ -8,8 +8,8 @@ import _pythonpath # noqa: F401
from lp.code.scripts.revisionkarma import RevisionKarmaAllocator
from lp.services.config import config
-
-if __name__ == '__main__':
- script = RevisionKarmaAllocator('allocate-revision-karma',
- dbuser=config.revisionkarma.dbuser)
+if __name__ == "__main__":
+ script = RevisionKarmaAllocator(
+ "allocate-revision-karma", dbuser=config.revisionkarma.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/buildd-retry-depwait.py b/cronscripts/buildd-retry-depwait.py
index 0d51bde..127e26c 100755
--- a/cronscripts/buildd-retry-depwait.py
+++ b/cronscripts/buildd-retry-depwait.py
@@ -10,17 +10,21 @@ from lp.soyuz.scripts.retrydepwait import RetryDepwaitTunableLoop
class RetryDepwait(LaunchpadCronScript):
-
def add_my_options(self):
self.parser.add_option(
- "-n", "--dry-run", action="store_true",
- dest="dry_run", default=False,
- help="Don't commit changes to the DB.")
+ "-n",
+ "--dry-run",
+ action="store_true",
+ dest="dry_run",
+ default=False,
+ help="Don't commit changes to the DB.",
+ )
def main(self):
updater = RetryDepwaitTunableLoop(self.logger, self.options.dry_run)
updater.run()
-if __name__ == '__main__':
- script = RetryDepwait('retry-depwait', dbuser='retry_depwait')
+
+if __name__ == "__main__":
+ script = RetryDepwait("retry-depwait", dbuser="retry_depwait")
script.lock_and_run()
diff --git a/cronscripts/check-teamparticipation.py b/cronscripts/check-teamparticipation.py
index f3343f6..4aa205e 100755
--- a/cronscripts/check-teamparticipation.py
+++ b/cronscripts/check-teamparticipation.py
@@ -23,12 +23,9 @@ from lp.registry.scripts.teamparticipation import (
check_teamparticipation_consistency,
fetch_team_participation_info,
fix_teamparticipation_consistency,
- )
+)
from lp.services.scripts.base import LaunchpadCronScript
-from lp.services.utils import (
- load_bz2_pickle,
- save_bz2_pickle,
- )
+from lp.services.utils import load_bz2_pickle, save_bz2_pickle
class CheckTeamParticipationScript(LaunchpadCronScript):
@@ -37,14 +34,22 @@ class CheckTeamParticipationScript(LaunchpadCronScript):
def add_my_options(self):
self.parser.add_option(
"--load-participation-info",
- dest="load_info", metavar="FILE", help=(
+ dest="load_info",
+ metavar="FILE",
+ help=(
"File from which to load participation information "
- "instead of going to the database."))
+ "instead of going to the database."
+ ),
+ )
self.parser.add_option(
"--save-participation-info",
- dest="save_info", metavar="FILE", help=(
+ dest="save_info",
+ metavar="FILE",
+ help=(
"File in which to save participation information, for "
- "later processing with --load-participation-info."))
+ "later processing with --load-participation-info."
+ ),
+ )
def main(self):
"""Perform various checks on the `TeamParticipation` table."""
@@ -57,9 +62,10 @@ class CheckTeamParticipationScript(LaunchpadCronScript):
save_bz2_pickle(participation_info, self.options.save_info)
else:
errors = check_teamparticipation_consistency(
- self.logger, participation_info)
+ self.logger, participation_info
+ )
fix_teamparticipation_consistency(self.logger, errors)
-if __name__ == '__main__':
+if __name__ == "__main__":
CheckTeamParticipationScript("check-teamparticipation").run()
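The import hunks above (collapsing parenthesized one-name-per-line imports onto a single line, and dedenting the closing parenthesis of imports that stay multi-line) come from isort running with the black profile enabled in .pre-commit-config.yaml, alongside black itself. A sketch of the same collapse via isort's Python API, assuming isort >= 5; the 79-column line length is an assumption inferred from how these hunks wrap:

    import isort

    old = (
        "from lp.services.utils import (\n"
        "    load_bz2_pickle,\n"
        "    save_bz2_pickle,\n"
        "    )\n"
    )
    # profile="black" combines the imports onto one line when they fit.
    print(isort.code(old, profile="black", line_length=79), end="")
    # from lp.services.utils import load_bz2_pickle, save_bz2_pickle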
diff --git a/cronscripts/checkwatches.py b/cronscripts/checkwatches.py
index 25a5d07..5026c3e 100755
--- a/cronscripts/checkwatches.py
+++ b/cronscripts/checkwatches.py
@@ -12,8 +12,8 @@ import _pythonpath # noqa: F401
from lp.bugs.scripts.checkwatches import CheckWatchesCronScript
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = CheckWatchesCronScript(
- "checkwatches", dbuser=config.checkwatches.dbuser)
+ "checkwatches", dbuser=config.checkwatches.dbuser
+ )
script.lock_and_run()
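Each hunk in this branch is black's own output, so it can be reproduced mechanically. A sketch using black's Python API, assuming black is installed; the 79-column line length is an assumption inferred from how these hunks wrap:

    import black

    old = (
        "if __name__ == '__main__':\n"
        "    script = CheckWatchesCronScript(\n"
        "        'checkwatches', dbuser=config.checkwatches.dbuser)\n"
        "    script.lock_and_run()\n"
    )
    # Normalizes quotes and reflows the call, matching the hunk above.
    print(black.format_str(old, mode=black.Mode(line_length=79)), end="")

Where a call still does not fit on one indented continuation line, black instead puts one argument per line and appends a trailing comma (the "magic trailing comma"), which is why the longer add_option() calls later in this diff explode fully while short calls stay compact.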
diff --git a/cronscripts/daily_product_jobs.py b/cronscripts/daily_product_jobs.py
index 726cc72..72941de 100755
--- a/cronscripts/daily_product_jobs.py
+++ b/cronscripts/daily_product_jobs.py
@@ -18,7 +18,7 @@ class RequestProductJobs(LaunchpadCronScript):
"""Create `ProductJobs` for products that need updating."""
def __init__(self):
- name = 'daily_product_jobs'
+ name = "daily_product_jobs"
dbuser = config.ICommercialExpiredJobSource.dbuser
LaunchpadCronScript.__init__(self, name, dbuser)
@@ -26,10 +26,10 @@ class RequestProductJobs(LaunchpadCronScript):
globalErrorUtility.configure(self.name)
manager = ProductJobManager(self.logger)
job_count = manager.createAllDailyJobs()
- self.logger.info('Requested %d total product jobs.' % job_count)
+ self.logger.info("Requested %d total product jobs." % job_count)
transaction.commit()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = RequestProductJobs()
script.lock_and_run()
diff --git a/cronscripts/distributionmirror-prober.py b/cronscripts/distributionmirror-prober.py
index 073e4e2..64a5b56 100755
--- a/cronscripts/distributionmirror-prober.py
+++ b/cronscripts/distributionmirror-prober.py
@@ -13,68 +13,107 @@ from lp.services.config import config
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
- )
+)
from lp.services.timeout import set_default_timeout_function
class DistroMirrorProberScript(LaunchpadCronScript):
- usage = ('%prog --content-type=(archive|cdimage) [--force] '
- '[--no-owner-notification] [--max-mirrors=N]')
+ usage = (
+ "%prog --content-type=(archive|cdimage) [--force] "
+ "[--no-owner-notification] [--max-mirrors=N]"
+ )
def add_my_options(self):
- self.parser.add_option('--content-type',
- dest='content_type', default=None, action='store',
- help='Probe only mirrors of the given type')
- self.parser.add_option('--force',
- dest='force', default=False, action='store_true',
- help='Force the probing of mirrors that were probed recently')
- self.parser.add_option('--no-owner-notification',
- dest='no_owner_notification', default=False, action='store_true',
- help='Do not send failure notification to mirror owners.')
- self.parser.add_option('--no-remote-hosts',
- dest='no_remote_hosts', default=False, action='store_true',
- help='Do not try to connect to any host other than localhost.')
- self.parser.add_option('--max-mirrors',
- dest='max_mirrors', default=None, action='store', type="int",
- help='Only probe N mirrors.')
+ self.parser.add_option(
+ "--content-type",
+ dest="content_type",
+ default=None,
+ action="store",
+ help="Probe only mirrors of the given type",
+ )
+ self.parser.add_option(
+ "--force",
+ dest="force",
+ default=False,
+ action="store_true",
+ help="Force the probing of mirrors that were probed recently",
+ )
+ self.parser.add_option(
+ "--no-owner-notification",
+ dest="no_owner_notification",
+ default=False,
+ action="store_true",
+ help="Do not send failure notification to mirror owners.",
+ )
+ self.parser.add_option(
+ "--no-remote-hosts",
+ dest="no_remote_hosts",
+ default=False,
+ action="store_true",
+ help="Do not try to connect to any host other than localhost.",
+ )
+ self.parser.add_option(
+ "--max-mirrors",
+ dest="max_mirrors",
+ default=None,
+ action="store",
+ type="int",
+ help="Only probe N mirrors.",
+ )
# IMPORTANT: Don't change this unless you really know what you're
# doing. Using a too big value can cause spurious failures on lots of
# mirrors and a too small one can cause the prober to run for hours.
- self.parser.add_option('--max-parallel-per-host',
- dest='max_parallel_per_host', default=2,
- action='store', type="int",
- help='Keep maximum N parallel requests per host at a time.'
- ' (default=2)')
+ self.parser.add_option(
+ "--max-parallel-per-host",
+ dest="max_parallel_per_host",
+ default=2,
+ action="store",
+ type="int",
+ help="Keep maximum N parallel requests per host at a time."
+ " (default=2)",
+ )
# We limit the overall number of simultaneous requests as well to
# prevent them from stalling and timing out before they even get a
# chance to start connecting.
- self.parser.add_option('--max-parallel',
- dest='max_parallel', default=100,
- action='store', type="int",
- help='Keep maximum N parallel requests at a time (default=100).')
+ self.parser.add_option(
+ "--max-parallel",
+ dest="max_parallel",
+ default=100,
+ action="store",
+ type="int",
+ help="Keep maximum N parallel requests at a time (default=100).",
+ )
def main(self):
- if self.options.content_type == 'archive':
+ if self.options.content_type == "archive":
content_type = MirrorContent.ARCHIVE
- elif self.options.content_type == 'cdimage':
+ elif self.options.content_type == "cdimage":
content_type = MirrorContent.RELEASE
else:
raise LaunchpadScriptFailure(
- 'Wrong value for argument --content-type: %s'
- % self.options.content_type)
+ "Wrong value for argument --content-type: %s"
+ % self.options.content_type
+ )
set_default_timeout_function(
- lambda: config.distributionmirrorprober.timeout)
+ lambda: config.distributionmirrorprober.timeout
+ )
DistroMirrorProber(self.txn, self.logger).probe(
- content_type, self.options.no_remote_hosts, self.options.force,
- self.options.max_mirrors, not self.options.no_owner_notification,
- self.options.max_parallel, self.options.max_parallel_per_host)
+ content_type,
+ self.options.no_remote_hosts,
+ self.options.force,
+ self.options.max_mirrors,
+ not self.options.no_owner_notification,
+ self.options.max_parallel,
+ self.options.max_parallel_per_host,
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
script = DistroMirrorProberScript(
- 'distributionmirror-prober',
- dbuser=config.distributionmirrorprober.dbuser)
- script.lock_and_run(isolation='autocommit')
+ "distributionmirror-prober",
+ dbuser=config.distributionmirrorprober.dbuser,
+ )
+ script.lock_and_run(isolation="autocommit")
diff --git a/cronscripts/expire-archive-files.py b/cronscripts/expire-archive-files.py
index 59f3bec..4cb865a 100755
--- a/cronscripts/expire-archive-files.py
+++ b/cronscripts/expire-archive-files.py
@@ -12,8 +12,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.soyuz.scripts.expire_archive_files import ArchiveExpirer
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ArchiveExpirer(
- 'expire-archive-files', dbuser=config.binaryfile_expire.dbuser)
+ "expire-archive-files", dbuser=config.binaryfile_expire.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/expire-bugtasks.py b/cronscripts/expire-bugtasks.py
index 3f77590..34fa932 100755
--- a/cronscripts/expire-bugtasks.py
+++ b/cronscripts/expire-bugtasks.py
@@ -25,16 +25,29 @@ class ExpireBugTasks(LaunchpadCronScript):
the status of Invalid (expired). The expiration period is configured
through config.malone.days_before_expiration.
"""
+
usage = "usage: %prog [options]"
- description = ' %s' % __doc__
+ description = " %s" % __doc__
def add_my_options(self):
- self.parser.add_option('-u', '--ubuntu', action='store_true',
- dest='ubuntu', default=False,
- help='Only expire Ubuntu bug tasks.')
- self.parser.add_option('-l', '--limit', action='store', dest='limit',
- type='int', metavar='NUMBER', default=None,
- help='Limit expiry to NUMBER of bug tasks.')
+ self.parser.add_option(
+ "-u",
+ "--ubuntu",
+ action="store_true",
+ dest="ubuntu",
+ default=False,
+ help="Only expire Ubuntu bug tasks.",
+ )
+ self.parser.add_option(
+ "-l",
+ "--limit",
+ action="store",
+ dest="limit",
+ type="int",
+ metavar="NUMBER",
+ default=None,
+ help="Limit expiry to NUMBER of bug tasks.",
+ )
def main(self):
"""Run the BugJanitor."""
@@ -42,13 +55,16 @@ class ExpireBugTasks(LaunchpadCronScript):
if self.options.ubuntu:
# Avoid circular import.
from lp.registry.interfaces.distribution import IDistributionSet
- target = getUtility(IDistributionSet).getByName('ubuntu')
+
+ target = getUtility(IDistributionSet).getByName("ubuntu")
janitor = BugJanitor(
- log=self.logger, target=target, limit=self.options.limit)
+ log=self.logger, target=target, limit=self.options.limit
+ )
janitor.expireBugTasks(self.txn)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ExpireBugTasks(
- 'expire-bugtasks', dbuser=config.malone.expiration_dbuser)
+ "expire-bugtasks", dbuser=config.malone.expiration_dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/expire-questions.py b/cronscripts/expire-questions.py
index dc710fd..213fe63 100755
--- a/cronscripts/expire-questions.py
+++ b/cronscripts/expire-questions.py
@@ -10,7 +10,7 @@ The expiration period is configured through
config.answertracker.days_before_expiration
"""
-__all__ = ['ExpireQuestions']
+__all__ = ["ExpireQuestions"]
import _pythonpath # noqa: F401
@@ -26,6 +26,7 @@ class ExpireQuestions(LaunchpadCronScript):
didn't have any activity in the last X days. The number of days is
configured through config.answertracker.days_before_expiration.
"""
+
usage = "usage: %prog [options]"
description = __doc__
@@ -35,7 +36,6 @@ class ExpireQuestions(LaunchpadCronScript):
janitor.expireQuestions(self.txn)
-if __name__ == '__main__':
- script = ExpireQuestions(
- 'expire-questions', dbuser='expire_questions')
+if __name__ == "__main__":
+ script = ExpireQuestions("expire-questions", dbuser="expire_questions")
script.lock_and_run()
diff --git a/cronscripts/flag-expired-memberships.py b/cronscripts/flag-expired-memberships.py
index 61a40aa..af97c36 100755
--- a/cronscripts/flag-expired-memberships.py
+++ b/cronscripts/flag-expired-memberships.py
@@ -15,7 +15,7 @@ from lp.services.config import config
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
- )
+)
class ExpireMemberships(LaunchpadCronScript):
@@ -37,21 +37,26 @@ class ExpireMemberships(LaunchpadCronScript):
memberships_to_warn = membershipset.getExpiringMembershipsToWarn()
for membership in memberships_to_warn:
membership.sendExpirationWarningEmail()
- self.logger.debug("Sent warning email to %s in %s team."
- % (membership.person.name, membership.team.name))
+ self.logger.debug(
+ "Sent warning email to %s in %s team."
+ % (membership.person.name, membership.team.name)
+ )
self.txn.commit()
def main(self):
"""Flag expired team memberships."""
if self.args:
raise LaunchpadScriptFailure(
- "Unhandled arguments %s" % repr(self.args))
+ "Unhandled arguments %s" % repr(self.args)
+ )
self.logger.info("Flagging expired team memberships.")
self.flag_expired_memberships_and_send_warnings()
self.logger.info("Finished flagging expired team memberships.")
-if __name__ == '__main__':
- script = ExpireMemberships('flag-expired-memberships',
- dbuser=config.expiredmembershipsflagger.dbuser)
+if __name__ == "__main__":
+ script = ExpireMemberships(
+ "flag-expired-memberships",
+ dbuser=config.expiredmembershipsflagger.dbuser,
+ )
script.lock_and_run()
diff --git a/cronscripts/foaf-update-karma-cache.py b/cronscripts/foaf-update-karma-cache.py
index da50f04..5d24300 100755
--- a/cronscripts/foaf-update-karma-cache.py
+++ b/cronscripts/foaf-update-karma-cache.py
@@ -10,10 +10,7 @@ from zope.component import getUtility
from lp.app.errors import NotFoundError
from lp.registry.interfaces.karma import IKarmaCacheManager
from lp.services.config import config
-from lp.services.database.sqlbase import (
- cursor,
- flush_database_updates,
- )
+from lp.services.database.sqlbase import cursor, flush_database_updates
from lp.services.scripts.base import LaunchpadCronScript
@@ -52,8 +49,9 @@ class KarmaCacheUpdater(LaunchpadCronScript):
# worthless after karma_expires_after. This query produces odd results
# when datecreated is in the future, but there is really no point
# adding the extra WHEN clause.
- karma_expires_after = '1 year'
- self.cur.execute("""
+ karma_expires_after = "1 year"
+ self.cur.execute(
+ """
SELECT person, category, product, distribution,
ROUND(SUM(
CASE WHEN karma.datecreated + %s::interval
@@ -67,7 +65,9 @@ class KarmaCacheUpdater(LaunchpadCronScript):
FROM Karma
JOIN KarmaAction ON action = KarmaAction.id
GROUP BY person, category, product, distribution
- """, (karma_expires_after, karma_expires_after))
+ """,
+ (karma_expires_after, karma_expires_after),
+ )
# Suck into RAM to avoid tieing up resources on the DB.
results = list(self.cur.fetchall())
@@ -82,13 +82,17 @@ class KarmaCacheUpdater(LaunchpadCronScript):
# Delete the entries we're going to replace.
self.cur.execute("DELETE FROM KarmaCache WHERE category IS NULL")
- self.cur.execute("""
+ self.cur.execute(
+ """
DELETE FROM KarmaCache
- WHERE project IS NOT NULL AND product IS NULL""")
- self.cur.execute("""
+ WHERE project IS NOT NULL AND product IS NULL"""
+ )
+ self.cur.execute(
+ """
DELETE FROM KarmaCache
WHERE category IS NOT NULL AND project IS NULL AND product IS NULL
- AND distribution IS NULL AND sourcepackagename IS NULL""")
+ AND distribution IS NULL AND sourcepackagename IS NULL"""
+ )
# Don't allow our table to bloat with inactive users.
self.cur.execute("DELETE FROM KarmaCache WHERE karmavalue <= 0")
@@ -99,12 +103,15 @@ class KarmaCacheUpdater(LaunchpadCronScript):
def B_update_karmatotalcache(self):
self.logger.info("Step B: Rebuilding KarmaTotalCache")
# Trash old records
- self.cur.execute("""
+ self.cur.execute(
+ """
DELETE FROM KarmaTotalCache
WHERE person NOT IN (SELECT person FROM KarmaCache)
- """)
+ """
+ )
# Update existing records.
- self.cur.execute("""
+ self.cur.execute(
+ """
UPDATE KarmaTotalCache SET karma_total=sum_karmavalue
FROM (
SELECT person AS sum_person, SUM(karmavalue) AS sum_karmavalue
@@ -112,7 +119,8 @@ class KarmaCacheUpdater(LaunchpadCronScript):
GROUP BY person
) AS sums
WHERE KarmaTotalCache.person = sum_person
- """)
+ """
+ )
# VACUUM KarmaTotalCache since we have just touched every row in it.
self.cur.execute("""VACUUM KarmaTotalCache""")
@@ -131,12 +139,14 @@ class KarmaCacheUpdater(LaunchpadCronScript):
## FOR UPDATE
## """)
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaTotalCache (person, karma_total)
SELECT person, SUM(karmavalue) FROM KarmaCache
WHERE person NOT IN (SELECT person FROM KarmaTotalCache)
GROUP BY person
- """)
+ """
+ )
## self.cur.execute("COMMIT")
@@ -151,7 +161,8 @@ class KarmaCacheUpdater(LaunchpadCronScript):
# - All actions with a specific category of a person.
# - All actions with a specific category of a person.
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaCache
(person, category, karmavalue, product, distribution,
sourcepackagename, project)
@@ -159,10 +170,12 @@ class KarmaCacheUpdater(LaunchpadCronScript):
FROM KarmaCache
WHERE category IS NOT NULL
GROUP BY person, category
- """)
+ """
+ )
# - All actions of a person on a given product.
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaCache
(person, category, karmavalue, product, distribution,
sourcepackagename, project)
@@ -170,10 +183,12 @@ class KarmaCacheUpdater(LaunchpadCronScript):
FROM KarmaCache
WHERE product IS NOT NULL
GROUP BY person, product
- """)
+ """
+ )
# - All actions of a person on a given distribution.
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaCache
(person, category, karmavalue, product, distribution,
sourcepackagename, project)
@@ -182,10 +197,12 @@ class KarmaCacheUpdater(LaunchpadCronScript):
FROM KarmaCache
WHERE distribution IS NOT NULL
GROUP BY person, distribution
- """)
+ """
+ )
# - All actions of a person on a given project group.
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaCache
(person, category, karmavalue, product, distribution,
sourcepackagename, project)
@@ -196,14 +213,16 @@ class KarmaCacheUpdater(LaunchpadCronScript):
WHERE Product.project IS NOT NULL AND product IS NOT NULL
AND category IS NOT NULL
GROUP BY person, Product.project
- """)
+ """
+ )
# - All actions with a specific category of a person on a given
# project group.
# IMPORTANT: This has to be the latest step; otherwise the rows
# inserted here will be included in the calculation of the overall
# karma of a person on a given project group.
- self.cur.execute("""
+ self.cur.execute(
+ """
INSERT INTO KarmaCache
(person, category, karmavalue, product, distribution,
sourcepackagename, project)
@@ -214,7 +233,8 @@ class KarmaCacheUpdater(LaunchpadCronScript):
WHERE Product.project IS NOT NULL AND product IS NOT NULL
AND category IS NOT NULL
GROUP BY person, category, Product.project
- """)
+ """
+ )
def calculate_scaling(self, results):
"""Return a dict of scaling factors keyed on category ID"""
@@ -245,13 +265,15 @@ class KarmaCacheUpdater(LaunchpadCronScript):
max_scaling = config.karmacacheupdater.max_scaling
if scaling[category] > max_scaling:
self.logger.info(
- 'Scaling %s by a factor of %0.4f (capped to %0.4f)'
- % (categories[category], scaling[category], max_scaling))
+ "Scaling %s by a factor of %0.4f (capped to %0.4f)"
+ % (categories[category], scaling[category], max_scaling)
+ )
scaling[category] = max_scaling
else:
self.logger.info(
- 'Scaling %s by a factor of %0.4f'
- % (categories[category], scaling[category]))
+ "Scaling %s by a factor of %0.4f"
+ % (categories[category], scaling[category])
+ )
return scaling
def update_one_karma_cache_entry(self, entry, scaling):
@@ -263,35 +285,43 @@ class KarmaCacheUpdater(LaunchpadCronScript):
"""
(person_id, category_id, product_id, distribution_id, points) = entry
points *= scaling[category_id] # Scaled. wow.
- self.logger.debug("Setting person_id=%d, category_id=%d, points=%d"
- % (person_id, category_id, points))
+ self.logger.debug(
+ "Setting person_id=%d, category_id=%d, points=%d"
+ % (person_id, category_id, points)
+ )
points = int(points)
- context = {'product_id': product_id,
- 'distribution_id': distribution_id}
+ context = {
+ "product_id": product_id,
+ "distribution_id": distribution_id,
+ }
try:
self.karmacachemanager.updateKarmaValue(
- points, person_id, category_id, **context)
+ points, person_id, category_id, **context
+ )
self.logger.debug(
"Updated karmacache for person=%s, points=%s, category=%s, "
- "context=%s" % (person_id, points, category_id, context))
+ "context=%s" % (person_id, points, category_id, context)
+ )
except NotFoundError:
# Row didn't exist; do an insert.
self.karmacachemanager.new(
- points, person_id, category_id, **context)
+ points, person_id, category_id, **context
+ )
self.logger.debug(
"Created karmacache for person=%s, points=%s, category=%s, "
- "context=%s" % (person_id, points, category_id, context))
+ "context=%s" % (person_id, points, category_id, context)
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
script = KarmaCacheUpdater(
- 'karma-update',
- dbuser=config.karmacacheupdater.dbuser)
+ "karma-update", dbuser=config.karmacacheupdater.dbuser
+ )
# We use the autocommit transaction isolation level to minimize
# contention. It also allows us to not bother explicitly calling
# COMMIT all the time. However, if we interrupt this script mid-run
# it will need to be re-run as the data will be inconsistent (only
# part of the caches will have been recalculated).
- script.lock_and_run(isolation='autocommit')
+ script.lock_and_run(isolation="autocommit")
diff --git a/cronscripts/garbo-daily.py b/cronscripts/garbo-daily.py
index 11cc815..7de3dda 100755
--- a/cronscripts/garbo-daily.py
+++ b/cronscripts/garbo-daily.py
@@ -15,8 +15,7 @@ import _pythonpath # noqa: F401
from lp.scripts.garbo import DailyDatabaseGarbageCollector
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = DailyDatabaseGarbageCollector()
script.continue_on_failure = True
script.lock_and_run()
diff --git a/cronscripts/garbo-frequently.py b/cronscripts/garbo-frequently.py
index 9a2b390..adacbc8 100755
--- a/cronscripts/garbo-frequently.py
+++ b/cronscripts/garbo-frequently.py
@@ -15,8 +15,7 @@ import _pythonpath # noqa: F401
from lp.scripts.garbo import FrequentDatabaseGarbageCollector
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = FrequentDatabaseGarbageCollector()
script.continue_on_failure = True
script.lock_and_run()
diff --git a/cronscripts/garbo-hourly.py b/cronscripts/garbo-hourly.py
index 9e6cb24..8756c2e 100755
--- a/cronscripts/garbo-hourly.py
+++ b/cronscripts/garbo-hourly.py
@@ -15,8 +15,7 @@ import _pythonpath # noqa: F401
from lp.scripts.garbo import HourlyDatabaseGarbageCollector
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = HourlyDatabaseGarbageCollector()
script.continue_on_failure = True
script.lock_and_run()
diff --git a/cronscripts/generate-contents-files.py b/cronscripts/generate-contents-files.py
index 7b78a5e..39fd0d4 100755
--- a/cronscripts/generate-contents-files.py
+++ b/cronscripts/generate-contents-files.py
@@ -9,10 +9,10 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.generate_contents_files import (
GenerateContentsFiles,
- )
-
+)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = GenerateContentsFiles(
- "generate-contents", dbuser='generate_contents_files')
+ "generate-contents", dbuser="generate_contents_files"
+ )
script.lock_and_run()
diff --git a/cronscripts/generate-ppa-htaccess.py b/cronscripts/generate-ppa-htaccess.py
index e4044f2..d9f59dc 100755
--- a/cronscripts/generate-ppa-htaccess.py
+++ b/cronscripts/generate-ppa-htaccess.py
@@ -9,11 +9,11 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.generate_ppa_htaccess import (
HtaccessTokenGenerator,
- )
+)
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = HtaccessTokenGenerator(
- 'generate-ppa-htaccess', dbuser=config.generateppahtaccess.dbuser)
+ "generate-ppa-htaccess", dbuser=config.generateppahtaccess.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/language-pack-exporter.py b/cronscripts/language-pack-exporter.py
index cc383b8..f20b2ba 100755
--- a/cronscripts/language-pack-exporter.py
+++ b/cronscripts/language-pack-exporter.py
@@ -10,39 +10,40 @@ import _pythonpath # noqa: F401
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
- )
+)
from lp.translations.scripts.language_pack import export_language_pack
class RosettaLangPackExporter(LaunchpadCronScript):
"""Export language packs for a distribution series."""
- usage = '%prog [options] distribution series'
+
+ usage = "%prog [options] distribution series"
def add_my_options(self):
"""See `LaunchpadScript`."""
self.parser.add_option(
- '--output',
- dest='output',
+ "--output",
+ dest="output",
default=None,
- action='store',
- help='A file to send the generated tarball to, rather than the'
- ' Libraran.'
- )
+ action="store",
+ help="A file to send the generated tarball to, rather than the"
+ " Libraran.",
+ )
self.parser.add_option(
- '--component',
- dest='component',
+ "--component",
+ dest="component",
default=None,
- action='store',
- help='Select a concrete archive component to export.'
- )
+ action="store",
+ help="Select a concrete archive component to export.",
+ )
self.parser.add_option(
- '--force-utf8-encoding',
- dest='force_utf8',
+ "--force-utf8-encoding",
+ dest="force_utf8",
default=False,
- action='store_true',
- help='Whether the exported files should be exported using UTF-8'
- ' encoding.'
- )
+ action="store_true",
+ help="Whether the exported files should be exported using UTF-8"
+ " encoding.",
+ )
def args(self):
"""Return the list of command-line arguments."""
@@ -52,8 +53,9 @@ class RosettaLangPackExporter(LaunchpadCronScript):
"""Set distribution_name and series_name from the args."""
if len(args) != 2:
raise LaunchpadScriptFailure(
- 'Wrong number of arguments: should include distribution '
- 'and series name.')
+ "Wrong number of arguments: should include distribution "
+ "and series name."
+ )
self._args = args
self.distribution_name, self.series_name = self._args
@@ -68,30 +70,37 @@ class RosettaLangPackExporter(LaunchpadCronScript):
The script can run concurrently for different distroseries.
"""
lockfile_name = "launchpad-%s__%s__%s.lock" % (
- self.name, self.distribution_name, self.series_name)
- self.logger.info('Setting lockfile name to %s.' % lockfile_name)
+ self.name,
+ self.distribution_name,
+ self.series_name,
+ )
+ self.logger.info("Setting lockfile name to %s." % lockfile_name)
return lockfile_name
def main(self):
"""See `LaunchpadScript`."""
self.logger.info(
- 'Exporting translations for series %s of distribution %s.',
- self.series_name, self.distribution_name)
+ "Exporting translations for series %s of distribution %s.",
+ self.series_name,
+ self.distribution_name,
+ )
success = export_language_pack(
distribution_name=self.distribution_name,
series_name=self.series_name,
component=self.options.component,
force_utf8=self.options.force_utf8,
output_file=self.options.output,
- logger=self.logger)
+ logger=self.logger,
+ )
if not success:
- raise LaunchpadScriptFailure('Language pack generation failed')
+ raise LaunchpadScriptFailure("Language pack generation failed")
else:
self.txn.commit()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = RosettaLangPackExporter(
- 'language-pack-exporter', dbuser='langpack')
+ "language-pack-exporter", dbuser="langpack"
+ )
script.lock_and_run()
diff --git a/cronscripts/librarian-feed-swift.py b/cronscripts/librarian-feed-swift.py
index 8921ece..8653ae5 100755
--- a/cronscripts/librarian-feed-swift.py
+++ b/cronscripts/librarian-feed-swift.py
@@ -18,47 +18,90 @@ from lp.services.scripts.base import LaunchpadCronScript
class LibrarianFeedSwift(LaunchpadCronScript):
def add_my_options(self):
self.parser.add_option(
- "-i", "--id", action="append", dest="ids", default=[],
- metavar="CONTENT_ID", help="Migrate a single file")
+ "-i",
+ "--id",
+ action="append",
+ dest="ids",
+ default=[],
+ metavar="CONTENT_ID",
+ help="Migrate a single file",
+ )
self.parser.add_option(
- "--remove", action="store_true", default=False,
- help="Remove files from disk after migration (default: False)")
+ "--remove",
+ action="store_true",
+ default=False,
+ help="Remove files from disk after migration (default: False)",
+ )
self.parser.add_option(
- "--rename", action="store_true", default=False,
- help="Rename files on disk after migration (default: False)")
+ "--rename",
+ action="store_true",
+ default=False,
+ help="Rename files on disk after migration (default: False)",
+ )
self.parser.add_option(
- "-s", "--start", action="store", type=int, default=None,
- dest="start", metavar="CONTENT_ID",
- help="Migrate files starting from CONTENT_ID")
+ "-s",
+ "--start",
+ action="store",
+ type=int,
+ default=None,
+ dest="start",
+ metavar="CONTENT_ID",
+ help="Migrate files starting from CONTENT_ID",
+ )
self.parser.add_option(
- "--start-since", action="store", dest='start_since',
- default=None, metavar="INTERVAL",
- help="Migrate files older than INTERVAL (PostgreSQL syntax)")
+ "--start-since",
+ action="store",
+ dest="start_since",
+ default=None,
+ metavar="INTERVAL",
+ help="Migrate files older than INTERVAL (PostgreSQL syntax)",
+ )
self.parser.add_option(
- "-e", "--end", action="store", type=int, default=None,
- dest="end", metavar="CONTENT_ID",
- help="Migrate files up to and including CONTENT_ID")
+ "-e",
+ "--end",
+ action="store",
+ type=int,
+ default=None,
+ dest="end",
+ metavar="CONTENT_ID",
+ help="Migrate files up to and including CONTENT_ID",
+ )
self.parser.add_option(
- "--end-at", action="store", dest='end_at',
- default=None, metavar="INTERVAL",
+ "--end-at",
+ action="store",
+ dest="end_at",
+ default=None,
+ metavar="INTERVAL",
help="Don't migrate files older than INTERVAL "
- "(PostgreSQL syntax)")
+ "(PostgreSQL syntax)",
+ )
self.parser.add_option(
- "--instance-id", action="store", type=int, default=None,
+ "--instance-id",
+ action="store",
+ type=int,
+ default=None,
metavar="INSTANCE_ID",
help=(
"Run as instance INSTANCE_ID (starting at 0) out of "
- "NUM_INSTANCES parallel workers"))
+ "NUM_INSTANCES parallel workers"
+ ),
+ )
self.parser.add_option(
- "--num-instances", action="store", type=int, default=None,
+ "--num-instances",
+ action="store",
+ type=int,
+ default=None,
metavar="NUM_INSTANCES",
- help="Run NUM_INSTANCES parallel workers")
+ help="Run NUM_INSTANCES parallel workers",
+ )
@property
def lockfilename(self):
if self.options.instance_id is not None:
return "launchpad-%s-%d.lock" % (
- self.name, self.options.instance_id)
+ self.name,
+ self.options.instance_id,
+ )
else:
return "launchpad-%s.lock" % self.name
@@ -73,54 +116,76 @@ class LibrarianFeedSwift(LaunchpadCronScript):
remove = None
if self.options.start_since:
- self.options.start = IStandbyStore(LibraryFileContent).execute("""
+ self.options.start = (
+ IStandbyStore(LibraryFileContent)
+ .execute(
+ """
SELECT MAX(id) FROM LibraryFileContent
WHERE datecreated < current_timestamp at time zone 'UTC'
- CAST(%s AS INTERVAL)
- """, (str(self.options.start_since),)).get_one()[0]
+ """,
+ (str(self.options.start_since),),
+ )
+ .get_one()[0]
+ )
if self.options.end_at:
- self.options.end = IStandbyStore(LibraryFileContent).execute("""
+ self.options.end = (
+ IStandbyStore(LibraryFileContent)
+ .execute(
+ """
SELECT MAX(id) FROM LibraryFileContent
WHERE datecreated < current_timestamp at time zone 'UTC'
- CAST(%s AS INTERVAL)
- """, (str(self.options.end_at),)).get_one()[0]
-
- if ((self.options.instance_id is None) !=
- (self.options.num_instances is None)):
+ """,
+ (str(self.options.end_at),),
+ )
+ .get_one()[0]
+ )
+
+ if (self.options.instance_id is None) != (
+ self.options.num_instances is None
+ ):
self.parser.error(
"Must specify both or neither of --instance-id and "
- "--num-instances")
+ "--num-instances"
+ )
kwargs = {
"instance_id": self.options.instance_id,
"num_instances": self.options.num_instances,
"remove_func": remove,
- }
+ }
if self.options.ids and (self.options.start or self.options.end):
self.parser.error(
- "Cannot specify both individual file(s) and range")
+ "Cannot specify both individual file(s) and range"
+ )
elif self.options.ids:
for lfc in self.options.ids:
swift.to_swift(
- self.logger, start_lfc_id=lfc, end_lfc_id=lfc, **kwargs)
+ self.logger, start_lfc_id=lfc, end_lfc_id=lfc, **kwargs
+ )
else:
swift.to_swift(
- self.logger, start_lfc_id=self.options.start,
- end_lfc_id=self.options.end, **kwargs)
- self.logger.info('Done')
+ self.logger,
+ start_lfc_id=self.options.start,
+ end_lfc_id=self.options.end,
+ **kwargs,
+ )
+ self.logger.info("Done")
-if __name__ == '__main__':
+if __name__ == "__main__":
# Ensure that our connections to Swift are direct, and not going via
# a web proxy that would likely block us in any case.
- if 'http_proxy' in os.environ:
- del os.environ['http_proxy']
- if 'HTTP_PROXY' in os.environ:
- del os.environ['HTTP_PROXY']
+ if "http_proxy" in os.environ:
+ del os.environ["http_proxy"]
+ if "HTTP_PROXY" in os.environ:
+ del os.environ["HTTP_PROXY"]
script = LibrarianFeedSwift(
- 'librarian-feed-swift', dbuser='librarianfeedswift')
- script.lock_and_run(isolation='autocommit')
+ "librarian-feed-swift", dbuser="librarianfeedswift"
+ )
+ script.lock_and_run(isolation="autocommit")
diff --git a/cronscripts/librarian-gc.py b/cronscripts/librarian-gc.py
index 8a07cb2..7f8d39e 100755
--- a/cronscripts/librarian-gc.py
+++ b/cronscripts/librarian-gc.py
@@ -14,14 +14,8 @@ import _pythonpath # noqa: F401
import logging
-from lp.services.config import (
- config,
- dbconfig,
- )
-from lp.services.database.sqlbase import (
- connect,
- ISOLATION_LEVEL_AUTOCOMMIT,
- )
+from lp.services.config import config, dbconfig
+from lp.services.database.sqlbase import ISOLATION_LEVEL_AUTOCOMMIT, connect
from lp.services.librarianserver import librariangc
from lp.services.scripts.base import LaunchpadCronScript
@@ -29,36 +23,54 @@ from lp.services.scripts.base import LaunchpadCronScript
class LibrarianGC(LaunchpadCronScript):
def add_my_options(self):
self.parser.add_option(
- '', "--skip-duplicates", action="store_true", default=False,
- dest="skip_duplicates",
- help="Skip duplicate LibraryFileContent merging"
- )
+ "",
+ "--skip-duplicates",
+ action="store_true",
+ default=False,
+ dest="skip_duplicates",
+ help="Skip duplicate LibraryFileContent merging",
+ )
self.parser.add_option(
- '', "--skip-aliases", action="store_true", default=False,
- dest="skip_aliases",
- help="Skip unreferenced LibraryFileAlias removal"
- )
+ "",
+ "--skip-aliases",
+ action="store_true",
+ default=False,
+ dest="skip_aliases",
+ help="Skip unreferenced LibraryFileAlias removal",
+ )
self.parser.add_option(
- '', "--skip-content", action="store_true", default=False,
- dest="skip_content",
- help="Skip unreferenced LibraryFileContent removal"
- )
+ "",
+ "--skip-content",
+ action="store_true",
+ default=False,
+ dest="skip_content",
+ help="Skip unreferenced LibraryFileContent removal",
+ )
self.parser.add_option(
- '', "--skip-blobs", action="store_true", default=False,
- dest="skip_blobs",
- help="Skip removing expired TemporaryBlobStorage rows"
- )
+ "",
+ "--skip-blobs",
+ action="store_true",
+ default=False,
+ dest="skip_blobs",
+ help="Skip removing expired TemporaryBlobStorage rows",
+ )
self.parser.add_option(
- '', "--skip-files", action="store_true", default=False,
- dest="skip_files",
- help="Skip removing files on disk with no database references"
- " or flagged for deletion."
- )
+ "",
+ "--skip-files",
+ action="store_true",
+ default=False,
+ dest="skip_files",
+ help="Skip removing files on disk with no database references"
+ " or flagged for deletion.",
+ )
self.parser.add_option(
- '', "--skip-expiry", action="store_true", default=False,
- dest="skip_expiry",
- help="Skip expiring aliases with an expiry date in the past."
- )
+ "",
+ "--skip-expiry",
+ action="store_true",
+ default=False,
+ dest="skip_expiry",
+ help="Skip expiring aliases with an expiry date in the past.",
+ )
def main(self):
librariangc.log = self.logger
@@ -67,7 +79,8 @@ class LibrarianGC(LaunchpadCronScript):
librariangc.debug = True
conn = connect(
- user=dbconfig.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT)
+ user=dbconfig.dbuser, isolation=ISOLATION_LEVEL_AUTOCOMMIT
+ )
# Refuse to run if we have significant clock skew between the
# librarian and the database.
@@ -93,6 +106,6 @@ class LibrarianGC(LaunchpadCronScript):
librariangc.delete_unwanted_files(conn)
-if __name__ == '__main__':
- script = LibrarianGC('librarian-gc', dbuser=config.librarian_gc.dbuser)
- script.lock_and_run(isolation='autocommit')
+if __name__ == "__main__":
+ script = LibrarianGC("librarian-gc", dbuser=config.librarian_gc.dbuser)
+ script.lock_and_run(isolation="autocommit")
diff --git a/cronscripts/parse-librarian-apache-access-logs.py b/cronscripts/parse-librarian-apache-access-logs.py
index 06c51c1..935f12e 100755
--- a/cronscripts/parse-librarian-apache-access-logs.py
+++ b/cronscripts/parse-librarian-apache-access-logs.py
@@ -25,7 +25,7 @@ from lp.services.librarian.interfaces import ILibraryFileAliasSet
from lp.services.librarianserver.apachelogparser import (
DBUSER,
get_library_file_id,
- )
+)
class ParseLibrarianApacheLogs(ParseApacheLogs):
@@ -54,6 +54,6 @@ class ParseLibrarianApacheLogs(ParseApacheLogs):
return None
-if __name__ == '__main__':
- script = ParseLibrarianApacheLogs('parse-librarian-apache-logs', DBUSER)
+if __name__ == "__main__":
+ script = ParseLibrarianApacheLogs("parse-librarian-apache-logs", DBUSER)
script.lock_and_run()
diff --git a/cronscripts/parse-ppa-apache-access-logs.py b/cronscripts/parse-ppa-apache-access-logs.py
index 015ce5d..c64d301 100755
--- a/cronscripts/parse-ppa-apache-access-logs.py
+++ b/cronscripts/parse-ppa-apache-access-logs.py
@@ -16,10 +16,7 @@ from lp.registry.interfaces.person import IPersonSet
from lp.services.apachelogparser.script import ParseApacheLogs
from lp.services.config import config
from lp.soyuz.interfaces.archive import IArchiveSet
-from lp.soyuz.scripts.ppa_apache_log_parser import (
- DBUSER,
- get_ppa_file_key,
- )
+from lp.soyuz.scripts.ppa_apache_log_parser import DBUSER, get_ppa_file_key
class ParsePPAApacheLogs(ParseApacheLogs):
@@ -51,7 +48,8 @@ class ParsePPAApacheLogs(ParseApacheLogs):
if distro is None:
return None
archive = getUtility(IArchiveSet).getPPAOwnedByPerson(
- person, distribution=distro, name=file_id[1])
+ person, distribution=distro, name=file_id[1]
+ )
if archive is None:
return None
bpr = archive.getBinaryPackageReleaseByFileName(file_id[3])
@@ -61,6 +59,6 @@ class ParsePPAApacheLogs(ParseApacheLogs):
return functools.partial(archive.updatePackageDownloadCount, bpr)
-if __name__ == '__main__':
- script = ParsePPAApacheLogs('parse-ppa-apache-logs', DBUSER)
+if __name__ == "__main__":
+ script = ParsePPAApacheLogs("parse-ppa-apache-logs", DBUSER)
script.lock_and_run()
diff --git a/cronscripts/ppa-generate-keys.py b/cronscripts/ppa-generate-keys.py
index 6a4d936..6d97176 100755
--- a/cronscripts/ppa-generate-keys.py
+++ b/cronscripts/ppa-generate-keys.py
@@ -10,8 +10,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.soyuz.scripts.ppakeygenerator import PPAKeyGenerator
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = PPAKeyGenerator(
- "ppa-generate-keys", config.archivepublisher.dbuser)
+ "ppa-generate-keys", config.archivepublisher.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/process-job-source-groups.py b/cronscripts/process-job-source-groups.py
index 4e95fcd..8fd321b 100755
--- a/cronscripts/process-job-source-groups.py
+++ b/cronscripts/process-job-source-groups.py
@@ -7,11 +7,11 @@
import _pythonpath # noqa: F401
-from optparse import IndentedHelpFormatter
import os
import subprocess
import sys
import textwrap
+from optparse import IndentedHelpFormatter
from lp.services.config import config
from lp.services.helpers import english_list
@@ -24,7 +24,7 @@ class LongEpilogHelpFormatter(IndentedHelpFormatter):
def format_epilog(self, epilog):
if epilog:
- return '\n%s\n' % epilog
+ return "\n%s\n" % epilog
else:
return ""
@@ -36,16 +36,24 @@ class ProcessJobSourceGroups(LaunchpadCronScript):
self.parser.usage = "%prog [ -e JOB_SOURCE ] GROUP [GROUP]..."
self.parser.epilog = (
textwrap.fill(
- "At least one group must be specified. Excluding job sources "
- "is useful when you want to run all the other job sources in "
- "a group.")
- + "\n\n" + self.group_help)
+ "At least one group must be specified. Excluding job sources "
+ "is useful when you want to run all the other job sources in "
+ "a group."
+ )
+ + "\n\n"
+ + self.group_help
+ )
self.parser.formatter = LongEpilogHelpFormatter()
self.parser.add_option(
- '-e', '--exclude', dest='excluded_job_sources',
- metavar="JOB_SOURCE", default=[], action='append',
- help="Exclude specific job sources.")
+ "-e",
+ "--exclude",
+ dest="excluded_job_sources",
+ metavar="JOB_SOURCE",
+ default=[],
+ action="append",
+ help="Exclude specific job sources.",
+ )
def main(self):
selected_groups = self.args
@@ -61,25 +69,27 @@ class ProcessJobSourceGroups(LaunchpadCronScript):
for source in self.options.excluded_job_sources:
if source not in selected_job_sources:
self.logger.info(
- '%r is not in %s' % (
- source, english_list(selected_groups, "or")))
+ "%r is not in %s"
+ % (source, english_list(selected_groups, "or"))
+ )
else:
selected_job_sources.remove(source)
if not selected_job_sources:
return
# Process job sources.
command = os.path.join(
- os.path.dirname(sys.argv[0]), 'process-job-source.py')
+ os.path.dirname(sys.argv[0]), "process-job-source.py"
+ )
child_args = [command]
if self.options.verbose:
- child_args.append('-v')
+ child_args.append("-v")
child_args.extend(sorted(selected_job_sources))
subprocess.check_call(child_args)
@cachedproperty
def all_job_sources(self):
- job_sources = config['process-job-source-groups'].job_sources
- return [job_source.strip() for job_source in job_sources.split(',')]
+ job_sources = config["process-job-source-groups"].job_sources
+ return [job_source.strip() for job_source in job_sources.split(",")]
@cachedproperty
def grouped_sources(self):
@@ -94,12 +104,13 @@ class ProcessJobSourceGroups(LaunchpadCronScript):
@cachedproperty
def group_help(self):
- return '\n\n'.join(
- 'Group: %s\n %s' % (group, '\n '.join(sources))
- for group, sources in sorted(self.grouped_sources.items()))
+ return "\n\n".join(
+ "Group: %s\n %s" % (group, "\n ".join(sources))
+ for group, sources in sorted(self.grouped_sources.items())
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ProcessJobSourceGroups()
# We do not need to take a lock here; all the interesting work is done
# by process-job-source.py, which takes its own per-job-source locks.
diff --git a/cronscripts/process-job-source.py b/cronscripts/process-job-source.py
index dd65c46..3f1b574 100755
--- a/cronscripts/process-job-source.py
+++ b/cronscripts/process-job-source.py
@@ -6,8 +6,7 @@ import _pythonpath # noqa: F401
from lp.services.job.scripts.process_job_source import ProcessJobSource
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ProcessJobSource()
# ProcessJobSource handles its own locking.
script.run()
diff --git a/cronscripts/process-mail.py b/cronscripts/process-mail.py
index 7b4335c..f326441 100755
--- a/cronscripts/process-mail.py
+++ b/cronscripts/process-mail.py
@@ -15,13 +15,16 @@ from lp.services.mail.mailbox import IMailBox
from lp.services.scripts.base import (
LaunchpadCronScript,
LaunchpadScriptFailure,
- )
+)
class ProcessMail(LaunchpadCronScript):
- usage = """%prog [options]
+ usage = (
+ """%prog [options]
- """ + __doc__
+ """
+ + __doc__
+ )
def main(self):
try:
@@ -31,9 +34,10 @@ class ProcessMail(LaunchpadCronScript):
raise
raise LaunchpadScriptFailure(
"No mail box is configured. "
- "Please see mailbox.rst for info on how to configure one.")
+ "Please see mailbox.rst for info on how to configure one."
+ )
-if __name__ == '__main__':
- script = ProcessMail('process-mail', dbuser=config.processmail.dbuser)
+if __name__ == "__main__":
+ script = ProcessMail("process-mail", dbuser=config.processmail.dbuser)
script.lock_and_run(use_web_security=True)
diff --git a/cronscripts/product-release-finder.py b/cronscripts/product-release-finder.py
index f1c2d3a..2b9b496 100755
--- a/cronscripts/product-release-finder.py
+++ b/cronscripts/product-release-finder.py
@@ -13,7 +13,7 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.productreleasefinder.finder import (
ProductReleaseFinder,
- )
+)
from lp.services.config import config
from lp.services.scripts.base import LaunchpadCronScript
@@ -25,6 +25,7 @@ class ReleaseFinderScript(LaunchpadCronScript):
if __name__ == "__main__":
- script = ReleaseFinderScript('productreleasefinder',
- dbuser=config.productreleasefinder.dbuser)
+ script = ReleaseFinderScript(
+ "productreleasefinder", dbuser=config.productreleasefinder.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/publish-ftpmaster.py b/cronscripts/publish-ftpmaster.py
index 351fa4d..e1bfc68 100755
--- a/cronscripts/publish-ftpmaster.py
+++ b/cronscripts/publish-ftpmaster.py
@@ -9,8 +9,6 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.publish_ftpmaster import PublishFTPMaster
-
-if __name__ == '__main__':
- script = PublishFTPMaster(
- "publish-ftpmaster", 'publish_ftpmaster')
+if __name__ == "__main__":
+ script = PublishFTPMaster("publish-ftpmaster", "publish_ftpmaster")
script.lock_and_run()
diff --git a/cronscripts/repack_git_repositories.py b/cronscripts/repack_git_repositories.py
index b601223..761f9d8 100755
--- a/cronscripts/repack_git_repositories.py
+++ b/cronscripts/repack_git_repositories.py
@@ -12,22 +12,26 @@ from lp.services.timeout import set_default_timeout_function
class RepackGitRepositories(LaunchpadCronScript):
-
def add_my_options(self):
self.parser.add_option(
- "--dry-run", action="store_true",
- dest="dry_run", default=False,
+ "--dry-run",
+ action="store_true",
+ dest="dry_run",
+ default=False,
help="Reports which repositories would be repacked without "
- "actually repacking the repositories.")
+ "actually repacking the repositories.",
+ )
def main(self):
set_default_timeout_function(
- lambda: config.repack_git_repositories.timeout)
+ lambda: config.repack_git_repositories.timeout
+ )
updater = RepackTunableLoop(self.logger, self.options.dry_run)
updater.run()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = RepackGitRepositories(
- 'repack_git_repositories', dbuser='branchscanner')
+ "repack_git_repositories", dbuser="branchscanner"
+ )
script.lock_and_run()
diff --git a/cronscripts/request_daily_builds.py b/cronscripts/request_daily_builds.py
index a86df27..a947926 100755
--- a/cronscripts/request_daily_builds.py
+++ b/cronscripts/request_daily_builds.py
@@ -13,7 +13,7 @@ from zope.component import getUtility
from lp.charms.interfaces.charmrecipe import ICharmRecipeSet
from lp.code.interfaces.sourcepackagerecipebuild import (
ISourcePackageRecipeBuildSource,
- )
+)
from lp.services.config import config
from lp.services.scripts.base import LaunchpadCronScript
from lp.services.timeout import set_default_timeout_function
@@ -25,29 +25,33 @@ class RequestDailyBuilds(LaunchpadCronScript):
"""Run create merge proposal jobs."""
def __init__(self):
- name = 'request_daily_builds'
+ name = "request_daily_builds"
dbuser = config.request_daily_builds.dbuser
LaunchpadCronScript.__init__(self, name, dbuser)
def main(self):
globalErrorUtility.configure(self.name)
set_default_timeout_function(
- lambda: config.request_daily_builds.timeout)
+ lambda: config.request_daily_builds.timeout
+ )
source = getUtility(ISourcePackageRecipeBuildSource)
builds = source.makeDailyBuilds(self.logger)
- self.logger.info('Requested %d daily recipe builds.' % len(builds))
+ self.logger.info("Requested %d daily recipe builds." % len(builds))
build_requests = getUtility(ISnapSet).makeAutoBuilds(self.logger)
self.logger.info(
- 'Requested %d sets of automatic snap package builds.' %
- len(build_requests))
+ "Requested %d sets of automatic snap package builds."
+ % len(build_requests)
+ )
build_requests = getUtility(ICharmRecipeSet).makeAutoBuilds(
- self.logger)
+ self.logger
+ )
self.logger.info(
- 'Requested %d sets of automatic charm recipe builds.' %
- len(build_requests))
+ "Requested %d sets of automatic charm recipe builds."
+ % len(build_requests)
+ )
transaction.commit()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = RequestDailyBuilds()
script.lock_and_run()
diff --git a/cronscripts/rosetta-approve-imports.py b/cronscripts/rosetta-approve-imports.py
index 1ba9d7b..99fc08a 100755
--- a/cronscripts/rosetta-approve-imports.py
+++ b/cronscripts/rosetta-approve-imports.py
@@ -9,9 +9,9 @@ import _pythonpath # noqa: F401
from lp.translations.scripts.import_queue_gardener import ImportQueueGardener
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ImportQueueGardener(
- 'translations-import-queue-gardener',
- dbuser='translations_import_queue_gardener')
+ "translations-import-queue-gardener",
+ dbuser="translations_import_queue_gardener",
+ )
script.lock_and_run()
diff --git a/cronscripts/rosetta-export-queue.py b/cronscripts/rosetta-export-queue.py
index 6427347..aefb4fb 100755
--- a/cronscripts/rosetta-export-queue.py
+++ b/cronscripts/rosetta-export-queue.py
@@ -18,6 +18,6 @@ class RosettaExportQueue(LaunchpadCronScript):
process_queue(self.txn, self.logger)
-if __name__ == '__main__':
- script = RosettaExportQueue('rosetta-export-queue', dbuser='poexport')
+if __name__ == "__main__":
+ script = RosettaExportQueue("rosetta-export-queue", dbuser="poexport")
script.lock_and_run()
diff --git a/cronscripts/rosetta-pofile-stats.py b/cronscripts/rosetta-pofile-stats.py
index ca0dce7..1b56096 100755
--- a/cronscripts/rosetta-pofile-stats.py
+++ b/cronscripts/rosetta-pofile-stats.py
@@ -10,16 +10,20 @@ import _pythonpath # noqa: F401
from lp.services.scripts.base import LaunchpadCronScript
from lp.translations.scripts.verify_pofile_stats import (
VerifyPOFileStatsProcess,
- )
+)
class VerifyPOFileStats(LaunchpadCronScript):
"""Trawl `POFile` table, verifying and updating cached statistics."""
def add_my_options(self):
- self.parser.add_option('-i', '--start-id', dest='start_id',
- type='int',
- help="Verify from this POFile id upward.")
+ self.parser.add_option(
+ "-i",
+ "--start-id",
+ dest="start_id",
+ type="int",
+ help="Verify from this POFile id upward.",
+ )
def main(self):
if self.options.start_id is not None:
@@ -28,10 +32,11 @@ class VerifyPOFileStats(LaunchpadCronScript):
start_id = 0
verifier = VerifyPOFileStatsProcess(
- self.txn, self.logger, start_at_id=start_id)
+ self.txn, self.logger, start_at_id=start_id
+ )
verifier.run()
-if __name__ == '__main__':
- script = VerifyPOFileStats(name="pofile-stats", dbuser='pofilestats')
+if __name__ == "__main__":
+ script = VerifyPOFileStats(name="pofile-stats", dbuser="pofilestats")
script.lock_and_run()
diff --git a/cronscripts/rosetta-poimport.py b/cronscripts/rosetta-poimport.py
index 0c0f082..2a95e03 100755
--- a/cronscripts/rosetta-poimport.py
+++ b/cronscripts/rosetta-poimport.py
@@ -8,8 +8,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.translations.scripts.po_import import TranslationsImport
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = TranslationsImport(
- 'rosetta-poimport', dbuser=config.poimport.dbuser)
+ "rosetta-poimport", dbuser=config.poimport.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/send-bug-notifications.py b/cronscripts/send-bug-notifications.py
index f5c2400..990b996 100755
--- a/cronscripts/send-bug-notifications.py
+++ b/cronscripts/send-bug-notifications.py
@@ -14,8 +14,8 @@ import _pythonpath # noqa: F401
from lp.bugs.scripts.bugnotification import SendBugNotifications
from lp.services.config import config
-
-if __name__ == '__main__':
- script = SendBugNotifications('send-bug-notifications',
- dbuser=config.malone.bugnotification_dbuser)
+if __name__ == "__main__":
+ script = SendBugNotifications(
+ "send-bug-notifications", dbuser=config.malone.bugnotification_dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/send-person-notifications.py b/cronscripts/send-person-notifications.py
index febb99c..21e114c 100755
--- a/cronscripts/send-person-notifications.py
+++ b/cronscripts/send-person-notifications.py
@@ -32,7 +32,8 @@ class SendPersonNotifications(LaunchpadCronScript):
manager.purgeNotifications(unsent_notifications)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = SendPersonNotifications(
- 'send-person-notifications', dbuser=config.person_notification.dbuser)
+ "send-person-notifications", dbuser=config.person_notification.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/supermirror-pull.py b/cronscripts/supermirror-pull.py
index d1b82ed..02f2eb4 100755
--- a/cronscripts/supermirror-pull.py
+++ b/cronscripts/supermirror-pull.py
@@ -7,22 +7,16 @@ import _pythonpath # noqa: F401
from optparse import OptionParser
-from twisted.internet import (
- defer,
- reactor,
- )
+from twisted.internet import defer, reactor
from twisted.python import log as tplog
-from lp.codehosting.puller import (
- mirror,
- scheduler,
- )
+from lp.codehosting.puller import mirror, scheduler
from lp.services.config import config
from lp.services.scripts import logger_options
from lp.services.twistedsupport.loggingsupport import (
LoggingProxy,
set_up_logging_for_script,
- )
+)
def clean_shutdown(ignored):
@@ -43,19 +37,23 @@ def run_mirror(log, manager):
deferred.addErrback(shutdown_with_errors)
-if __name__ == '__main__':
+if __name__ == "__main__":
parser = OptionParser()
logger_options(parser)
- parser.add_option('--branch-type', action='append', default=[])
+ parser.add_option("--branch-type", action="append", default=[])
(options, arguments) = parser.parse_args()
if arguments:
parser.error("Unhandled arguments %s" % repr(arguments))
log = set_up_logging_for_script(
- options, 'supermirror_puller', options.log_file)
+ options, "supermirror_puller", options.log_file
+ )
manager = scheduler.JobScheduler(
LoggingProxy(
- config.codehosting.codehosting_endpoint.encode('UTF-8'), log),
- log, options.branch_type)
+ config.codehosting.codehosting_endpoint.encode("UTF-8"), log
+ ),
+ log,
+ options.branch_type,
+ )
reactor.callWhenRunning(run_mirror, log, manager)
reactor.run()
diff --git a/cronscripts/translations-export-to-branch.py b/cronscripts/translations-export-to-branch.py
index ca5d1dd..008915a 100755
--- a/cronscripts/translations-export-to-branch.py
+++ b/cronscripts/translations-export-to-branch.py
@@ -19,11 +19,11 @@ import _pythonpath # noqa: F401
from lp.translations.scripts.translations_to_branch import (
ExportTranslationsToBranch,
- )
-
+)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ExportTranslationsToBranch(
- 'translations-export-to-branch', dbuser='translationstobranch')
- script.config_name = 'translations_export_to_branch'
+ "translations-export-to-branch", dbuser="translationstobranch"
+ )
+ script.config_name = "translations_export_to_branch"
script.lock_and_run()
diff --git a/cronscripts/update-bugtask-targetnamecaches.py b/cronscripts/update-bugtask-targetnamecaches.py
index 907c6b6..4d348cc 100755
--- a/cronscripts/update-bugtask-targetnamecaches.py
+++ b/cronscripts/update-bugtask-targetnamecaches.py
@@ -9,7 +9,7 @@ import _pythonpath # noqa: F401
from lp.bugs.scripts.bugtasktargetnamecaches import (
BugTaskTargetNameCacheUpdater,
- )
+)
from lp.services.config import config
from lp.services.scripts.base import LaunchpadCronScript
@@ -20,13 +20,15 @@ class UpdateBugTaskTargetNameCaches(LaunchpadCronScript):
This ensures that the cache values are up-to-date even after, for
example, an IDistribution being renamed.
"""
+
def main(self):
updater = BugTaskTargetNameCacheUpdater(self.txn, self.logger)
updater.run()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = UpdateBugTaskTargetNameCaches(
- 'launchpad-targetnamecacheupdater',
- dbuser=config.targetnamecacheupdater.dbuser)
+ "launchpad-targetnamecacheupdater",
+ dbuser=config.targetnamecacheupdater.dbuser,
+ )
script.lock_and_run()
diff --git a/cronscripts/update-bugzilla-remote-components.py b/cronscripts/update-bugzilla-remote-components.py
index 2c3e11f..b98b467 100755
--- a/cronscripts/update-bugzilla-remote-components.py
+++ b/cronscripts/update-bugzilla-remote-components.py
@@ -9,24 +9,26 @@ import time
from lp.bugs.scripts.bzremotecomponentfinder import (
BugzillaRemoteComponentFinder,
- )
+)
from lp.services.config import config
from lp.services.scripts.base import LaunchpadCronScript
class UpdateRemoteComponentsFromBugzilla(LaunchpadCronScript):
-
def add_my_options(self):
self.parser.add_option(
- "-b", "--bugtracker", dest="bugtracker",
- help="Update only the bug tracker with this name in launchpad")
+ "-b",
+ "--bugtracker",
+ dest="bugtracker",
+ help="Update only the bug tracker with this name in launchpad",
+ )
def main(self):
start_time = time.time()
- finder = BugzillaRemoteComponentFinder(
- self.logger)
+ finder = BugzillaRemoteComponentFinder(self.logger)
finder.getRemoteProductsAndComponents(
- bugtracker_name=self.options.bugtracker)
+ bugtracker_name=self.options.bugtracker
+ )
run_time = time.time() - start_time
print("Time for this run: %.3f seconds." % run_time)
@@ -36,5 +38,6 @@ if __name__ == "__main__":
updater = UpdateRemoteComponentsFromBugzilla(
"updatebugzillaremotecomponents",
- dbuser=config.updatebugzillaremotecomponents.dbuser)
+ dbuser=config.updatebugzillaremotecomponents.dbuser,
+ )
updater.lock_and_run()
diff --git a/cronscripts/update-cve.py b/cronscripts/update-cve.py
index 91e3cf7..dc6a416 100755
--- a/cronscripts/update-cve.py
+++ b/cronscripts/update-cve.py
@@ -11,7 +11,6 @@ import _pythonpath # noqa: F401
from lp.bugs.scripts.cveimport import CVEUpdater
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = CVEUpdater("updatecve", config.cveupdater.dbuser)
script.lock_and_run()
diff --git a/cronscripts/update-database-stats.py b/cronscripts/update-database-stats.py
index b0cce1e..3e63626 100755
--- a/cronscripts/update-database-stats.py
+++ b/cronscripts/update-database-stats.py
@@ -16,7 +16,7 @@ class UpdateDatabaseStats(LaunchpadCronScript):
"""Populate the DatabaseTableStats and DatabaseCpuStats tables."""
def main(self):
- "Run UpdateDatabaseTableStats."""
+ "Run UpdateDatabaseTableStats." ""
store = IMasterStore(Person)
# The logic is in a stored procedure because we want to run
@@ -32,7 +32,9 @@ class UpdateDatabaseStats(LaunchpadCronScript):
"""Add standard database command line options."""
db_options(self.parser)
-if __name__ == '__main__':
+
+if __name__ == "__main__":
script = UpdateDatabaseStats(
- 'update-database-stats', dbuser='database_stats_update')
+ "update-database-stats", dbuser="database_stats_update"
+ )
script.lock_and_run()
diff --git a/cronscripts/update-pkgcache.py b/cronscripts/update-pkgcache.py
index 07dca6a..3794d1f 100755
--- a/cronscripts/update-pkgcache.py
+++ b/cronscripts/update-pkgcache.py
@@ -11,7 +11,6 @@ import _pythonpath # noqa: F401
from lp.soyuz.scripts.update_pkgcache import PackageCacheUpdater
-
if __name__ == "__main__":
script = PackageCacheUpdater("update-cache", dbuser="update-pkg-cache")
script.lock_and_run()
diff --git a/cronscripts/update-remote-product.py b/cronscripts/update-remote-product.py
index 63ada7a..6a6b428 100755
--- a/cronscripts/update-remote-product.py
+++ b/cronscripts/update-remote-product.py
@@ -19,7 +19,6 @@ from lp.services.scripts.base import LaunchpadCronScript
class UpdateRemoteProduct(LaunchpadCronScript):
-
def main(self):
start_time = time.time()
@@ -30,7 +29,8 @@ class UpdateRemoteProduct(LaunchpadCronScript):
self.logger.info("Time for this run: %.3f seconds." % run_time)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = UpdateRemoteProduct(
- "updateremoteproduct", dbuser=config.updateremoteproduct.dbuser)
+ "updateremoteproduct", dbuser=config.updateremoteproduct.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/update-sourceforge-remote-products.py b/cronscripts/update-sourceforge-remote-products.py
index 7e3a624..897e88c 100755
--- a/cronscripts/update-sourceforge-remote-products.py
+++ b/cronscripts/update-sourceforge-remote-products.py
@@ -11,13 +11,12 @@ import time
from lp.bugs.scripts.sfremoteproductfinder import (
SourceForgeRemoteProductFinder,
- )
+)
from lp.services.config import config
from lp.services.scripts.base import LaunchpadCronScript
class UpdateRemoteProductsFromSourceForge(LaunchpadCronScript):
-
def main(self):
start_time = time.time()
@@ -28,8 +27,9 @@ class UpdateRemoteProductsFromSourceForge(LaunchpadCronScript):
self.logger.info("Time for this run: %.3f seconds." % run_time)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = UpdateRemoteProductsFromSourceForge(
"updateremoteproduct",
- dbuser=config.updatesourceforgeremoteproduct.dbuser)
+ dbuser=config.updatesourceforgeremoteproduct.dbuser,
+ )
script.lock_and_run()
diff --git a/cronscripts/update-standing.py b/cronscripts/update-standing.py
index bc91e46..046870a 100755
--- a/cronscripts/update-standing.py
+++ b/cronscripts/update-standing.py
@@ -13,9 +13,8 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.standing import UpdatePersonalStanding
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = UpdatePersonalStanding(
- 'update-personal-standing',
- dbuser=config.standingupdater.dbuser)
+ "update-personal-standing", dbuser=config.standingupdater.dbuser
+ )
script.lock_and_run()
diff --git a/cronscripts/update-stats.py b/cronscripts/update-stats.py
index 11a74a3..5284198 100755
--- a/cronscripts/update-stats.py
+++ b/cronscripts/update-stats.py
@@ -17,9 +17,8 @@ from lp.services.statistics.interfaces.statistic import ILaunchpadStatisticSet
class StatUpdater(LaunchpadCronScript):
-
def main(self):
- self.logger.debug('Starting the stats update')
+ self.logger.debug("Starting the stats update")
# Note that we do not issue commits here in the script; content
# objects are responsible for committing.
@@ -33,9 +32,9 @@ class StatUpdater(LaunchpadCronScript):
getUtility(IPersonSet).updateStatistics()
- self.logger.debug('Finished the stats update')
+ self.logger.debug("Finished the stats update")
-if __name__ == '__main__':
- script = StatUpdater('launchpad-stats', dbuser=config.statistician.dbuser)
+if __name__ == "__main__":
+ script = StatUpdater("launchpad-stats", dbuser=config.statistician.dbuser)
script.lock_and_run()
diff --git a/scripts/add-missing-builds.py b/scripts/add-missing-builds.py
index 1caa92d..a413f7f 100755
--- a/scripts/add-missing-builds.py
+++ b/scripts/add-missing-builds.py
@@ -8,8 +8,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.soyuz.scripts.add_missing_builds import AddMissingBuilds
-
if __name__ == "__main__":
script = AddMissingBuilds(
- "add-missing-builds", dbuser=config.uploader.dbuser)
+ "add-missing-builds", dbuser=config.uploader.dbuser
+ )
script.lock_and_run()
diff --git a/scripts/branch-rewrite.py b/scripts/branch-rewrite.py
index e8febc0..c752744 100755
--- a/scripts/branch-rewrite.py
+++ b/scripts/branch-rewrite.py
@@ -19,10 +19,7 @@ import transaction
from lp.codehosting.rewrite import BranchRewriter
from lp.services.config import config
-from lp.services.log.loglevels import (
- INFO,
- WARNING,
- )
+from lp.services.log.loglevels import INFO, WARNING
from lp.services.scripts.base import LaunchpadScript
@@ -45,8 +42,8 @@ class BranchRewriteScript(LaunchpadScript):
log_file_directory = os.path.dirname(log_file_location)
if not os.path.isdir(log_file_directory):
os.makedirs(log_file_directory)
- self.parser.defaults['log_file'] = log_file_location
- self.parser.defaults['log_file_level'] = INFO
+ self.parser.defaults["log_file"] = log_file_location
+ self.parser.defaults["log_file_level"] = INFO
def main(self):
rewriter = BranchRewriter(self.logger)
@@ -65,7 +62,7 @@ class BranchRewriteScript(LaunchpadScript):
except KeyboardInterrupt:
sys.exit()
except Exception:
- self.logger.exception('Exception occurred:')
+ self.logger.exception("Exception occurred:")
print("NULL")
sys.stdout.flush()
# The exception might have been a DisconnectionError or
@@ -74,9 +71,10 @@ class BranchRewriteScript(LaunchpadScript):
try:
transaction.abort()
except Exception:
- self.logger.exception('Exception occurred in abort:')
+ self.logger.exception("Exception occurred in abort:")
-if __name__ == '__main__':
- BranchRewriteScript("branch-rewrite", dbuser='branch-rewrite').run(
- isolation='autocommit', use_web_security=True)
+if __name__ == "__main__":
+ BranchRewriteScript("branch-rewrite", dbuser="branch-rewrite").run(
+ isolation="autocommit", use_web_security=True
+ )
diff --git a/scripts/bug-export.py b/scripts/bug-export.py
index 0e5f2e2..726e2e1 100755
--- a/scripts/bug-export.py
+++ b/scripts/bug-export.py
@@ -20,32 +20,52 @@ class BugExportScript(LaunchpadScript):
def add_my_options(self):
self.parser.add_option(
- '-o', '--output', metavar='FILE', action='store',
- help='Export bugs to this file', type='string', dest='output')
+ "-o",
+ "--output",
+ metavar="FILE",
+ action="store",
+ help="Export bugs to this file",
+ type="string",
+ dest="output",
+ )
self.parser.add_option(
- '-p', '--product', metavar='PRODUCT', action='store',
- help='Which product to export', type='string', dest='product')
+ "-p",
+ "--product",
+ metavar="PRODUCT",
+ action="store",
+ help="Which product to export",
+ type="string",
+ dest="product",
+ )
self.parser.add_option(
- '--include-private', action='store_true',
- help='Include private bugs in dump', dest='include_private',
- default=False)
+ "--include-private",
+ action="store_true",
+ help="Include private bugs in dump",
+ dest="include_private",
+ default=False,
+ )
def main(self):
if self.options.product is None:
- self.parser.error('No product specified')
+ self.parser.error("No product specified")
if self.options.output is not None:
- output = open(self.options.output, 'wb')
+ output = open(self.options.output, "wb")
else:
- output = getattr(sys.stdout, 'buffer', sys.stdout)
+ output = getattr(sys.stdout, "buffer", sys.stdout)
product = getUtility(IProductSet).getByName(self.options.product)
if product is None:
self.parser.error(
- 'Product %s does not exist' % self.options.product)
+ "Product %s does not exist" % self.options.product
+ )
export_bugtasks(
- transaction, product, output,
- include_private=self.options.include_private)
+ transaction,
+ product,
+ output,
+ include_private=self.options.include_private,
+ )
-if __name__ == '__main__':
+
+if __name__ == "__main__":
BugExportScript("bug-export").run()
diff --git a/scripts/bug-import.py b/scripts/bug-import.py
index cfc65d1..4b20259 100755
--- a/scripts/bug-import.py
+++ b/scripts/bug-import.py
@@ -24,27 +24,46 @@ class BugImportScript(LaunchpadScript):
def add_my_options(self):
self.parser.add_option(
- '-p', '--product', metavar='PRODUCT', action='store',
- help='Which product to export', type='string', dest='product',
- default=None)
+ "-p",
+ "--product",
+ metavar="PRODUCT",
+ action="store",
+ help="Which product to export",
+ type="string",
+ dest="product",
+ default=None,
+ )
self.parser.add_option(
- '--cache', metavar='FILE', action='store',
- help='Cache for bug ID mapping', type='string',
- dest='cache_filename', default='bug-map.pickle')
+ "--cache",
+ metavar="FILE",
+ action="store",
+ help="Cache for bug ID mapping",
+ type="string",
+ dest="cache_filename",
+ default="bug-map.pickle",
+ )
# XXX: jamesh 2007-04-11 bugs=86352
# Not verifying users created by a bug import can result in
# problems with mail notification, so should not be used for
# imports.
self.parser.add_option(
- '--dont-verify-users', dest='verify_users',
- help="Don't verify newly created users", action='store_false',
- default=True)
+ "--dont-verify-users",
+ dest="verify_users",
+ help="Don't verify newly created users",
+ action="store_false",
+ default=True,
+ )
self.parser.add_option(
- '--testing', dest="testing", action="store_true", help=(
+ "--testing",
+ dest="testing",
+ action="store_true",
+ help=(
"Testing mode; if --product=name is omitted, create a "
"product with Launchpad's test object factory. Do *not* "
- "use in production!"),
- default=False)
+ "use in production!"
+ ),
+ default=False,
+ )
def create_test_product(self):
"""Create a test product with `LaunchpadObjectFactory`.
@@ -62,9 +81,9 @@ class BugImportScript(LaunchpadScript):
self.options.product = self.create_test_product()
self.logger.info("Product %s created", self.options.product)
else:
- self.parser.error('No product specified')
+ self.parser.error("No product specified")
if len(self.args) != 1:
- self.parser.error('Please specify a bug XML file to import')
+ self.parser.error("Please specify a bug XML file to import")
bugs_filename = self.args[0]
# don't send email
@@ -72,21 +91,26 @@ class BugImportScript(LaunchpadScript):
[immediate_mail]
send_email: False
"""
- config.push('send_email_data', send_email_data)
- self.login('bug-importer@xxxxxxxxxxxxx')
+ config.push("send_email_data", send_email_data)
+ self.login("bug-importer@xxxxxxxxxxxxx")
product = getUtility(IProductSet).getByName(self.options.product)
if product is None:
- self.parser.error('Product %s does not exist'
- % self.options.product)
+ self.parser.error(
+ "Product %s does not exist" % self.options.product
+ )
importer = BugImporter(
- product, bugs_filename, self.options.cache_filename,
- verify_users=self.options.verify_users, logger=self.logger)
+ product,
+ bugs_filename,
+ self.options.cache_filename,
+ verify_users=self.options.verify_users,
+ logger=self.logger,
+ )
importer.importBugs(self.txn)
- config.pop('send_email_data')
+ config.pop("send_email_data")
-if __name__ == '__main__':
- script = BugImportScript('lp.services.scripts.bugimport')
+if __name__ == "__main__":
+ script = BugImportScript("lp.services.scripts.bugimport")
script.run()
diff --git a/scripts/bugsummary-rebuild.py b/scripts/bugsummary-rebuild.py
index d697b2b..c22cc9a 100755
--- a/scripts/bugsummary-rebuild.py
+++ b/scripts/bugsummary-rebuild.py
@@ -10,19 +10,25 @@ from lp.services.scripts.base import LaunchpadScript
class BugSummaryRebuild(LaunchpadScript):
-
def add_my_options(self):
self.parser.add_option(
- "-n", "--dry-run", action="store_true",
- dest="dry_run", default=False,
- help="Don't commit changes to the DB.")
+ "-n",
+ "--dry-run",
+ action="store_true",
+ dest="dry_run",
+ default=False,
+ help="Don't commit changes to the DB.",
+ )
def main(self):
updater = BugSummaryRebuildTunableLoop(
- self.logger, self.options.dry_run)
+ self.logger, self.options.dry_run
+ )
updater.run()
-if __name__ == '__main__':
+
+if __name__ == "__main__":
script = BugSummaryRebuild(
- 'bugsummary-rebuild', dbuser='bugsummaryrebuild')
+ "bugsummary-rebuild", dbuser="bugsummaryrebuild"
+ )
script.lock_and_run()
diff --git a/scripts/cache-country-mirrors.py b/scripts/cache-country-mirrors.py
index 5ffde81..177b56a 100755
--- a/scripts/cache-country-mirrors.py
+++ b/scripts/cache-country-mirrors.py
@@ -20,43 +20,41 @@ from zope.component import getUtility
from lp.app.interfaces.launchpad import ILaunchpadCelebrities
from lp.registry.interfaces.distributionmirror import MirrorContent
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
from lp.services.worlddata.interfaces.country import ICountrySet
class CacheCountryMirrors(LaunchpadScript):
- usage = '%prog <target-directory>'
+ usage = "%prog <target-directory>"
def main(self):
if len(self.args) != 1:
raise LaunchpadScriptFailure(
"You must specify the full path of the directory where the "
- "files will be stored.")
+ "files will be stored."
+ )
[dir_name] = self.args
if not os.path.isdir(dir_name):
- raise LaunchpadScriptFailure(
- "'%s' is not a directory." % dir_name)
+ raise LaunchpadScriptFailure("'%s' is not a directory." % dir_name)
for country in getUtility(ICountrySet):
mirrors = getUtility(
- ILaunchpadCelebrities).ubuntu.getBestMirrorsForCountry(
- country, MirrorContent.ARCHIVE)
+ ILaunchpadCelebrities
+ ).ubuntu.getBestMirrorsForCountry(country, MirrorContent.ARCHIVE)
# Write the content to a temporary file first, to avoid problems
# if the script is killed or something like that.
fd, tmpfile = tempfile.mkstemp()
- mirrors_file = os.fdopen(fd, 'w')
+ mirrors_file = os.fdopen(fd, "w")
mirrors_file.write(
- "\n".join(mirror.base_url for mirror in mirrors))
+ "\n".join(mirror.base_url for mirror in mirrors)
+ )
mirrors_file.close()
- filename = os.path.join(dir_name, '%s.txt' % country.iso3166code2)
+ filename = os.path.join(dir_name, "%s.txt" % country.iso3166code2)
shutil.move(tmpfile, filename)
os.chmod(filename, 0o644)
-if __name__ == '__main__':
- CacheCountryMirrors('cache-country-mirrors').lock_and_run()
+if __name__ == "__main__":
+ CacheCountryMirrors("cache-country-mirrors").lock_and_run()
diff --git a/scripts/close-account.py b/scripts/close-account.py
index e6a1bf0..3cdaf9f 100755
--- a/scripts/close-account.py
+++ b/scripts/close-account.py
@@ -9,7 +9,6 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.closeaccount import CloseAccountScript
-
-if __name__ == '__main__':
- script = CloseAccountScript('close-account', dbuser='launchpad')
+if __name__ == "__main__":
+ script = CloseAccountScript("close-account", dbuser="launchpad")
script.run()
diff --git a/scripts/convert-person-to-team.py b/scripts/convert-person-to-team.py
index 077586a..0b34a99 100755
--- a/scripts/convert-person-to-team.py
+++ b/scripts/convert-person-to-team.py
@@ -15,40 +15,41 @@ from zope.component import getUtility
from lp.registry.interfaces.person import IPersonSet
from lp.services.identity.interfaces.account import AccountStatus
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class ConvertPersonToTeamScript(LaunchpadScript):
- usage = '%prog <person-to-convert> <team-owner>'
+ usage = "%prog <person-to-convert> <team-owner>"
def main(self):
if len(self.args) != 2:
raise LaunchpadScriptFailure(
"You must specify the name of the person to be converted "
- "and the person/team who should be its teamowner.")
+ "and the person/team who should be its teamowner."
+ )
person_set = getUtility(IPersonSet)
person_name, owner_name = self.args
person = person_set.getByName(person_name)
if person is None:
raise LaunchpadScriptFailure(
- "There's no person named '%s'." % person_name)
+ "There's no person named '%s'." % person_name
+ )
if person.account_status != AccountStatus.NOACCOUNT:
raise LaunchpadScriptFailure(
- "Only people which have no account can be turned into teams.")
+ "Only people which have no account can be turned into teams."
+ )
owner = person_set.getByName(owner_name)
if owner is None:
raise LaunchpadScriptFailure(
- "There's no person named '%s'." % owner_name)
+ "There's no person named '%s'." % owner_name
+ )
person.convertToTeam(owner)
self.txn.commit()
-if __name__ == '__main__':
- script = ConvertPersonToTeamScript('convert-person-to-team')
+if __name__ == "__main__":
+ script = ConvertPersonToTeamScript("convert-person-to-team")
script.lock_and_run()
diff --git a/scripts/copy-distroseries-translations.py b/scripts/copy-distroseries-translations.py
index a4b683d..669d25a 100755
--- a/scripts/copy-distroseries-translations.py
+++ b/scripts/copy-distroseries-translations.py
@@ -21,7 +21,7 @@ from lp.services.scripts.base import LaunchpadCronScript
from lp.soyuz.interfaces.archive import IArchiveSet
from lp.translations.scripts.copy_distroseries_translations import (
copy_distroseries_translations,
- )
+)
class TranslationsCopier(LaunchpadCronScript):
@@ -31,68 +31,107 @@ class TranslationsCopier(LaunchpadCronScript):
"""
def add_my_options(self):
- self.parser.add_option('-d', '--distribution', dest='distro',
- default='ubuntu',
- help='The target distribution.')
- self.parser.add_option('-s', '--series', dest='series',
- help='The target distroseries.')
- self.parser.add_option('--from-distribution', dest='from_distro',
+ self.parser.add_option(
+ "-d",
+ "--distribution",
+ dest="distro",
+ default="ubuntu",
+ help="The target distribution.",
+ )
+ self.parser.add_option(
+ "-s", "--series", dest="series", help="The target distroseries."
+ )
+ self.parser.add_option(
+ "--from-distribution",
+ dest="from_distro",
help=(
"The source distribution (if omitted, target's previous "
- "series will be used)."))
- self.parser.add_option('--from-series', dest='from_series',
+ "series will be used)."
+ ),
+ )
+ self.parser.add_option(
+ "--from-series",
+ dest="from_series",
help=(
"The source distroseries (if omitted, target's previous "
- "series will be used)."))
+ "series will be used)."
+ ),
+ )
self.parser.add_option(
- '--published-sources-only', dest='published_sources_only',
- action="store_true", default=False,
+ "--published-sources-only",
+ dest="published_sources_only",
+ action="store_true",
+ default=False,
help=(
"Copy only templates for sources that are published in the "
- "target series."))
- self.parser.add_option('--check-archive', dest='check_archive',
+ "target series."
+ ),
+ )
+ self.parser.add_option(
+ "--check-archive",
+ dest="check_archive",
help=(
"With --published-sources-only, check publication in this "
"archive (if omitted, the target's main archive will be "
- "checked)."))
- self.parser.add_option('--check-distroseries',
- dest='check_distroseries',
+ "checked)."
+ ),
+ )
+ self.parser.add_option(
+ "--check-distroseries",
+ dest="check_distroseries",
help=(
"With --published-sources-only, check publication in this "
"distroseries (if omitted, the target distroseries will be "
- "checked)."))
- self.parser.add_option('-f', '--force', dest='force',
- action="store_true", default=False,
+ "checked)."
+ ),
+ )
+ self.parser.add_option(
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ default=False,
help="Don't check if target's UI and imports are blocked; "
- "actively block them.")
- self.parser.add_option('--skip-duplicates', dest='skip_duplicates',
- action="store_true", default=False,
+ "actively block them.",
+ )
+ self.parser.add_option(
+ "--skip-duplicates",
+ dest="skip_duplicates",
+ action="store_true",
+ default=False,
help=(
"Allow the target distroseries to have some translation "
"templates; skip any templates and translations for "
- "sources that already have a template in the target."))
+ "sources that already have a template in the target."
+ ),
+ )
def main(self):
target = getUtility(IDistributionSet)[self.options.distro][
- self.options.series]
+ self.options.series
+ ]
if self.options.from_distro:
source = getUtility(IDistributionSet)[self.options.from_distro][
- self.options.from_series]
+ self.options.from_series
+ ]
else:
source = target.previous_series
if source is None:
self.parser.error(
"No source series specified and target has no previous "
- "series.")
+ "series."
+ )
if self.options.check_archive is not None:
check_archive = getUtility(IArchiveSet).getByReference(
- self.options.check_archive)
+ self.options.check_archive
+ )
else:
check_archive = target.main_archive
check_distribution = check_archive.distribution
if self.options.check_distroseries is not None:
check_distroseries = check_distribution[
- self.options.check_distroseries]
+ self.options.check_distroseries
+ ]
else:
check_distroseries = check_distribution[self.options.series]
@@ -100,25 +139,32 @@ class TranslationsCopier(LaunchpadCronScript):
# while the copy is in progress, to reduce the chances of deadlocks or
# other conflicts.
blocked = (
- target.hide_all_translations and target.defer_translation_imports)
+ target.hide_all_translations and target.defer_translation_imports
+ )
if not blocked and not self.options.force:
self.txn.abort()
self.logger.error(
- 'Before this process starts, set the '
- 'hide_all_translations and defer_translation_imports '
- 'flags for distribution %s, series %s; or use the '
- '--force option to make it happen automatically.' % (
- self.options.distro, self.options.series))
+ "Before this process starts, set the "
+ "hide_all_translations and defer_translation_imports "
+ "flags for distribution %s, series %s; or use the "
+ "--force option to make it happen automatically."
+ % (self.options.distro, self.options.series)
+ )
sys.exit(1)
- self.logger.info('Starting...')
+ self.logger.info("Starting...")
# Actual work is done here.
copy_distroseries_translations(
- source, target, self.txn, self.logger,
+ source,
+ target,
+ self.txn,
+ self.logger,
published_sources_only=self.options.published_sources_only,
- check_archive=check_archive, check_distroseries=check_distroseries,
- skip_duplicates=self.options.skip_duplicates)
+ check_archive=check_archive,
+ check_distroseries=check_distroseries,
+ skip_duplicates=self.options.skip_duplicates,
+ )
# We would like to update the DistroRelase statistics, but it takes
# too long so this should be done after.
@@ -129,7 +175,7 @@ class TranslationsCopier(LaunchpadCronScript):
# series.updateStatistics(self.txn)
self.txn.commit()
- self.logger.info('Done.')
+ self.logger.info("Done.")
@property
def lockfilename(self):
@@ -140,11 +186,15 @@ class TranslationsCopier(LaunchpadCronScript):
property is ever accessed. Luckily that is what the `LaunchpadScript`
code does!
"""
- return "launchpad-%s-%s-%s.lock" % (self.name, self.options.distro,
- self.options.series)
+ return "launchpad-%s-%s-%s.lock" % (
+ self.name,
+ self.options.distro,
+ self.options.series,
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
script = TranslationsCopier(
- 'copy-missing-translations', dbuser='translations_distroseries_copy')
+ "copy-missing-translations", dbuser="translations_distroseries_copy"
+ )
script.lock_and_run()
diff --git a/scripts/copy-signingkeys.py b/scripts/copy-signingkeys.py
index 9db9f5b..b281c4f 100755
--- a/scripts/copy-signingkeys.py
+++ b/scripts/copy-signingkeys.py
@@ -9,8 +9,8 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.copy_signingkeys import CopySigningKeysScript
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = CopySigningKeysScript(
- 'copy-signingkeys', dbuser=config.archivepublisher.dbuser)
+ "copy-signingkeys", dbuser=config.archivepublisher.dbuser
+ )
script.lock_and_run()
diff --git a/scripts/create-bot-account.py b/scripts/create-bot-account.py
index 186705f..d2e8eab 100755
--- a/scripts/create-bot-account.py
+++ b/scripts/create-bot-account.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.createbotaccount import CreateBotAccountScript
-
-if __name__ == '__main__':
- script = CreateBotAccountScript('create-bot-account', dbuser='launchpad')
+if __name__ == "__main__":
+ script = CreateBotAccountScript("create-bot-account", dbuser="launchpad")
script.run()
diff --git a/scripts/fix-translations-opening.py b/scripts/fix-translations-opening.py
index ad33945..09d16f3 100755
--- a/scripts/fix-translations-opening.py
+++ b/scripts/fix-translations-opening.py
@@ -11,13 +11,9 @@ from zope.interface import implementer
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.model.distroseries import DistroSeries
from lp.services.database.interfaces import IMasterStore
-from lp.services.looptuner import (
- DBLoopTuner,
- ITunableLoop,
- )
+from lp.services.looptuner import DBLoopTuner, ITunableLoop
from lp.services.scripts.base import LaunchpadScript
-
delete_pofiletranslator = """\
DELETE FROM POFileTranslator
WHERE POFileTranslator.id IN (
@@ -99,12 +95,11 @@ statements = [
delete_packagingjob,
null_translationimportqueueentry_potemplate,
delete_potemplate,
- ]
+]
@implementer(ITunableLoop)
class ExecuteLoop:
-
def __init__(self, statement, series, logger):
self.statement = statement
self.series = series
@@ -116,14 +111,24 @@ class ExecuteLoop:
def __call__(self, chunk_size):
self.logger.info(
- "%s (limited to %d rows)", self.statement.splitlines()[0],
- chunk_size)
+ "%s (limited to %d rows)",
+ self.statement.splitlines()[0],
+ chunk_size,
+ )
store = IMasterStore(DistroSeries)
- result = store.execute(self.statement, (self.series.id, chunk_size,))
- self.done = (result.rowcount == 0)
+ result = store.execute(
+ self.statement,
+ (
+ self.series.id,
+ chunk_size,
+ ),
+ )
+ self.done = result.rowcount == 0
self.logger.info(
- "%d rows deleted (%s)", result.rowcount,
- ("done" if self.done else "not done"))
+ "%d rows deleted (%s)",
+ result.rowcount,
+ ("done" if self.done else "not done"),
+ )
store.commit()
@@ -132,11 +137,19 @@ class WipeSeriesTranslationsScript(LaunchpadScript):
description = "Wipe translations for a series."
def add_my_options(self):
- self.parser.add_option('-d', '--distribution', dest='distro',
- default='ubuntu',
- help='Name of distribution to delete translations in.')
- self.parser.add_option('-s', '--series', dest='series',
- help='Name of distroseries whose translations should be removed')
+ self.parser.add_option(
+ "-d",
+ "--distribution",
+ dest="distro",
+ default="ubuntu",
+ help="Name of distribution to delete translations in.",
+ )
+ self.parser.add_option(
+ "-s",
+ "--series",
+ dest="series",
+ help="Name of distroseries whose translations should be removed",
+ )
def _getTargetSeries(self):
series = self.options.series
@@ -150,5 +163,5 @@ class WipeSeriesTranslationsScript(LaunchpadScript):
tuner.run()
-if __name__ == '__main__':
- WipeSeriesTranslationsScript(dbuser='rosettaadmin').run()
+if __name__ == "__main__":
+ WipeSeriesTranslationsScript(dbuser="rosettaadmin").run()
diff --git a/scripts/ftpmaster-tools/buildd-mass-retry.py b/scripts/ftpmaster-tools/buildd-mass-retry.py
index 68f8d5c..9595620 100755
--- a/scripts/ftpmaster-tools/buildd-mass-retry.py
+++ b/scripts/ftpmaster-tools/buildd-mass-retry.py
@@ -18,10 +18,7 @@ from lp.app.errors import NotFoundError
from lp.buildmaster.enums import BuildStatus
from lp.registry.interfaces.distribution import IDistributionSet
from lp.registry.interfaces.pocket import PackagePublishingPocket
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class BuilddMassRetryScript(LaunchpadScript):
@@ -30,45 +27,76 @@ class BuilddMassRetryScript(LaunchpadScript):
def add_my_options(self):
self.parser.add_option(
- "-d", "--distribution", dest="distribution",
- metavar="DISTRIBUTION", default="ubuntu",
- help="distribution name")
+ "-d",
+ "--distribution",
+ dest="distribution",
+ metavar="DISTRIBUTION",
+ default="ubuntu",
+ help="distribution name",
+ )
self.parser.add_option(
- "-s", "--suite", dest="suite", metavar="SUITE", help="suite name")
+ "-s", "--suite", dest="suite", metavar="SUITE", help="suite name"
+ )
self.parser.add_option(
- "-a", "--architecture", dest="architecture", metavar="ARCH",
- help="architecture tag")
+ "-a",
+ "--architecture",
+ dest="architecture",
+ metavar="ARCH",
+ help="architecture tag",
+ )
self.parser.add_option(
- "-N", "--dry-run", action="store_true", dest="dryrun",
- metavar="DRY_RUN", default=False,
- help="Whether to treat this as a dry-run or not.")
+ "-N",
+ "--dry-run",
+ action="store_true",
+ dest="dryrun",
+ metavar="DRY_RUN",
+ default=False,
+ help="Whether to treat this as a dry-run or not.",
+ )
self.parser.add_option(
- "-F", "--failed", action="store_true", dest="failed",
- default=False, help="Reset builds in FAILED state.")
+ "-F",
+ "--failed",
+ action="store_true",
+ dest="failed",
+ default=False,
+ help="Reset builds in FAILED state.",
+ )
self.parser.add_option(
- "-D", "--dep-wait", action="store_true", dest="depwait",
- default=False, help="Reset builds in DEPWAIT state.")
+ "-D",
+ "--dep-wait",
+ action="store_true",
+ dest="depwait",
+ default=False,
+ help="Reset builds in DEPWAIT state.",
+ )
self.parser.add_option(
- "-C", "--chroot-wait", action="store_true", dest="chrootwait",
- default=False, help="Reset builds in CHROOTWAIT state.")
+ "-C",
+ "--chroot-wait",
+ action="store_true",
+ dest="chrootwait",
+ default=False,
+ help="Reset builds in CHROOTWAIT state.",
+ )
def main(self):
try:
distribution = getUtility(IDistributionSet)[
- self.options.distribution]
+ self.options.distribution
+ ]
except NotFoundError as info:
raise LaunchpadScriptFailure("Distribution not found: %s" % info)
try:
if self.options.suite is not None:
series, pocket = distribution.getDistroSeriesAndPocket(
- self.options.suite)
+ self.options.suite
+ )
else:
series = distribution.currentseries
pocket = PackagePublishingPocket.RELEASE
@@ -89,13 +117,14 @@ class BuilddMassRetryScript(LaunchpadScript):
self.logger.info(
"Initializing Build Mass-Retry for '%s/%s'"
- % (build_provider.title, pocket.name))
+ % (build_provider.title, pocket.name)
+ )
requested_states_map = {
BuildStatus.FAILEDTOBUILD: self.options.failed,
BuildStatus.MANUALDEPWAIT: self.options.depwait,
BuildStatus.CHROOTWAIT: self.options.chrootwait,
- }
+ }
# XXX cprov 2006-08-31: one query per requested state
# could organise it in a single one nicely if I have
@@ -107,34 +136,36 @@ class BuilddMassRetryScript(LaunchpadScript):
self.logger.info("Processing builds in '%s'" % target_state.title)
target_builds = build_provider.getBuildRecords(
- build_state=target_state, pocket=pocket)
+ build_state=target_state, pocket=pocket
+ )
for build in target_builds:
# Skip builds for superseded sources; they won't ever
# actually build.
if not build.current_source_publication:
self.logger.debug(
- 'Skipping superseded %s (%s)'
- % (build.title, build.id))
+ "Skipping superseded %s (%s)" % (build.title, build.id)
+ )
continue
if not build.can_be_retried:
self.logger.warning(
- 'Can not retry %s (%s)' % (build.title, build.id))
+ "Can not retry %s (%s)" % (build.title, build.id)
+ )
continue
- self.logger.info('Retrying %s (%s)' % (build.title, build.id))
+ self.logger.info("Retrying %s (%s)" % (build.title, build.id))
build.retry()
self.logger.info("Success.")
if self.options.dryrun:
transaction.abort()
- self.logger.info('Dry-run.')
+ self.logger.info("Dry-run.")
else:
transaction.commit()
self.logger.info("Committed")
-if __name__ == '__main__':
- BuilddMassRetryScript('buildd-mass-retry', 'fiera').lock_and_run()
+if __name__ == "__main__":
+ BuilddMassRetryScript("buildd-mass-retry", "fiera").lock_and_run()
diff --git a/scripts/ftpmaster-tools/obsolete-distroseries.py b/scripts/ftpmaster-tools/obsolete-distroseries.py
index 6b39fdf..3cf745b 100755
--- a/scripts/ftpmaster-tools/obsolete-distroseries.py
+++ b/scripts/ftpmaster-tools/obsolete-distroseries.py
@@ -14,8 +14,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.soyuz.scripts.obsolete_distroseries import ObsoleteDistroseries
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ObsoleteDistroseries(
- 'obsolete-distroseries', dbuser=config.archivepublisher.dbuser)
+ "obsolete-distroseries", dbuser=config.archivepublisher.dbuser
+ )
script.lock_and_run()
diff --git a/scripts/generate-access-token.py b/scripts/generate-access-token.py
index e494888..fc23e95 100755
--- a/scripts/generate-access-token.py
+++ b/scripts/generate-access-token.py
@@ -18,42 +18,37 @@ from lp.services.oauth.interfaces import IOAuthConsumerSet
from lp.services.scripts.base import LaunchpadScript
from lp.services.webapp.interfaces import OAuthPermission
-
-LP_API_URL = 'https://api.launchpad.test/devel'
+LP_API_URL = "https://api.launchpad.test/devel"
def print_local_settings(user, key, token, secret):
- print("Access token for {user} generated with the following settings:\n\n"
- "LP_API_URL = '{url}'\n"
- "LP_API_CONSUMER_KEY = '{key}'\n"
- "LP_API_TOKEN = '{token}'\n"
- "LP_API_TOKEN_SECRET = '{secret}'").format(
- user=user,
- url=LP_API_URL,
- key=key,
- token=token,
- secret=secret)
+ print(
+ "Access token for {user} generated with the following settings:\n\n"
+ "LP_API_URL = '{url}'\n"
+ "LP_API_CONSUMER_KEY = '{key}'\n"
+ "LP_API_TOKEN = '{token}'\n"
+ "LP_API_TOKEN_SECRET = '{secret}'"
+ ).format(user=user, url=LP_API_URL, key=key, token=token, secret=secret)
class AccessTokenGenerator(LaunchpadScript):
-
def add_my_options(self):
self.parser.usage = "%prog username [-n CONSUMER NAME]"
self.parser.add_option("-n", "--name", dest="consumer_name")
def main(self):
if len(self.args) < 1:
- self.parser.error('No username supplied')
+ self.parser.error("No username supplied")
username = self.args[0]
key = six.ensure_text(self.options.consumer_name)
- consumer = getUtility(IOAuthConsumerSet).new(key, '')
+ consumer = getUtility(IOAuthConsumerSet).new(key, "")
request_token, _ = consumer.newRequestToken()
# review by username
person = getUtility(IPersonSet).getByName(username)
if not person:
- print('Error: No account for username %s.' % username)
+ print("Error: No account for username %s." % username)
sys.exit(1)
request_token.review(person, OAuthPermission.WRITE_PRIVATE)
@@ -62,11 +57,13 @@ class AccessTokenGenerator(LaunchpadScript):
self.txn.commit()
- print_local_settings(person.name,
- self.options.consumer_name,
- access_token.key,
- access_secret)
+ print_local_settings(
+ person.name,
+ self.options.consumer_name,
+ access_token.key,
+ access_secret,
+ )
-if __name__ == '__main__':
- AccessTokenGenerator('generate-access-token').lock_and_run()
+if __name__ == "__main__":
+ AccessTokenGenerator("generate-access-token").lock_and_run()
diff --git a/scripts/get-stacked-on-branches.py b/scripts/get-stacked-on-branches.py
index 6c15bc0..18ce8f9 100755
--- a/scripts/get-stacked-on-branches.py
+++ b/scripts/get-stacked-on-branches.py
@@ -35,6 +35,7 @@ def get_stacked_branches():
"""Iterate over all branches that, according to the db, are stacked."""
# Avoiding circular import.
from lp.code.model.branch import Branch
+
return IStandbyStore(Branch).find(Branch, Not(Branch.stacked_on == None))
@@ -44,16 +45,24 @@ def main():
See the module docstring for more information.
"""
parser = OptionParser(
- description="List the stacked branches in Launchpad.")
+ description="List the stacked branches in Launchpad."
+ )
parser.parse_args()
execute_zcml_for_scripts()
for db_branch in get_stacked_branches():
stacked_on = db_branch.stacked_on
- print('%s %s %s %s %s' % (
- db_branch.id, db_branch.branch_type.name, db_branch.unique_name,
- stacked_on.id, stacked_on.unique_name))
-
-
-if __name__ == '__main__':
+ print(
+ "%s %s %s %s %s"
+ % (
+ db_branch.id,
+ db_branch.branch_type.name,
+ db_branch.unique_name,
+ stacked_on.id,
+ stacked_on.unique_name,
+ )
+ )
+
+
+if __name__ == "__main__":
main()
diff --git a/scripts/gina.py b/scripts/gina.py
index e8fb3ed..ea9230f 100755
--- a/scripts/gina.py
+++ b/scripts/gina.py
@@ -24,21 +24,30 @@ from lp.soyuz.scripts.gina.runner import run_gina
class Gina(LaunchpadCronScript):
-
def __init__(self):
- super().__init__(name='gina', dbuser=config.gina.dbuser)
+ super().__init__(name="gina", dbuser=config.gina.dbuser)
@property
def usage(self):
return "%s [options] (targets|--all)" % sys.argv[0]
def add_my_options(self):
- self.parser.add_option("-a", "--all", action="store_true",
+ self.parser.add_option(
+ "-a",
+ "--all",
+ action="store_true",
help="Run all sections defined in launchpad-lazr.conf (in order)",
- dest="all", default=False)
- self.parser.add_option("-l", "--list-targets", action="store_true",
- help="List configured import targets", dest="list_targets",
- default=False)
+ dest="all",
+ default=False,
+ )
+ self.parser.add_option(
+ "-l",
+ "--list-targets",
+ action="store_true",
+ help="List configured import targets",
+ dest="list_targets",
+ default=False,
+ )
def getConfiguredTargets(self):
"""Get the configured import targets.
@@ -46,9 +55,8 @@ class Gina(LaunchpadCronScript):
Gina's targets are configured as "[gina_target.*]" sections in the
LAZR config.
"""
- sections = config.getByCategory('gina_target', [])
- targets = [
- target.category_and_section_names[1] for target in sections]
+ sections = config.getByCategory("gina_target", [])
+ targets = [target.category_and_section_names[1] for target in sections]
if len(targets) == 0:
self.logger.warning("No gina_target entries configured.")
return targets
@@ -66,11 +74,13 @@ class Gina(LaunchpadCronScript):
else:
if not targets:
self.parser.error(
- "Must specify at least one target to run, or --all")
+ "Must specify at least one target to run, or --all"
+ )
for target in targets:
if target not in possible_targets:
self.parser.error(
- "No Gina target %s in config file" % target)
+ "No Gina target %s in config file" % target
+ )
return targets
def main(self):
@@ -81,7 +91,7 @@ class Gina(LaunchpadCronScript):
return
for target in self.getTargets(possible_targets):
- target_section = config['gina_target.%s' % target]
+ target_section = config["gina_target.%s" % target]
run_gina(self.options, self.txn, target_section)
diff --git a/scripts/import-debian-bugs.py b/scripts/import-debian-bugs.py
index aca974d..2fd0430 100755
--- a/scripts/import-debian-bugs.py
+++ b/scripts/import-debian-bugs.py
@@ -32,9 +32,13 @@ class DebianBugImportScript(LaunchpadScript):
def add_my_options(self):
self.parser.add_option(
- '-n', '--dry-run', action='store_true',
- help="Don't commit the DB transaction.",
- dest='dry_run', default=False)
+ "-n",
+ "--dry-run",
+ action="store_true",
+ help="Don't commit the DB transaction.",
+ dest="dry_run",
+ default=False,
+ )
def main(self):
if len(self.args) < 1:
@@ -51,8 +55,9 @@ class DebianBugImportScript(LaunchpadScript):
self.txn.commit()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = DebianBugImportScript(
- 'lp.services.scripts.importdebianbugs',
- dbuser=config.checkwatches.dbuser)
+ "lp.services.scripts.importdebianbugs",
+ dbuser=config.checkwatches.dbuser,
+ )
script.run()
diff --git a/scripts/librarian-report.py b/scripts/librarian-report.py
index f5c3b84..8151b29 100755
--- a/scripts/librarian-report.py
+++ b/scripts/librarian-report.py
@@ -9,15 +9,11 @@ __all__ = []
import _pythonpath # noqa: F401
-from optparse import OptionParser
import sys
+from optparse import OptionParser
from lp.services.database.postgresql import listReferences
-from lp.services.database.sqlbase import (
- connect,
- quoteIdentifier,
- sqlvalues,
- )
+from lp.services.database.sqlbase import connect, quoteIdentifier, sqlvalues
from lp.services.scripts import db_options
@@ -26,11 +22,21 @@ def main():
db_options(parser)
parser.add_option(
- "-f", "--from", dest="from_date", default=None,
- metavar="DATE", help="Only count new files since DATE (yyyy/mm/dd)")
+ "-f",
+ "--from",
+ dest="from_date",
+ default=None,
+ metavar="DATE",
+ help="Only count new files since DATE (yyyy/mm/dd)",
+ )
parser.add_option(
- "-u", "--until", dest="until_date", default=None,
- metavar="DATE", help="Only count new files until DATE (yyyy/mm/dd)")
+ "-u",
+ "--until",
+ dest="until_date",
+ default=None,
+ metavar="DATE",
+ help="Only count new files until DATE (yyyy/mm/dd)",
+ )
options, args = parser.parse_args()
if len(args) > 0:
@@ -41,15 +47,15 @@ def main():
# disk space usage. A new row in the database linking to a
# previously existing file in the Librarian takes up no new space.
if options.from_date is not None:
- from_date = 'AND LFC.datecreated >= %s' % sqlvalues(
- options.from_date)
+ from_date = "AND LFC.datecreated >= %s" % sqlvalues(options.from_date)
else:
- from_date = ''
+ from_date = ""
if options.until_date is not None:
- until_date = 'AND LFC.datecreated <= %s' % sqlvalues(
- options.until_date)
+ until_date = "AND LFC.datecreated <= %s" % sqlvalues(
+ options.until_date
+ )
else:
- until_date = ''
+ until_date = ""
con = connect()
cur = con.cursor()
@@ -60,18 +66,20 @@ def main():
# Note that listReferences is recursive, which we don't
# care about in this simple report. We also ignore the
# irrelevant constraint type update and delete flags.
- for from_table, from_column, to_table, to_column, update, delete
- in listReferences(cur, 'libraryfilealias', 'id')
- if to_table == 'libraryfilealias'
- }
+ for from_table, from_column, to_table, _, _, _ in listReferences(
+ cur, "libraryfilealias", "id"
+ )
+ if to_table == "libraryfilealias"
+ }
totals = set()
for referring_table, referring_column in sorted(references):
- if referring_table == 'libraryfiledownloadcount':
+ if referring_table == "libraryfiledownloadcount":
continue
quoted_referring_table = quoteIdentifier(referring_table)
quoted_referring_column = quoteIdentifier(referring_column)
- cur.execute("""
+ cur.execute(
+ """
SELECT
COALESCE(SUM(filesize), 0),
pg_size_pretty(CAST(COALESCE(SUM(filesize), 0) AS bigint)),
@@ -87,18 +95,25 @@ def main():
%s %s
ORDER BY LFC.id
) AS Whatever
- """ % (
- quoted_referring_table, quoted_referring_table,
- quoted_referring_column, from_date, until_date))
+ """
+ % (
+ quoted_referring_table,
+ quoted_referring_table,
+ quoted_referring_column,
+ from_date,
+ until_date,
+ )
+ )
total_bytes, formatted_size, num_files = cur.fetchone()
totals.add((total_bytes, referring_table, formatted_size, num_files))
for total_bytes, tab_name, formatted_size, num_files in sorted(
- totals, reverse=True):
- print('%-10s %s in %d files' % (formatted_size, tab_name, num_files))
+ totals, reverse=True
+ ):
+ print("%-10s %s in %d files" % (formatted_size, tab_name, num_files))
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
sys.exit(main())
diff --git a/scripts/list-team-members b/scripts/list-team-members
index b7a7e61..8aa2809 100755
--- a/scripts/list-team-members
+++ b/scripts/list-team-members
@@ -8,34 +8,45 @@ import _pythonpath # noqa: F401
import logging
import sys
-from lp.registry.scripts.listteammembers import (
- NoSuchTeamError,
- process_team,
- )
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.registry.scripts.listteammembers import NoSuchTeamError, process_team
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class ListTeamMembersScript(LaunchpadScript):
description = "Create a list of members of a team."
- usage = "usage: %s [-e|--email-only|-f|--full-details|-s|--ssh-keys] " \
+ usage = (
+ "usage: %s [-e|--email-only|-f|--full-details|-s|--ssh-keys] "
"team-name [team-name-2] .. [team-name-n]" % sys.argv[0]
+ )
loglevel = logging.INFO
def add_my_options(self):
- self.parser.set_defaults(format='simple')
+ self.parser.set_defaults(format="simple")
self.parser.add_option(
- '-e', '--email-only', action='store_const', const='email',
- help='Only print email addresses', dest='format')
+ "-e",
+ "--email-only",
+ action="store_const",
+ const="email",
+ help="Only print email addresses",
+ dest="format",
+ )
self.parser.add_option(
- '-f', '--full-details', action='store_const', const='full',
- help='Print full details', dest='format')
+ "-f",
+ "--full-details",
+ action="store_const",
+ const="full",
+ help="Print full details",
+ dest="format",
+ )
self.parser.add_option(
- '-s', '--ssh-keys', action='store_const', const='sshkeys',
- help='Print sshkeys', dest='format')
+ "-s",
+ "--ssh-keys",
+ action="store_const",
+ const="sshkeys",
+ help="Print sshkeys",
+ dest="format",
+ )
def main(self):
@@ -43,7 +54,7 @@ class ListTeamMembersScript(LaunchpadScript):
teamnames = self.args
if not teamnames:
- self.parser.error('No team specified')
+ self.parser.error("No team specified")
# We don't want duplicates, so use a set to enforce uniqueness.
member_details = set()
@@ -56,7 +67,8 @@ class ListTeamMembersScript(LaunchpadScript):
print(detail)
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ListTeamMembersScript(
- 'lp.services.scripts.listteammembers', dbuser='listteammembers')
+ "lp.services.scripts.listteammembers", dbuser="listteammembers"
+ )
script.run()
diff --git a/scripts/memcached-stats.py b/scripts/memcached-stats.py
index bc2c0cd..73e0a2f 100755
--- a/scripts/memcached-stats.py
+++ b/scripts/memcached-stats.py
@@ -8,27 +8,26 @@ __all__ = []
import _pythonpath # noqa: F401
+import sys
+import time
from optparse import OptionParser
from pprint import pprint
-import sys
from textwrap import dedent
-import time
from zope.component import getUtility
from lp.services.memcache.interfaces import IMemcacheClient
from lp.services.scripts import execute_zcml_for_scripts
-
# The interesting bits we pull from the memcached stats.
INTERESTING_KEYS = [
- 'cmd_set', # Number of sets.
- 'get_hits', # Number of gets that hit.
- 'get_misses', # Number of gets that missed.
- 'evictions', # Objects evicted from memcached.
- 'bytes_read', # Bytes read from memcached.
- 'bytes_written', # Bytes written to memcached.
- ]
+ "cmd_set", # Number of sets.
+ "get_hits", # Number of gets that hit.
+ "get_misses", # Number of gets that missed.
+ "evictions", # Objects evicted from memcached.
+ "bytes_read", # Bytes read from memcached.
+ "bytes_written", # Bytes written to memcached.
+]
def get_summary(all_raw_stats):
@@ -42,14 +41,19 @@ def get_summary(all_raw_stats):
def print_stats(stats):
"""Output human readable statistics."""
- print(dedent('''\
+ print(
+ dedent(
+ """\
Sets: %(cmd_set)s
Hits: %(get_hits)s
Misses: %(get_misses)s
Evictions: %(evictions)s
Bytes read: %(bytes_read)s
Bytes written: %(bytes_written)s
- ''' % stats))
+ """
+ % stats
+ )
+ )
def print_summary(all_raw_stats):
@@ -73,28 +77,38 @@ def print_cricket(all_raw_stats):
summary = get_summary(all_raw_stats)
now = time.time()
for key in INTERESTING_KEYS:
- print('memcached_total_%s:%s@%d' % (
- key, summary[key], now))
+ print("memcached_total_%s:%s@%d" % (key, summary[key], now))
for server, stats in all_raw_stats:
# Convert the '127.0.0.1:11217 (1)' style server string to a
# cricket key.
- server = server.split()[0].replace(':','_').replace('.','_')
+ server = server.split()[0].replace(":", "_").replace(".", "_")
for key in INTERESTING_KEYS:
- print('memcached_%s_%s:%s@%d' % (
- server, key, stats[key], now))
+ print("memcached_%s_%s:%s@%d" % (server, key, stats[key], now))
def main():
parser = OptionParser()
parser.add_option(
- "-r", "--raw", action="store_true", default=False,
- help="Output full raw data")
+ "-r",
+ "--raw",
+ action="store_true",
+ default=False,
+ help="Output full raw data",
+ )
parser.add_option(
- "-f", "--full", action="store_true", default=False,
- help="Output individual memcached server stats.")
+ "-f",
+ "--full",
+ action="store_true",
+ default=False,
+ help="Output individual memcached server stats.",
+ )
parser.add_option(
- "-c", "--cricket", action="store_true", default=False,
- help="Output stats in cricket compatible format.")
+ "-c",
+ "--cricket",
+ action="store_true",
+ default=False,
+ help="Output stats in cricket compatible format.",
+ )
options, args = parser.parse_args()
if len(args) > 0:
parser.error("Too many arguments.")
@@ -112,5 +126,5 @@ def main():
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
sys.exit(main())
diff --git a/scripts/migrate-librarian-content-md5.py b/scripts/migrate-librarian-content-md5.py
index 4ae5c25..90846d8 100755
--- a/scripts/migrate-librarian-content-md5.py
+++ b/scripts/migrate-librarian-content-md5.py
@@ -11,30 +11,30 @@ import os
import subprocess
import sys
-
SQL = "UPDATE LibraryFileContent SET md5 = '%s' WHERE id = %d;"
def main(path, minimumID=0):
- if not path.endswith('/'):
- path += '/'
+ if not path.endswith("/"):
+ path += "/"
for dirpath, dirname, filenames in os.walk(path):
dirname.sort()
- databaseID = dirpath[len(path):]
+ databaseID = dirpath[len(path) :]
if not len(databaseID) == 8: # "xx/xx/xx"
continue
for filename in filenames:
- databaseID = int(databaseID.replace('/', '') + filename, 16)
+ databaseID = int(databaseID.replace("/", "") + filename, 16)
if databaseID < minimumID:
continue
filename = os.path.join(dirpath, filename)
md5sum = subprocess.check_output(
- ['md5sum', filename], universal_newlines=True).split(' ', 1)[0]
+ ["md5sum", filename], universal_newlines=True
+ ).split(" ", 1)[0]
yield databaseID, md5sum
-if __name__ == '__main__':
+if __name__ == "__main__":
if len(sys.argv) > 2:
minimumID = int(sys.argv[2])
else:
diff --git a/scripts/mirror-branch.py b/scripts/mirror-branch.py
index 2ec56bb..3265ade 100755
--- a/scripts/mirror-branch.py
+++ b/scripts/mirror-branch.py
@@ -28,26 +28,25 @@ Where:
import _pythonpath # noqa: F401
-from optparse import OptionParser
import os
import resource
import sys
+from optparse import OptionParser
import breezy.repository
from lp.code.enums import BranchType
from lp.codehosting.puller.worker import (
- install_worker_ui_factory,
PullerWorker,
PullerWorkerProtocol,
- )
+ install_worker_ui_factory,
+)
from lp.services.webapp.errorlog import globalErrorUtility
-
branch_type_map = {
- BranchType.MIRRORED: 'mirror',
- BranchType.IMPORTED: 'import'
- }
+ BranchType.MIRRORED: "mirror",
+ BranchType.IMPORTED: "import",
+}
def shut_up_deprecation_warning():
@@ -57,24 +56,36 @@ def shut_up_deprecation_warning():
breezy.repository._deprecation_warning_done = True
-if __name__ == '__main__':
+if __name__ == "__main__":
parser = OptionParser()
(options, arguments) = parser.parse_args()
- (source_url, destination_url, branch_id, unique_name,
- branch_type_name, default_stacked_on_url) = arguments
+ (
+ source_url,
+ destination_url,
+ branch_id,
+ unique_name,
+ branch_type_name,
+ default_stacked_on_url,
+ ) = arguments
branch_type = BranchType.items[branch_type_name]
- if branch_type == BranchType.IMPORTED and 'http_proxy' in os.environ:
- del os.environ['http_proxy']
- section_name = 'supermirror_%s_puller' % branch_type_map[branch_type]
+ if branch_type == BranchType.IMPORTED and "http_proxy" in os.environ:
+ del os.environ["http_proxy"]
+ section_name = "supermirror_%s_puller" % branch_type_map[branch_type]
globalErrorUtility.configure(section_name)
shut_up_deprecation_warning()
resource.setrlimit(resource.RLIMIT_AS, (1500000000, 1500000000))
# The worker outputs netstrings, which are bytes.
- protocol = PullerWorkerProtocol(getattr(sys.stdout, 'buffer', sys.stdout))
+ protocol = PullerWorkerProtocol(getattr(sys.stdout, "buffer", sys.stdout))
install_worker_ui_factory(protocol)
PullerWorker(
- source_url, destination_url, int(branch_id), unique_name, branch_type,
- default_stacked_on_url, protocol).mirror()
+ source_url,
+ destination_url,
+ int(branch_id),
+ unique_name,
+ branch_type,
+ default_stacked_on_url,
+ protocol,
+ ).mirror()
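
The comment about netstrings explains why the protocol is handed a binary stream: a netstring frames a payload as its decimal length, a colon, the raw bytes, and a trailing comma. A generic encoder sketch (the real framing lives in PullerWorkerProtocol; this is illustrative only):

    def encode_netstring(payload: bytes) -> bytes:
        # "<decimal length>:<payload>," e.g. b"hello" -> b"5:hello,"
        return b"%d:%s," % (len(payload), payload)

    assert encode_netstring(b"hello") == b"5:hello,"
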
diff --git a/scripts/mlist-import.py b/scripts/mlist-import.py
index b418b5c..263c031 100755
--- a/scripts/mlist-import.py
+++ b/scripts/mlist-import.py
@@ -11,8 +11,8 @@
# - Import archives.
__all__ = [
- 'MailingListImport',
- ]
+ "MailingListImport",
+]
import _pythonpath # noqa: F401
@@ -36,7 +36,7 @@ class MailingListImport(LaunchpadScript):
"""
loglevel = logging.INFO
- description = 'Import data into a Launchpad mailing list.'
+ description = "Import data into a Launchpad mailing list."
def __init__(self, name, dbuser=None):
self.usage = textwrap.dedent(self.__doc__)
@@ -44,22 +44,30 @@ class MailingListImport(LaunchpadScript):
def add_my_options(self):
"""See `LaunchpadScript`."""
- self.parser.add_option('-f', '--filename', default='-', help=(
- 'The file name containing the addresses to import, one '
- "per line. If '-' is used or this option is not given, "
- 'then addresses are read from standard input.'))
- self.parser.add_option('--notifications',
- default=False, action='store_true',
- help=(
- 'Enable team-join notification sending to team admins.'))
+ self.parser.add_option(
+ "-f",
+ "--filename",
+ default="-",
+ help=(
+ "The file name containing the addresses to import, one "
+ "per line. If '-' is used or this option is not given, "
+ "then addresses are read from standard input."
+ ),
+ )
+ self.parser.add_option(
+ "--notifications",
+ default=False,
+ action="store_true",
+ help=("Enable team-join notification sending to team admins."),
+ )
def main(self):
"""See `LaunchpadScript`."""
team_name = None
if len(self.args) == 0:
- self.parser.error('Missing team name')
+ self.parser.error("Missing team name")
elif len(self.args) > 1:
- self.parser.error('Too many arguments')
+ self.parser.error("Too many arguments")
else:
team_name = self.args[0]
@@ -68,19 +76,22 @@ class MailingListImport(LaunchpadScript):
# Suppress sending emails based on the (absence of the) --notifications
# switch. Notifications are disabled by default because they can
# cause huge amounts of email to be sent to the team owner.
- send_email_config = """
+ send_email_config = (
+ """
[immediate_mail]
send_email: %s
- """ % self.options.notifications
- config.push('send_email_config', send_email_config)
+ """
+ % self.options.notifications
+ )
+ config.push("send_email_config", send_email_config)
- if self.options.filename == '-':
+ if self.options.filename == "-":
# Read all the addresses from standard input, parse them
# here, and use the direct interface to the importer.
addresses = []
while True:
line = sys.stdin.readline()
- if line == '':
+ if line == "":
break
addresses.append(line[:-1])
importer.importAddresses(addresses)
@@ -92,7 +103,7 @@ class MailingListImport(LaunchpadScript):
return 0
-if __name__ == '__main__':
- script = MailingListImport('scripts.mlist-import', 'mlist-import')
+if __name__ == "__main__":
+ script = MailingListImport("scripts.mlist-import", "mlist-import")
status = script.lock_and_run()
sys.exit(status)
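
The stdin loop above relies on readline() returning the empty string only at end of input; a self-contained sketch of the same idea, with io.StringIO standing in for sys.stdin:

    import io

    stdin = io.StringIO("alice@example.com\nbob@example.com\n")
    addresses = []
    while True:
        line = stdin.readline()
        if line == "":
            break
        addresses.append(line[:-1])  # trim the trailing newline
    print(addresses)  # ['alice@example.com', 'bob@example.com']
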
diff --git a/scripts/modified-branches.py b/scripts/modified-branches.py
index 71df108..9b2a48c 100755
--- a/scripts/modified-branches.py
+++ b/scripts/modified-branches.py
@@ -12,8 +12,8 @@ import _pythonpath # noqa: F401
from lp.codehosting.scripts.modifiedbranches import ModifiedBranchesScript
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ModifiedBranchesScript(
- 'modified-branches', dbuser='modified-branches')
+ "modified-branches", dbuser="modified-branches"
+ )
script.run()
diff --git a/scripts/populate-archive.py b/scripts/populate-archive.py
index 1b72e27..eb41bd2 100755
--- a/scripts/populate-archive.py
+++ b/scripts/populate-archive.py
@@ -14,8 +14,8 @@ import _pythonpath # noqa: F401
from lp.services.config import config
from lp.soyuz.scripts.populate_archive import ArchivePopulator
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = ArchivePopulator(
- 'populate-archive', dbuser=config.archivepublisher.dbuser)
+ "populate-archive", dbuser=config.archivepublisher.dbuser
+ )
script.lock_and_run()
diff --git a/scripts/populate-distroseriesdiff.py b/scripts/populate-distroseriesdiff.py
index 6ccbf80..243c797 100755
--- a/scripts/populate-distroseriesdiff.py
+++ b/scripts/populate-distroseriesdiff.py
@@ -7,8 +7,7 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.populate_distroseriesdiff import (
PopulateDistroSeriesDiff,
- )
+)
-
-if __name__ == '__main__':
- PopulateDistroSeriesDiff('populate-distroseriesdiff').run()
+if __name__ == "__main__":
+ PopulateDistroSeriesDiff("populate-distroseriesdiff").run()
diff --git a/scripts/ppa-report.py b/scripts/ppa-report.py
index 8ccd14e..5b5713e 100755
--- a/scripts/ppa-report.py
+++ b/scripts/ppa-report.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.soyuz.scripts.ppareport import PPAReportScript
-
-if __name__ == '__main__':
- script = PPAReportScript('ppareport', dbuser='ro')
+if __name__ == "__main__":
+ script = PPAReportScript("ppareport", dbuser="ro")
script.run()
diff --git a/scripts/process-accepted.py b/scripts/process-accepted.py
index 1c8dda4..3f2352e 100755
--- a/scripts/process-accepted.py
+++ b/scripts/process-accepted.py
@@ -14,8 +14,6 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.processaccepted import ProcessAccepted
-
-if __name__ == '__main__':
- script = ProcessAccepted(
- "process-accepted", dbuser='process_accepted')
+if __name__ == "__main__":
+ script = ProcessAccepted("process-accepted", dbuser="process_accepted")
script.lock_and_run()
diff --git a/scripts/process-death-row.py b/scripts/process-death-row.py
index 986fd04..7c9e20a 100755
--- a/scripts/process-death-row.py
+++ b/scripts/process-death-row.py
@@ -25,8 +25,6 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.processdeathrow import DeathRowProcessor
-
if __name__ == "__main__":
- script = DeathRowProcessor(
- 'process-death-row', dbuser='process_death_row')
+ script = DeathRowProcessor("process-death-row", dbuser="process_death_row")
script.lock_and_run()
diff --git a/scripts/process-one-mail.py b/scripts/process-one-mail.py
index 3cb8598..9ec2ce5 100755
--- a/scripts/process-one-mail.py
+++ b/scripts/process-one-mail.py
@@ -17,20 +17,23 @@ from lp.services.scripts.base import LaunchpadScript
class ProcessMail(LaunchpadScript):
- usage = """%prog [options] [MAIL_FILE]
+ usage = (
+ """%prog [options] [MAIL_FILE]
Process one incoming email, read from the specified file or from stdin.
Any mail generated in response is printed to stdout.
- """ + __doc__
+ """
+ + __doc__
+ )
def main(self):
self.txn.begin()
# NB: This somewhat duplicates handleMail, but there it's mixed in
# with handling a mailbox, which we're avoiding here.
if len(self.args) >= 1:
- from_file = open(self.args[0], 'rb')
+ from_file = open(self.args[0], "rb")
else:
from_file = sys.stdin.buffer
self.logger.debug("reading message from %r" % (from_file,))
@@ -44,15 +47,18 @@ class ProcessMail(LaunchpadScript):
# Kinda kludgey way to cause sendmail to just print it.
config.sendmail_to_stdout = True
handle_one_mail(
- self.logger, parsed_mail,
- file_alias, file_alias.http_url,
- signature_timestamp_checker=None)
+ self.logger,
+ parsed_mail,
+ file_alias,
+ file_alias.http_url,
+ signature_timestamp_checker=None,
+ )
self.logger.debug("mail handling complete")
self.txn.commit()
-if __name__ == '__main__':
- script = ProcessMail('process-one-mail', dbuser=config.processmail.dbuser)
+if __name__ == "__main__":
+ script = ProcessMail("process-one-mail", dbuser=config.processmail.dbuser)
# No need to lock; you can run as many as you want as they use no global
# resources (like a mailbox).
script.run(use_web_security=True)
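
One detail worth noting here: the mail is read as bytes (the file is opened "rb", and sys.stdin.buffer is the binary layer under Python 3's text-mode stdin) so signatures and arbitrary encodings survive untouched. A sketch of the same source selection, with a hypothetical argument list:

    import sys

    args = []  # hypothetical; the script uses self.args
    if len(args) >= 1:
        from_file = open(args[0], "rb")
    else:
        from_file = sys.stdin.buffer  # binary stdin, never the text wrapper
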
diff --git a/scripts/process-upload.py b/scripts/process-upload.py
index 361a4b1..43cab16 100755
--- a/scripts/process-upload.py
+++ b/scripts/process-upload.py
@@ -13,7 +13,6 @@ import _pythonpath # noqa: F401
from lp.archiveuploader.scripts.processupload import ProcessUpload
-
-if __name__ == '__main__':
- script = ProcessUpload('process-upload', dbuser='process_upload')
+if __name__ == "__main__":
+ script = ProcessUpload("process-upload", dbuser="process_upload")
script.lock_and_run()
diff --git a/scripts/publish-distro.py b/scripts/publish-distro.py
index 4d13062..3b031a0 100755
--- a/scripts/publish-distro.py
+++ b/scripts/publish-distro.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.publishdistro import PublishDistro
-
if __name__ == "__main__":
- script = PublishDistro('publish-distro', dbuser='publish_distro')
+ script = PublishDistro("publish-distro", dbuser="publish_distro")
script.lock_and_run()
diff --git a/scripts/rosetta/fix_translation_credits.py b/scripts/rosetta/fix_translation_credits.py
index dcd4227..f12b0eb 100755
--- a/scripts/rosetta/fix_translation_credits.py
+++ b/scripts/rosetta/fix_translation_credits.py
@@ -10,7 +10,7 @@ import _pythonpath # noqa: F401
from lp.services.scripts.base import LaunchpadScript
from lp.translations.scripts.fix_translation_credits import (
FixTranslationCreditsProcess,
- )
+)
class FixTranslationCredits(LaunchpadScript):
@@ -21,7 +21,8 @@ class FixTranslationCredits(LaunchpadScript):
fixer.run()
-if __name__ == '__main__':
- script = FixTranslationCredits(name="fix-translation-credits",
- dbuser='rosettaadmin')
+if __name__ == "__main__":
+ script = FixTranslationCredits(
+ name="fix-translation-credits", dbuser="rosettaadmin"
+ )
script.lock_and_run()
diff --git a/scripts/rosetta/gettext_check_messages.py b/scripts/rosetta/gettext_check_messages.py
index 81c4859..c2ace40 100755
--- a/scripts/rosetta/gettext_check_messages.py
+++ b/scripts/rosetta/gettext_check_messages.py
@@ -18,10 +18,7 @@ enabled instead.
import _pythonpath # noqa: F401
-from lp.translations.scripts.gettext_check_messages import (
- GettextCheckMessages,
- )
+from lp.translations.scripts.gettext_check_messages import GettextCheckMessages
-
-if __name__ == '__main__':
- GettextCheckMessages('gettext-check-messages').lock_and_run()
+if __name__ == "__main__":
+ GettextCheckMessages("gettext-check-messages").lock_and_run()
diff --git a/scripts/rosetta/merge-existing-packagings.py b/scripts/rosetta/merge-existing-packagings.py
index 425038c..a3537e5 100755
--- a/scripts/rosetta/merge-existing-packagings.py
+++ b/scripts/rosetta/merge-existing-packagings.py
@@ -5,13 +5,10 @@
import _pythonpath # noqa: F401
-from lp.translations.utilities.translationmerger import (
- MergeExistingPackagings,
- )
-
+from lp.translations.utilities.translationmerger import MergeExistingPackagings
-if __name__ == '__main__':
+if __name__ == "__main__":
script = MergeExistingPackagings(
- 'lp.services.scripts.message-sharing-merge',
- dbuser='rosettaadmin')
+ "lp.services.scripts.message-sharing-merge", dbuser="rosettaadmin"
+ )
script.run()
diff --git a/scripts/rosetta/message-sharing-merge.py b/scripts/rosetta/message-sharing-merge.py
index 86ef389..e69f07f 100755
--- a/scripts/rosetta/message-sharing-merge.py
+++ b/scripts/rosetta/message-sharing-merge.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.translations.utilities.translationmerger import MessageSharingMerge
-
# This script merges POTMsgSets for sharing POTemplates. This involves
# deleting records that we'd never delete otherwise. So before running,
# make sure rosettaadmin has the privileges to delete POTMsgSets and
@@ -17,8 +16,8 @@ from lp.translations.utilities.translationmerger import MessageSharingMerge
# GRANT DELETE ON TranslationTemplateItem TO rosettaadmin;
-if __name__ == '__main__':
+if __name__ == "__main__":
script = MessageSharingMerge(
- 'lp.services.scripts.message-sharing-merge',
- dbuser='rosettaadmin')
+ "lp.services.scripts.message-sharing-merge", dbuser="rosettaadmin"
+ )
script.run()
diff --git a/scripts/rosetta/migrate_current_flag.py b/scripts/rosetta/migrate_current_flag.py
index 3e08ab9..3d12bdd 100755
--- a/scripts/rosetta/migrate_current_flag.py
+++ b/scripts/rosetta/migrate_current_flag.py
@@ -10,7 +10,7 @@ import _pythonpath # noqa: F401
from lp.services.scripts.base import LaunchpadScript
from lp.translations.scripts.migrate_current_flag import (
MigrateCurrentFlagProcess,
- )
+)
class MigrateTranslationFlags(LaunchpadScript):
@@ -25,7 +25,8 @@ class MigrateTranslationFlags(LaunchpadScript):
fixer.run()
-if __name__ == '__main__':
+if __name__ == "__main__":
script = MigrateTranslationFlags(
- name="migratecurrentflag", dbuser='rosettaadmin')
+ name="migratecurrentflag", dbuser="rosettaadmin"
+ )
script.lock_and_run()
diff --git a/scripts/rosetta/pottery-generate-intltool.py b/scripts/rosetta/pottery-generate-intltool.py
index 278a774..c574896 100755
--- a/scripts/rosetta/pottery-generate-intltool.py
+++ b/scripts/rosetta/pottery-generate-intltool.py
@@ -12,7 +12,7 @@ import os.path
from lpbuildd.pottery.intltool import generate_pots
from lpbuildd.tests.fakebuilder import (
UncontainedBackend as _UncontainedBackend,
- )
+)
from lp.services.scripts.base import LaunchpadScript
diff --git a/scripts/rosetta/remove-translations-by.py b/scripts/rosetta/remove-translations-by.py
index a47c05b..0b89de2 100755
--- a/scripts/rosetta/remove-translations-by.py
+++ b/scripts/rosetta/remove-translations-by.py
@@ -7,9 +7,8 @@ import _pythonpath # noqa: F401
from lp.translations.scripts.remove_translations import RemoveTranslations
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = RemoveTranslations(
- 'lp.services.scripts.remove-translations',
- dbuser='rosettaadmin')
+ "lp.services.scripts.remove-translations", dbuser="rosettaadmin"
+ )
script.run()
diff --git a/scripts/rosetta/reupload-translations.py b/scripts/rosetta/reupload-translations.py
index 0a47db4..d04c965 100755
--- a/scripts/rosetta/reupload-translations.py
+++ b/scripts/rosetta/reupload-translations.py
@@ -8,9 +8,8 @@ import _pythonpath # noqa: F401
from lp.translations.scripts.reupload_translations import (
ReuploadPackageTranslations,
- )
+)
-
-if __name__ == '__main__':
- script = ReuploadPackageTranslations('reupload-translations')
+if __name__ == "__main__":
+ script = ReuploadPackageTranslations("reupload-translations")
script.run()
diff --git a/scripts/rosetta/upload-translations.py b/scripts/rosetta/upload-translations.py
index 1daa153..76d891f 100755
--- a/scripts/rosetta/upload-translations.py
+++ b/scripts/rosetta/upload-translations.py
@@ -8,9 +8,8 @@ import _pythonpath # noqa: F401
from lp.translations.scripts.upload_translations import (
UploadPackageTranslations,
- )
+)
-
-if __name__ == '__main__':
- script = UploadPackageTranslations('upload-translations')
+if __name__ == "__main__":
+ script = UploadPackageTranslations("upload-translations")
script.run()
diff --git a/scripts/rosetta/validate-translations-file.py b/scripts/rosetta/validate-translations-file.py
index 342ca8c..e669338 100755
--- a/scripts/rosetta/validate-translations-file.py
+++ b/scripts/rosetta/validate-translations-file.py
@@ -8,8 +8,7 @@ import sys
from lp.translations.scripts.validate_translations_file import (
ValidateTranslationsFile,
- )
-
+)
if __name__ == "__main__":
sys.exit(ValidateTranslationsFile().main())
diff --git a/scripts/script-monitor-nagios.py b/scripts/script-monitor-nagios.py
index de516c9..6832c44 100755
--- a/scripts/script-monitor-nagios.py
+++ b/scripts/script-monitor-nagios.py
@@ -18,25 +18,18 @@ As such, it was felt more appropriate to separate out the scripts,
even though there is some code duplication.
"""
-__all__ = ['check_script']
+__all__ = ["check_script"]
import _pythonpath # noqa: F401
-from datetime import (
- datetime,
- timedelta,
- )
-from optparse import OptionParser
import sys
+from datetime import datetime, timedelta
+from optparse import OptionParser
from time import strftime
from lp.scripts.scriptmonitor import check_script
from lp.services.database.sqlbase import connect
-from lp.services.scripts import (
- db_options,
- logger,
- logger_options,
- )
+from lp.services.scripts import db_options, logger, logger_options
def main():
@@ -45,8 +38,8 @@ def main():
# this should be moved into a testable location.
# Also duplicated code in scripts/script-monitor.py
parser = OptionParser(
- '%prog [options] (minutes) (host:scriptname) [host:scriptname]'
- )
+ "%prog [options] (minutes) (host:scriptname) [host:scriptname]"
+ )
db_options(parser)
logger_options(parser)
@@ -64,12 +57,13 @@ def main():
completed_from = strftime("%Y-%m-%d %H:%M:%S", start_date.timetuple())
completed_to = strftime(
- "%Y-%m-%d %H:%M:%S", datetime.now().timetuple())
+ "%Y-%m-%d %H:%M:%S", datetime.now().timetuple()
+ )
hosts_scripts = []
for arg in args:
try:
- hostname, scriptname = arg.split(':')
+ hostname, scriptname = arg.split(":")
except ValueError:
print("%r is not in the format 'host:scriptname'" % arg)
return 3
@@ -86,14 +80,15 @@ def main():
error_found = False
msg = []
for hostname, scriptname in hosts_scripts:
- failure_msg = check_script(con, log, hostname,
- scriptname, completed_from, completed_to)
+ failure_msg = check_script(
+ con, log, hostname, scriptname, completed_from, completed_to
+ )
if failure_msg is not None:
msg.append("%s:%s" % (hostname, scriptname))
error_found = True
if error_found:
# Construct our return message
- print("Scripts failed to run: %s" % ', '.join(msg))
+ print("Scripts failed to run: %s" % ", ".join(msg))
return 2
else:
# Construct our return message
@@ -105,5 +100,6 @@ def main():
print("Unhandled exception: %s %r" % (e.__class__.__name__, str(e)))
return 3
-if __name__ == '__main__':
+
+if __name__ == "__main__":
sys.exit(main())
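
The argument parsing above hinges on tuple unpacking: arg.split(":") returns a list, and unpacking it into exactly two names raises ValueError when the colon count is wrong. A standalone sketch with a hypothetical argument:

    def parse_host_script(arg):
        try:
            hostname, scriptname = arg.split(":")
        except ValueError:
            raise SystemExit("%r is not in the format 'host:scriptname'" % arg)
        return hostname, scriptname

    print(parse_host_script("host1:update-stats"))  # ('host1', 'update-stats')
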
diff --git a/scripts/script-monitor.py b/scripts/script-monitor.py
index e8a511b..b89e306 100755
--- a/scripts/script-monitor.py
+++ b/scripts/script-monitor.py
@@ -5,27 +5,20 @@
"""Monitor scripts."""
-__all__ = ['check_script']
+__all__ = ["check_script"]
import _pythonpath # noqa: F401
-from datetime import (
- datetime,
- timedelta,
- )
-from email.mime.text import MIMEText
-from optparse import OptionParser
import smtplib
import sys
+from datetime import datetime, timedelta
+from email.mime.text import MIMEText
+from optparse import OptionParser
from time import strftime
from lp.scripts.scriptmonitor import check_script
from lp.services.database.sqlbase import connect
-from lp.services.scripts import (
- db_options,
- logger,
- logger_options,
- )
+from lp.services.scripts import db_options, logger, logger_options
def main():
@@ -34,16 +27,18 @@ def main():
# emails - this should be moved into a testable location.
# Also duplicated code in scripts/script-monitor-nagios.py
parser = OptionParser(
- '%prog [options] (minutes) (host:scriptname) [host:scriptname]'
- )
+ "%prog [options] (minutes) (host:scriptname) [host:scriptname]"
+ )
db_options(parser)
logger_options(parser)
(options, args) = parser.parse_args()
if len(args) < 2:
- parser.error("Must specify at time in minutes and "
- "at least one host and script")
+ parser.error(
+ "Must specify at time in minutes and "
+ "at least one host and script"
+ )
# First argument is the number of minutes into the past
# we want to look for the scripts on the specified hosts
@@ -53,19 +48,22 @@ def main():
completed_from = strftime("%Y-%m-%d %H:%M:%S", start_date.timetuple())
completed_to = strftime(
- "%Y-%m-%d %H:%M:%S", datetime.now().timetuple())
+ "%Y-%m-%d %H:%M:%S", datetime.now().timetuple()
+ )
hosts_scripts = []
for arg in args:
try:
- hostname, scriptname = arg.split(':')
+ hostname, scriptname = arg.split(":")
except ValueError:
parser.error(
- "%r is not in the format 'host:scriptname'" % (arg,))
+ "%r is not in the format 'host:scriptname'" % (arg,)
+ )
hosts_scripts.append((hostname, scriptname))
except ValueError:
- parser.error("Must specify time in minutes and "
- "at least one host and script")
+ parser.error(
+ "Must specify time in minutes and " "at least one host and script"
+ )
log = logger(options)
@@ -75,32 +73,35 @@ def main():
error_found = False
msg, subj = [], []
for hostname, scriptname in hosts_scripts:
- failure_msg = check_script(con, log, hostname,
- scriptname, completed_from, completed_to)
+ failure_msg = check_script(
+ con, log, hostname, scriptname, completed_from, completed_to
+ )
if failure_msg is not None:
msg.append(failure_msg)
subj.append("%s:%s" % (hostname, scriptname))
error_found = 2
if error_found:
# Construct our email.
- msg = MIMEText('\n'.join(msg))
- msg['Subject'] = "Scripts failed to run: %s" % ", ".join(subj)
- msg['From'] = 'script-failures@xxxxxxxxxxxxx'
- msg['Reply-To'] = 'canonical-launchpad@xxxxxxxxxxxxxxxxxxx'
- msg['To'] = 'launchpad-error-reports@xxxxxxxxxxxxxxxxxxx'
+ msg = MIMEText("\n".join(msg))
+ msg["Subject"] = "Scripts failed to run: %s" % ", ".join(subj)
+ msg["From"] = "script-failures@xxxxxxxxxxxxx"
+ msg["Reply-To"] = "canonical-launchpad@xxxxxxxxxxxxxxxxxxx"
+ msg["To"] = "launchpad-error-reports@xxxxxxxxxxxxxxxxxxx"
# Send out the email.
smtp = smtplib.SMTP()
smtp.connect()
smtp.sendmail(
- 'script-failures@xxxxxxxxxxxxx',
- ['launchpad-error-reports@xxxxxxxxxxxxxxxxxxx'],
- msg.as_string())
+ "script-failures@xxxxxxxxxxxxx",
+ ["launchpad-error-reports@xxxxxxxxxxxxxxxxxxx"],
+ msg.as_string(),
+ )
smtp.close()
return 2
except Exception:
log.exception("Unhandled exception")
return 1
-if __name__ == '__main__':
+
+if __name__ == "__main__":
sys.exit(main())
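
For reference, the failure mail built above uses only the stdlib; a minimal sketch with example.com addresses standing in for the obfuscated ones in this archive (it assumes an MTA listening on localhost, as the real script does):

    import smtplib
    from email.mime.text import MIMEText

    failures = ["host1:script-a: last run too old"]
    msg = MIMEText("\n".join(failures))
    msg["Subject"] = "Scripts failed to run: host1:script-a"
    msg["From"] = "script-failures@example.com"
    msg["To"] = "reports@example.com"

    smtp = smtplib.SMTP("localhost")  # assumes a local MTA is listening
    smtp.sendmail(msg["From"], [msg["To"]], msg.as_string())
    smtp.close()
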
diff --git a/scripts/start-loggerhead.py b/scripts/start-loggerhead.py
index 0976311..60e5472 100755
--- a/scripts/start-loggerhead.py
+++ b/scripts/start-loggerhead.py
@@ -7,6 +7,5 @@ import _pythonpath # noqa: F401
from launchpad_loggerhead.wsgi import LoggerheadApplication
-
if __name__ == "__main__":
LoggerheadApplication().run()
diff --git a/scripts/stop-loggerhead.py b/scripts/stop-loggerhead.py
index a93c280..d8341c7 100755
--- a/scripts/stop-loggerhead.py
+++ b/scripts/stop-loggerhead.py
@@ -5,16 +5,12 @@
import _pythonpath # noqa: F401
-from optparse import OptionParser
import sys
+from optparse import OptionParser
-from lp.services.osutils import (
- process_exists,
- two_stage_kill,
- )
+from lp.services.osutils import process_exists, two_stage_kill
from lp.services.pidfile import get_pid
-
parser = OptionParser(description="Stop loggerhead.")
parser.parse_args()
@@ -25,11 +21,11 @@ if pid is None:
sys.exit(0)
if not process_exists(pid):
- print('Stale pid file; server is not running.')
+ print("Stale pid file; server is not running.")
sys.exit(1)
print()
-print('Shutting down previous server @ pid %d.' % (pid,))
+print("Shutting down previous server @ pid %d." % (pid,))
print()
# A busy gunicorn can take a while to shut down.
diff --git a/scripts/suspend-bot-account.py b/scripts/suspend-bot-account.py
index f246ed6..a7c1f72 100755
--- a/scripts/suspend-bot-account.py
+++ b/scripts/suspend-bot-account.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.registry.scripts.suspendbotaccount import SuspendBotAccountScript
-
-if __name__ == '__main__':
- script = SuspendBotAccountScript('suspend-bot-account', dbuser='launchpad')
+if __name__ == "__main__":
+ script = SuspendBotAccountScript("suspend-bot-account", dbuser="launchpad")
script.run()
diff --git a/scripts/sync-branches.py b/scripts/sync-branches.py
index 0ce3481..cdb9c1c 100755
--- a/scripts/sync-branches.py
+++ b/scripts/sync-branches.py
@@ -7,7 +7,6 @@ import _pythonpath # noqa: F401
from lp.codehosting.scripts.sync_branches import SyncBranchesScript
-
if __name__ == "__main__":
script = SyncBranchesScript("sync-branches", dbuser="ro")
script.lock_and_run()
diff --git a/scripts/sync-signingkeys.py b/scripts/sync-signingkeys.py
index 7eeeab0..1d905a6 100755
--- a/scripts/sync-signingkeys.py
+++ b/scripts/sync-signingkeys.py
@@ -9,8 +9,8 @@ import _pythonpath # noqa: F401
from lp.archivepublisher.scripts.sync_signingkeys import SyncSigningKeysScript
from lp.services.config import config
-
-if __name__ == '__main__':
+if __name__ == "__main__":
script = SyncSigningKeysScript(
- 'sync-signingkeys', dbuser=config.archivepublisher.dbuser)
+ "sync-signingkeys", dbuser=config.archivepublisher.dbuser
+ )
script.lock_and_run()
diff --git a/scripts/uct-import.py b/scripts/uct-import.py
index 867aada..1d162fa 100755
--- a/scripts/uct-import.py
+++ b/scripts/uct-import.py
@@ -20,7 +20,7 @@ class UCTImportScript(LaunchpadScript):
def main(self):
if len(self.args) != 1:
- self.parser.error('Please specify a CVE file to import')
+ self.parser.error("Please specify a CVE file to import")
importer = UCTImporter()
@@ -28,6 +28,6 @@ class UCTImportScript(LaunchpadScript):
importer.import_cve_from_file(cve_path)
-if __name__ == '__main__':
- script = UCTImportScript('lp.services.scripts.uctimport')
+if __name__ == "__main__":
+ script = UCTImportScript("lp.services.scripts.uctimport")
script.run()
diff --git a/scripts/update-stacked-on.py b/scripts/update-stacked-on.py
index d8351d6..e6ead96 100755
--- a/scripts/update-stacked-on.py
+++ b/scripts/update-stacked-on.py
@@ -19,8 +19,8 @@ renamed.
import _pythonpath # noqa: F401
-from collections import namedtuple
import sys
+from collections import namedtuple
from breezy import errors
from breezy.branch import UnstackableBranchFormat
@@ -29,14 +29,10 @@ from breezy.config import TransportConfig
from lp.code.interfaces.codehosting import branch_id_alias
from lp.codehosting.bzrutils import get_branch_stacked_on_url
-from lp.codehosting.vfs import (
- get_ro_server,
- get_rw_server,
- )
+from lp.codehosting.vfs import get_ro_server, get_rw_server
from lp.services.scripts.base import LaunchpadScript
-
-FakeBranch = namedtuple('FakeBranch', 'id')
+FakeBranch = namedtuple("FakeBranch", "id")
def set_branch_stacked_on_url(bzrdir, stacked_on_url):
@@ -47,27 +43,38 @@ def set_branch_stacked_on_url(bzrdir, stacked_on_url):
something we don't yet have.
"""
branch_transport = bzrdir.get_branch_transport(None)
- branch_config = TransportConfig(branch_transport, 'branch.conf')
+ branch_config = TransportConfig(branch_transport, "branch.conf")
stacked_on_url = branch_config.set_option(
- stacked_on_url, 'stacked_on_location')
+ stacked_on_url, "stacked_on_location"
+ )
class UpdateStackedBranches(LaunchpadScript):
"""Update stacked branches so their stacked_on_location matches the db."""
def __init__(self):
- super().__init__('update-stacked-on')
+ super().__init__("update-stacked-on")
def add_my_options(self):
self.parser.add_option(
- '-n', '--dry-run', default=False, action="store_true",
+ "-n",
+ "--dry-run",
+ default=False,
+ action="store_true",
dest="dry_run",
- help=("Don't change anything on disk, just go through the "
- "motions."))
+ help=(
+ "Don't change anything on disk, just go through the "
+ "motions."
+ ),
+ )
self.parser.add_option(
- '-i', '--id', default=False, action="store_true",
+ "-i",
+ "--id",
+ default=False,
+ action="store_true",
dest="stack_on_id",
- help=("Stack on the +branch-id alias."))
+ help=("Stack on the +branch-id alias."),
+ )
def main(self):
if self.options.dry_run:
@@ -76,13 +83,13 @@ class UpdateStackedBranches(LaunchpadScript):
server = get_rw_server()
server.start_server()
if self.options.dry_run:
- self.logger.debug('Running read-only')
- self.logger.debug('Beginning processing')
+ self.logger.debug("Running read-only")
+ self.logger.debug("Beginning processing")
try:
self.updateBranches(self.parseFromStream(sys.stdin))
finally:
server.stop_server()
- self.logger.info('Done')
+ self.logger.info("Done")
def updateStackedOn(self, branch_id, bzr_branch_url, stacked_on_location):
"""Stack the Bazaar branch at 'bzr_branch_url' on the given URL.
@@ -97,28 +104,37 @@ class UpdateStackedBranches(LaunchpadScript):
bzrdir = BzrDir.open(bzr_branch_url)
except errors.NotBranchError:
self.logger.warning(
- "No bzrdir for %r at %r" % (branch_id, bzr_branch_url))
+ "No bzrdir for %r at %r" % (branch_id, bzr_branch_url)
+ )
return
try:
current_stacked_on_location = get_branch_stacked_on_url(bzrdir)
except errors.NotBranchError:
self.logger.warning(
- "No branch for %r at %r" % (branch_id, bzr_branch_url))
+ "No branch for %r at %r" % (branch_id, bzr_branch_url)
+ )
except errors.NotStacked:
self.logger.warning(
"Branch for %r at %r is not stacked at all. Giving up."
- % (branch_id, bzr_branch_url))
+ % (branch_id, bzr_branch_url)
+ )
except UnstackableBranchFormat:
self.logger.error(
"Branch for %r at %r is unstackable. Giving up."
- % (branch_id, bzr_branch_url))
+ % (branch_id, bzr_branch_url)
+ )
else:
if current_stacked_on_location != stacked_on_location:
self.logger.info(
- 'Branch for %r at %r stacked on %r, should be on %r.'
- % (branch_id, bzr_branch_url, current_stacked_on_location,
- stacked_on_location))
+ "Branch for %r at %r stacked on %r, should be on %r."
+ % (
+ branch_id,
+ bzr_branch_url,
+ current_stacked_on_location,
+ stacked_on_location,
+ )
+ )
if not self.options.dry_run:
set_branch_stacked_on_url(bzrdir, stacked_on_location)
@@ -141,17 +157,22 @@ class UpdateStackedBranches(LaunchpadScript):
unique_name, stacked_on_unique_name).
"""
for branch_info in branches:
- (branch_id, branch_type, unique_name,
- stacked_on_id, stacked_on_name) = branch_info
+ (
+ branch_id,
+ branch_type,
+ unique_name,
+ stacked_on_id,
+ stacked_on_name,
+ ) = branch_info
if self.options.stack_on_id:
branch = FakeBranch(stacked_on_id)
stacked_on_location = branch_id_alias(branch)
else:
- stacked_on_location = '/' + stacked_on_name
+ stacked_on_location = "/" + stacked_on_name
self.updateStackedOn(
- branch_id, 'lp-internal:///' + unique_name,
- stacked_on_location)
+ branch_id, "lp-internal:///" + unique_name, stacked_on_location
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
UpdateStackedBranches().lock_and_run()
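
The two stacking modes differ only in how the stacked-on path is spelled: an id-based alias from branch_id_alias, or "/" plus the branch's unique name. A sketch of the selection with hypothetical values (the "/+branch-id/<id>" shape is an assumption about what branch_id_alias returns):

    from collections import namedtuple

    FakeBranch = namedtuple("FakeBranch", "id")

    stack_on_id = True
    stacked_on_id, stacked_on_name = 12345, "~owner/project/trunk"
    if stack_on_id:
        branch = FakeBranch(stacked_on_id)
        stacked_on_location = "/+branch-id/%d" % branch.id  # assumed alias shape
    else:
        stacked_on_location = "/" + stacked_on_name
    print(stacked_on_location)
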
diff --git a/scripts/upgrade_all_branches.py b/scripts/upgrade_all_branches.py
index 4c95588..a06c3f5 100755
--- a/scripts/upgrade_all_branches.py
+++ b/scripts/upgrade_all_branches.py
@@ -5,24 +5,22 @@ import _pythonpath # noqa: F401
from lp.codehosting.bzrutils import server
from lp.codehosting.upgrade import Upgrader
from lp.codehosting.vfs.branchfs import get_rw_server
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class UpgradeAllBranches(LaunchpadScript):
-
def add_my_options(self):
self.parser.add_option(
- '--finish', action="store_true",
- help=("Finish the upgrade and move the new branches into place."))
+ "--finish",
+ action="store_true",
+ help=("Finish the upgrade and move the new branches into place."),
+ )
def main(self):
if len(self.args) < 1:
- raise LaunchpadScriptFailure('Please specify a target directory.')
+ raise LaunchpadScriptFailure("Please specify a target directory.")
if len(self.args) > 1:
- raise LaunchpadScriptFailure('Too many arguments.')
+ raise LaunchpadScriptFailure("Too many arguments.")
target_dir = self.args[0]
with server(get_rw_server()):
if self.options.finish:
@@ -33,5 +31,6 @@ class UpgradeAllBranches(LaunchpadScript):
if __name__ == "__main__":
script = UpgradeAllBranches(
- "upgrade-all-branches", dbuser='upgrade-branches')
+ "upgrade-all-branches", dbuser="upgrade-branches"
+ )
script.lock_and_run()
diff --git a/scripts/upload2librarian.py b/scripts/upload2librarian.py
index 31c2313..c06dc30 100755
--- a/scripts/upload2librarian.py
+++ b/scripts/upload2librarian.py
@@ -14,10 +14,7 @@ from zope.component import getUtility
from lp.services.helpers import filenameToContentType
from lp.services.librarian.interfaces import ILibraryFileAliasSet
-from lp.services.scripts.base import (
- LaunchpadScript,
- LaunchpadScriptFailure,
- )
+from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure
class LibrarianUploader(LaunchpadScript):
@@ -26,15 +23,19 @@ class LibrarianUploader(LaunchpadScript):
loglevel = logging.INFO
def add_my_options(self):
- self.parser.set_defaults(format='simple')
+ self.parser.set_defaults(format="simple")
self.parser.add_option(
- "-f", "--file", dest="filepath", metavar="FILE",
- help="filename to upload")
+ "-f",
+ "--file",
+ dest="filepath",
+ metavar="FILE",
+ help="filename to upload",
+ )
def main(self):
"""Upload file, commit the transaction and prints the file URL."""
if self.options.filepath is None:
- raise LaunchpadScriptFailure('File not provided.')
+ raise LaunchpadScriptFailure("File not provided.")
library_file = self.upload_file(self.options.filepath)
@@ -54,16 +55,17 @@ class LibrarianUploader(LaunchpadScript):
try:
file = open(filepath, "rb")
except OSError:
- raise LaunchpadScriptFailure('Could not open: %s' % filepath)
+ raise LaunchpadScriptFailure("Could not open: %s" % filepath)
flen = os.stat(filepath).st_size
filename = os.path.basename(filepath)
ftype = filenameToContentType(filename)
library_file = getUtility(ILibraryFileAliasSet).create(
- filename, flen, file, contentType=ftype)
+ filename, flen, file, contentType=ftype
+ )
return library_file
-if __name__ == '__main__':
- script = LibrarianUploader('librarian-uploader')
+if __name__ == "__main__":
+ script = LibrarianUploader("librarian-uploader")
script.run()
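
Before calling ILibraryFileAliasSet.create, the uploader derives the file's size, base name, and content type; a stdlib-only approximation (mimetypes stands in here for Launchpad's filenameToContentType):

    import mimetypes
    import os
    import tempfile

    # Create a throwaway file so the sketch runs end to end.
    with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as f:
        f.write(b"hello librarian")
        filepath = f.name

    flen = os.stat(filepath).st_size
    filename = os.path.basename(filepath)
    ftype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
    print(filename, flen, ftype)
    os.unlink(filepath)
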
diff --git a/scripts/wsgi-archive-auth.py b/scripts/wsgi-archive-auth.py
index 77fdf62..00042a1 100755
--- a/scripts/wsgi-archive-auth.py
+++ b/scripts/wsgi-archive-auth.py
@@ -13,15 +13,14 @@ as closely as possible.
"""
__all__ = [
- 'check_password',
- ]
+ "check_password",
+]
# mod_wsgi imports this file without a useful sys.path, so we need some
# acrobatics to set ourselves up properly.
import os.path
import sys
-
scripts_dir = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))
if scripts_dir not in sys.path:
sys.path.insert(0, scripts_dir)
@@ -58,7 +57,8 @@ def main():
args = parser.parse_args()
archiveauth._memcache_client = MemcacheFixture()
result = check_password(
- {"SCRIPT_NAME": args.archive_path}, args.username, args.password)
+ {"SCRIPT_NAME": args.archive_path}, args.username, args.password
+ )
if result is None:
return 2
elif result is False: