launchpad-reviewers team mailing list archive
Message #22314
[Merge] lp:~cjwatson/launchpad/code-import-data-send-dict into lp:launchpad
Colin Watson has proposed merging lp:~cjwatson/launchpad/code-import-data-send-dict into lp:launchpad.
Commit message:
Push blacklisted_hostnames to code import workers via the scheduler.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~cjwatson/launchpad/code-import-data-send-dict/+merge/342220
This will make it easier to split out the code import workers to a separate codebase without having to duplicate configuration.
In the process, I've started making use of the fact that code import workers now allow CodeImportSchedulerAPI.getImportDataForJobID to return a dict.
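To make the dict/tuple point concrete, here is a rough sketch (not code from this branch; the helper name is made up) of how a worker-side caller that still has to talk to older schedulers might normalise the result of getImportDataForJobID:

    def normalise_import_data(result):
        # Sketch only: accept either the new dict form or the old
        # (arguments, target_url, log_file_name) tuple.
        if isinstance(result, dict):
            return result
        arguments, target_url, log_file_name = result
        return {
            'arguments': arguments,
            'target_url': target_url,
            'log_file_name': log_file_name,
            'blacklisted_hostnames': [],
            }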
--
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~cjwatson/launchpad/code-import-data-send-dict into lp:launchpad.
=== modified file 'lib/lp/code/xmlrpc/codeimportscheduler.py'
--- lib/lp/code/xmlrpc/codeimportscheduler.py 2018-03-15 20:44:04 +0000
+++ lib/lp/code/xmlrpc/codeimportscheduler.py 2018-03-27 16:47:23 +0000
@@ -13,6 +13,7 @@
 from zope.security.proxy import removeSecurityProxy
 
 from lp.code.enums import CodeImportResultStatus
+from lp.code.interfaces.branch import get_blacklisted_hostnames
 from lp.code.interfaces.codeimportjob import (
     ICodeImportJobSet,
     ICodeImportJobWorkflow,
@@ -66,11 +67,14 @@
     @return_fault
     def _getImportDataForJobID(self, job_id):
         job = self._getJob(job_id)
-        arguments = job.makeWorkerArguments()
         target = job.code_import.target
-        target_url = canonical_url(target)
-        log_file_name = '%s.log' % target.unique_name[1:].replace('/', '-')
-        return (arguments, target_url, log_file_name)
+        return {
+            'arguments': job.makeWorkerArguments(),
+            'target_url': canonical_url(target),
+            'log_file_name': '%s.log' % (
+                target.unique_name[1:].replace('/', '-')),
+            'blacklisted_hostnames': get_blacklisted_hostnames(),
+            }
 
     @return_fault
     def _updateHeartbeat(self, job_id, log_tail):
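For context: the new get_blacklisted_hostnames helper is expected to turn the comma-separated codehosting.blacklisted_hostnames config entry into a list (the test below relies on exactly that). A minimal sketch of that behaviour, assuming the standard lp.services.config accessor and not necessarily matching the real implementation in lp.code.interfaces.branch:

    from lp.services.config import config

    def get_blacklisted_hostnames():
        # Sketch only: return the comma-separated config value as a list,
        # or an empty list if the option is unset.
        hostnames = config.codehosting.blacklisted_hostnames
        if not hostnames:
            return []
        return hostnames.split(',')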
=== modified file 'lib/lp/code/xmlrpc/tests/test_codeimportscheduler.py'
--- lib/lp/code/xmlrpc/tests/test_codeimportscheduler.py 2018-03-15 20:44:04 +0000
+++ lib/lp/code/xmlrpc/tests/test_codeimportscheduler.py 2018-03-27 16:47:23 +0000
@@ -58,17 +58,19 @@
     def test_getImportDataForJobID(self):
         # getImportDataForJobID returns the worker arguments, target url and
         # log file name for an import corresponding to a particular job.
+        self.pushConfig(
+            'codehosting', blacklisted_hostnames='localhost,127.0.0.1')
         code_import_job = self.makeCodeImportJob(running=True)
         code_import = removeSecurityProxy(code_import_job).code_import
-        code_import_arguments, target_url, log_file_name = \
-            self.api.getImportDataForJobID(code_import_job.id)
-        import_as_arguments = code_import_job.makeWorkerArguments()
-        expected_log_file_name = '%s.log' % (
-            code_import.target.unique_name[1:].replace('/', '-'))
-        self.assertEqual(
-            (import_as_arguments, canonical_url(code_import.target),
-             expected_log_file_name),
-            (code_import_arguments, target_url, log_file_name))
+        data = self.api.getImportDataForJobID(code_import_job.id)
+        expected_data = {
+            'arguments': code_import_job.makeWorkerArguments(),
+            'target_url': canonical_url(code_import.target),
+            'log_file_name': '%s.log' % (
+                code_import.target.unique_name[1:].replace('/', '-')),
+            'blacklisted_hostnames': ['localhost', '127.0.0.1'],
+            }
+        self.assertEqual(expected_data, data)
 
     def test_getImportDataForJobID_not_found(self):
         # getImportDataForJobID returns a NoSuchCodeImportJob fault when there
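As a usage illustration (the scheduler URL and job ID below are placeholders, not values from Launchpad), a worker-side XML-RPC client would now see the job data as a struct rather than an array:

    from xmlrpclib import ServerProxy  # xmlrpc.client on Python 3

    scheduler = ServerProxy('http://scheduler.example.com:8080/codeimportscheduler')
    data = scheduler.getImportDataForJobID(1)
    # e.g. {'arguments': [...], 'target_url': '...', 'log_file_name': '...',
    #       'blacklisted_hostnames': ['localhost', '127.0.0.1']}
    print(data['blacklisted_hostnames'])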