launchpad-reviewers team mailing list archive

[Merge] lp:~julian-edwards/maas/backport-r1293 into lp:maas/1.2

 

Julian Edwards has proposed merging lp:~julian-edwards/maas/backport-r1293 into lp:maas/1.2.

Commit message:
Capture log messages emitted during cluster worker start-up. (backport r1293 from trunk)

Requested reviews:
  MAAS Maintainers (maas-maintainers)
Related bugs:
  Bug #1066668 in MAAS: "No indication if cluster controller is failing to connect"
  https://bugs.launchpad.net/maas/+bug/1066668

For more details, see:
https://code.launchpad.net/~julian-edwards/maas/backport-r1293/+merge/137485
Your team MAAS Maintainers is requested to review the proposed merge of lp:~julian-edwards/maas/backport-r1293 into lp:maas/1.2.
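
For context, here is a minimal sketch (not taken from the branch) of the logging pattern the diff below adopts: each task module creates its own named logger via celery.log.get_task_logger, and the cluster controller start-up path calls setup_logging_subsystem early so its messages are captured. Only get_task_logger, setup_logging_subsystem and the CELERYD_* settings appear in the diff; the function name below is illustrative.

    from celery.log import (
        get_task_logger,
        setup_logging_subsystem,
        )

    # Each task module now creates its own named logger instead of importing
    # a shared one from the removed provisioningserver.logging module.
    task_logger = get_task_logger(name=__name__)


    def run():
        # Configure Celery's logging early so messages emitted during cluster
        # worker start-up (e.g. registration with the region controller) are
        # captured in CELERYD_LOG_FILE instead of being lost before celeryd
        # takes over.
        setup_logging_subsystem()
        task_logger.info("Cluster worker starting up.")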
=== modified file 'etc/celeryconfig_common.py'
--- etc/celeryconfig_common.py	2012-10-30 11:26:16 +0000
+++ etc/celeryconfig_common.py	2012-12-03 06:33:20 +0000
@@ -46,11 +46,12 @@
 # this setting.
 BROKER_URL = 'amqp://guest:guest@localhost:5672//'
 
-# Location for log file.
-MAAS_CELERY_LOG = '/var/log/maas/celery.log'
+# Logging.
+CELERYD_LOG_FILE = '/var/log/maas/celery.log'
+CELERYD_LOG_LEVEL = 'INFO'
 
 # Location for the cluster worker schedule file.
-MAAS_CLUSTER_CELERY_DB = '/var/lib/maas/celerybeat-cluster-schedule'
+CELERYBEAT_SCHEDULE_FILENAME = '/var/lib/maas/celerybeat-cluster-schedule'
 
 WORKER_QUEUE_DNS = 'celery'
 

=== modified file 'etc/democeleryconfig.py'
--- etc/democeleryconfig.py	2012-10-03 14:18:17 +0000
+++ etc/democeleryconfig.py	2012-12-03 06:33:20 +0000
@@ -26,5 +26,5 @@
 
 import_settings(democeleryconfig_common)
 
-MAAS_CELERY_LOG = os.path.join(
+CELERYD_LOG_FILE = os.path.join(
     DEV_ROOT_DIRECTORY, 'logs/region-worker/current')

=== modified file 'etc/democeleryconfig_cluster.py'
--- etc/democeleryconfig_cluster.py	2012-10-11 12:42:06 +0000
+++ etc/democeleryconfig_cluster.py	2012-12-03 06:33:20 +0000
@@ -30,8 +30,8 @@
 # maas_local_celeryconfig.
 CLUSTER_UUID = "adfd3977-f251-4f2c-8d61-745dbd690bfc"
 
-MAAS_CELERY_LOG = os.path.join(
+CELERYD_LOG_FILE = os.path.join(
     DEV_ROOT_DIRECTORY, 'logs/cluster-worker/current')
 
-MAAS_CLUSTER_CELERY_DB = os.path.join(
+CELERYBEAT_SCHEDULE_FILENAME = os.path.join(
     DEV_ROOT_DIRECTORY, 'run/celerybeat-cluster-schedule')

=== modified file 'src/provisioningserver/boot_images.py'
--- src/provisioningserver/boot_images.py	2012-10-30 10:25:24 +0000
+++ src/provisioningserver/boot_images.py	2012-12-03 06:33:20 +0000
@@ -25,16 +25,19 @@
     MAASDispatcher,
     MAASOAuth,
     )
+from celery.log import get_task_logger
 from provisioningserver.auth import (
     get_recorded_api_credentials,
     get_recorded_maas_url,
     )
 from provisioningserver.config import Config
-from provisioningserver.logging import task_logger
 from provisioningserver.pxe import tftppath
 from provisioningserver.start_cluster_controller import get_cluster_uuid
 
 
+task_logger = get_task_logger(name=__name__)
+
+
 def get_cached_knowledge():
     """Return cached items required to report to the server.
 

=== modified file 'src/provisioningserver/dhcp/leases.py'
--- src/provisioningserver/dhcp/leases.py	2012-10-04 12:24:31 +0000
+++ src/provisioningserver/dhcp/leases.py	2012-12-03 06:33:20 +0000
@@ -45,6 +45,7 @@
     MAASOAuth,
     )
 from celery.app import app_or_default
+from celery.log import get_task_logger
 from provisioningserver import cache
 from provisioningserver.auth import (
     get_recorded_api_credentials,
@@ -52,7 +53,10 @@
     get_recorded_nodegroup_uuid,
     )
 from provisioningserver.dhcp.leases_parser import parse_leases
-from provisioningserver.logging import task_logger
+
+
+task_logger = get_task_logger(name=__name__)
+
 
 # Cache key for the modification time on last-processed leases file.
 LEASES_TIME_CACHE_KEY = 'leases_time'

=== removed file 'src/provisioningserver/logging.py'
--- src/provisioningserver/logging.py	2012-08-10 14:15:23 +0000
+++ src/provisioningserver/logging.py	1970-01-01 00:00:00 +0000
@@ -1,22 +0,0 @@
-# Copyright 2012 Canonical Ltd.  This software is licensed under the
-# GNU Affero General Public License version 3 (see the file LICENSE).
-
-"""Celery logging."""
-
-from __future__ import (
-    absolute_import,
-    print_function,
-    unicode_literals,
-    )
-
-__metaclass__ = type
-__all__ = [
-    'task_logger',
-    ]
-
-
-from celery.log import get_task_logger
-
-# Celery task logger.  Shared between tasks, as per Celery's recommended
-# practice.
-task_logger = get_task_logger()

=== modified file 'src/provisioningserver/start_cluster_controller.py'
--- src/provisioningserver/start_cluster_controller.py	2012-11-26 02:54:49 +0000
+++ src/provisioningserver/start_cluster_controller.py	2012-12-03 06:33:20 +0000
@@ -32,10 +32,16 @@
     NoAuth,
     )
 from celery.app import app_or_default
-from provisioningserver.logging import task_logger
+from celery.log import (
+    get_task_logger,
+    setup_logging_subsystem,
+    )
 from provisioningserver.network import discover_networks
 
 
+task_logger = get_task_logger(name=__name__)
+
+
 class ClusterControllerRejected(Exception):
     """Request to become a cluster controller has been rejected."""
 
@@ -68,16 +74,6 @@
     return app_or_default().conf.CLUSTER_UUID
 
 
-def get_maas_celery_log():
-    """Read location for MAAS Celery log file from the config."""
-    return app_or_default().conf.MAAS_CELERY_LOG
-
-
-def get_maas_celerybeat_db():
-    """Read location for MAAS Celery schedule file from the config."""
-    return app_or_default().conf.MAAS_CLUSTER_CELERY_DB
-
-
 def register(server_url):
     """Request Rabbit connection details from the domain controller.
 
@@ -136,15 +132,7 @@
     # and the URL for the region controller.
     env = dict(
         os.environ, CELERY_BROKER_URL=broker_url, MAAS_URL=server_url)
-
-    command = [
-        'celeryd',
-        '--logfile=%s' % get_maas_celery_log(),
-        '--schedule=%s' % get_maas_celerybeat_db(),
-        '--loglevel=INFO',
-        '--beat',
-        '-Q', get_cluster_uuid(),
-        ]
+    command = 'celeryd', '--beat', '--queues', get_cluster_uuid()
 
     # Change gid first, just in case changing the uid might deprive
     # us of the privileges required to setgid.
@@ -184,6 +172,7 @@
     If this system is still awaiting approval as a cluster controller, this
     command will keep looping until it gets a definite answer.
     """
+    setup_logging_subsystem()
     connection_details = register(args.server_url)
     while connection_details is None:
         sleep(60)

=== modified file 'src/provisioningserver/tags.py'
--- src/provisioningserver/tags.py	2012-10-11 10:41:00 +0000
+++ src/provisioningserver/tags.py	2012-12-03 06:33:20 +0000
@@ -19,22 +19,23 @@
 
 
 import httplib
-import simplejson as json
-from lxml import etree
 
 from apiclient.maas_client import (
     MAASClient,
     MAASDispatcher,
     MAASOAuth,
     )
-
+from celery.log import get_task_logger
+from lxml import etree
 from provisioningserver.auth import (
     get_recorded_api_credentials,
     get_recorded_maas_url,
     get_recorded_nodegroup_uuid,
     )
-
-from provisioningserver.logging import task_logger
+import simplejson as json
+
+
+task_logger = get_task_logger(name=__name__)
 
 
 class MissingCredentials(Exception):

=== modified file 'src/provisioningserver/tests/test_start_cluster_controller.py'
--- src/provisioningserver/tests/test_start_cluster_controller.py	2012-11-26 02:54:49 +0000
+++ src/provisioningserver/tests/test_start_cluster_controller.py	2012-12-03 06:33:20 +0000
@@ -94,6 +94,7 @@
         self.patch(os, 'setuid')
         self.patch(os, 'setgid')
         self.patch(os, 'execvpe').side_effect = Executing()
+        self.patch(start_cluster_controller, 'setup_logging_subsystem')
         get_uuid = self.patch(start_cluster_controller, 'get_cluster_uuid')
         get_uuid.return_value = factory.getRandomUUID()
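
As a footnote on the start-cluster-controller change: the celeryd invocation is pared down because the log file, log level, and celerybeat schedule now come from the Celery configuration rather than command-line flags. A rough sketch of the resulting start-up call follows; the server URL is a hypothetical value, the broker URL and cluster UUID are the defaults shown in the config files above.

    import os

    # Sketch only: the real code reads these from the MAAS config and the
    # Celery app configuration.
    broker_url = 'amqp://guest:guest@localhost:5672//'
    server_url = 'http://example.com/MAAS'
    cluster_uuid = 'adfd3977-f251-4f2c-8d61-745dbd690bfc'

    env = dict(os.environ, CELERY_BROKER_URL=broker_url, MAAS_URL=server_url)
    # --logfile, --loglevel and --schedule are gone: celeryd now picks up
    # CELERYD_LOG_FILE, CELERYD_LOG_LEVEL and CELERYBEAT_SCHEDULE_FILENAME
    # from celeryconfig_common.py.
    command = ('celeryd', '--beat', '--queues', cluster_uuid)
    os.execvpe(command[0], command, env)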