← Back to team overview

launchpad-reviewers team mailing list archive

[Merge] lp:~jtv/maas/defer-task-logger into lp:maas

 

Jeroen T. Vermeulen has proposed merging lp:~jtv/maas/defer-task-logger into lp:maas.

Commit message:
Defer celery imports in pserv that may load config and issue warnings.

Requested reviews:
  Launchpad code reviewers (launchpad-reviewers)

For more details, see:
https://code.launchpad.net/~jtv/maas/defer-task-logger/+merge/134679

Discussed the details with Gavin.  This avoids problems with maas-provision importing celery modules that, as a side effect, try to read celeryconfig.py.  That module is not always available — e.g. on a dev system, or on a cluster controller that isn't also a region controller.  In those cases celery hasn't been told to load the right config module, and so it may output inappropriate warnings.

Ultimately the start_cluster_controller command will still need the problematic celery imports, but at least they won't be triggered by mere imports when running other maas-provision commands.


Jeroen
-- 
https://code.launchpad.net/~jtv/maas/defer-task-logger/+merge/134679
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~jtv/maas/defer-task-logger into lp:maas.
=== modified file 'src/provisioningserver/boot_images.py'
--- src/provisioningserver/boot_images.py	2012-11-08 06:34:48 +0000
+++ src/provisioningserver/boot_images.py	2012-11-16 14:58:20 +0000
@@ -19,13 +19,13 @@
     ]
 
 import json
+from logging import getLogger
 
 from apiclient.maas_client import (
     MAASClient,
     MAASDispatcher,
     MAASOAuth,
     )
-from celery.log import get_task_logger
 from provisioningserver.auth import (
     get_recorded_api_credentials,
     get_recorded_maas_url,
@@ -35,7 +35,7 @@
 from provisioningserver.start_cluster_controller import get_cluster_uuid
 
 
-task_logger = get_task_logger(name=__name__)
+logger = getLogger(__name__)
 
 
 def get_cached_knowledge():
@@ -46,10 +46,10 @@
     """
     maas_url = get_recorded_maas_url()
     if maas_url is None:
-        task_logger.debug("Not reporting boot images: don't have API URL yet.")
+        logger.debug("Not reporting boot images: don't have API URL yet.")
     api_credentials = get_recorded_api_credentials()
     if api_credentials is None:
-        task_logger.debug("Not reporting boot images: don't have API key yet.")
+        logger.debug("Not reporting boot images: don't have API key yet.")
     return maas_url, api_credentials
 
 

=== modified file 'src/provisioningserver/dhcp/leases.py'
--- src/provisioningserver/dhcp/leases.py	2012-10-19 15:08:52 +0000
+++ src/provisioningserver/dhcp/leases.py	2012-11-16 14:58:20 +0000
@@ -34,6 +34,7 @@
 
 import errno
 import json
+from logging import getLogger
 from os import (
     fstat,
     stat,
@@ -45,7 +46,6 @@
     MAASOAuth,
     )
 from celery.app import app_or_default
-from celery.log import get_task_logger
 from provisioningserver import cache
 from provisioningserver.auth import (
     get_recorded_api_credentials,
@@ -55,7 +55,7 @@
 from provisioningserver.dhcp.leases_parser import parse_leases
 
 
-task_logger = get_task_logger(name=__name__)
+logger = getLogger(__name__)
 
 
 # Cache key for the modification time on last-processed leases file.
@@ -157,7 +157,7 @@
     if None in knowledge.values():
         # The MAAS server hasn't sent us enough information for us to do
         # this yet.  Leave it for another time.
-        task_logger.info(
+        logger.info(
             "Not sending DHCP leases to server: not all required knowledge "
             "received from server yet.  "
             "Missing: %s"
@@ -189,7 +189,7 @@
         timestamp, leases = parse_result
         process_leases(timestamp, leases)
     else:
-        task_logger.info(
+        logger.info(
             "The DHCP leases file does not exist.  This is only a problem if "
             "this cluster controller is managing its DHCP server.  If that's "
             "the case then you need to install the 'maas-dhcp' package on "

=== modified file 'src/provisioningserver/start_cluster_controller.py'
--- src/provisioningserver/start_cluster_controller.py	2012-10-23 12:48:31 +0000
+++ src/provisioningserver/start_cluster_controller.py	2012-11-16 14:58:20 +0000
@@ -18,6 +18,7 @@
 from grp import getgrnam
 import httplib
 import json
+from logging import getLogger
 import os
 from pwd import getpwnam
 from time import sleep
@@ -31,15 +32,10 @@
     MAASDispatcher,
     NoAuth,
     )
-from celery.app import app_or_default
-from celery.log import (
-    get_task_logger,
-    setup_logging_subsystem,
-    )
 from provisioningserver.network import discover_networks
 
 
-task_logger = get_task_logger(name=__name__)
+logger = getLogger(__name__)
 
 
 class ClusterControllerRejected(Exception):
@@ -59,7 +55,7 @@
 
 
 def log_error(exception):
-    task_logger.info(
+    logger.info(
         "Could not register with region controller: %s."
         % exception.reason)
 
@@ -71,6 +67,9 @@
 
 def get_cluster_uuid():
     """Read this cluster's UUID from the config."""
+    # Import this lazily.  It reads config as a side effect, which can
+    # produce warnings.
+    from celery.app import app_or_default
     return app_or_default().conf.CLUSTER_UUID
 
 
@@ -145,7 +144,7 @@
     try:
         client.post('api/1.0/nodegroups/', 'refresh_workers')
     except URLError as e:
-        task_logger.warn(
+        logger.warn(
             "Could not request secrets from region controller: %s"
             % e.reason)
 
@@ -164,13 +163,23 @@
     start_celery(connection_details, user=user, group=group)
 
 
+def set_up_logging():
+    """Set up logging."""
+    # This import has side effects (it imports celeryconfig) and may
+    # produce warnings (if there is no celeryconfig).
+    # Postpone the import so that we don't go through that every time
+    # anything imports this module.
+    from celery.log import setup_logging_subsystem
+    setup_logging_subsystem()
+
+
 def run(args):
     """Start the cluster controller.
 
     If this system is still awaiting approval as a cluster controller, this
     command will keep looping until it gets a definite answer.
     """
-    setup_logging_subsystem()
+    set_up_logging()
     connection_details = register(args.server_url)
     while connection_details is None:
         sleep(60)

=== modified file 'src/provisioningserver/tags.py'
--- src/provisioningserver/tags.py	2012-10-26 10:24:52 +0000
+++ src/provisioningserver/tags.py	2012-11-16 14:58:20 +0000
@@ -19,6 +19,7 @@
 
 
 import httplib
+from logging import getLogger
 import urllib2
 
 from apiclient.maas_client import (
@@ -26,7 +27,6 @@
     MAASDispatcher,
     MAASOAuth,
     )
-from celery.log import get_task_logger
 from lxml import etree
 from provisioningserver.auth import (
     get_recorded_api_credentials,
@@ -36,7 +36,7 @@
 import simplejson as json
 
 
-task_logger = get_task_logger(name=__name__)
+logger = getLogger(__name__)
 
 
 class MissingCredentials(Exception):
@@ -53,15 +53,15 @@
     """
     maas_url = get_recorded_maas_url()
     if maas_url is None:
-        task_logger.error("Not updating tags: don't have API URL yet.")
+        logger.error("Not updating tags: don't have API URL yet.")
         return None, None
     api_credentials = get_recorded_api_credentials()
     if api_credentials is None:
-        task_logger.error("Not updating tags: don't have API key yet.")
+        logger.error("Not updating tags: don't have API key yet.")
         return None, None
     nodegroup_uuid = get_recorded_nodegroup_uuid()
     if nodegroup_uuid is None:
-        task_logger.error("Not updating tags: don't have UUID yet.")
+        logger.error("Not updating tags: don't have UUID yet.")
         return None, None
     client = MAASClient(MAASOAuth(*api_credentials), MAASDispatcher(),
         maas_url)
@@ -119,7 +119,8 @@
     :param removed: Set of nodes to remove
     """
     path = '/api/1.0/tags/%s/' % (tag_name,)
-    task_logger.debug('Updating nodes for %s %s, adding %s removing %s'
+    logger.debug(
+        "Updating nodes for %s %s, adding %s removing %s"
         % (tag_name, uuid, len(added), len(removed)))
     try:
         return process_response(client.post(
@@ -131,7 +132,7 @@
                 msg = e.fp.read()
             else:
                 msg = e.msg
-            task_logger.info('Got a CONFLICT while updating tag: %s', msg)
+            logger.info("Got a CONFLICT while updating tag: %s", msg)
             return {}
         raise
 
@@ -148,8 +149,8 @@
             try:
                 xml = etree.XML(hw_xml)
             except etree.XMLSyntaxError as e:
-                task_logger.debug('Invalid hardware_details for %s: %s'
-                    % (system_id, e))
+                logger.debug(
+                    "Invalid hardware_details for %s: %s" % (system_id, e))
             else:
                 if xpath(xml):
                     matched = True
@@ -166,16 +167,17 @@
         batch_size = DEFAULT_BATCH_SIZE
     all_matched = []
     all_unmatched = []
-    task_logger.debug('processing %d system_ids for tag %s nodegroup %s'
-                      % (len(system_ids), tag_name, nodegroup_uuid))
+    logger.debug(
+        "processing %d system_ids for tag %s nodegroup %s"
+        % (len(system_ids), tag_name, nodegroup_uuid))
     for i in range(0, len(system_ids), batch_size):
         selected_ids = system_ids[i:i + batch_size]
         details = get_hardware_details_for_nodes(
             client, nodegroup_uuid, selected_ids)
         matched, unmatched = process_batch(xpath, details)
-        task_logger.debug(
-            'processing batch of %d ids received %d details'
-            ' (%d matched, %d unmatched)'
+        logger.debug(
+            "processing batch of %d ids received %d details"
+            " (%d matched, %d unmatched)"
             % (len(selected_ids), len(details), len(matched), len(unmatched)))
         all_matched.extend(matched)
         all_unmatched.extend(unmatched)
@@ -197,8 +199,9 @@
     """
     client, nodegroup_uuid = get_cached_knowledge()
     if not all([client, nodegroup_uuid]):
-        task_logger.error('Unable to update tag: %s for definition %r'
-            ' please refresh secrets, then rebuild this tag'
+        logger.error(
+            "Unable to update tag: %s for definition %r. "
+            "Please refresh secrets, then rebuild this tag."
             % (tag_name, tag_definition))
         raise MissingCredentials()
     # We evaluate this early, so we can fail before sending a bunch of data to

=== modified file 'src/provisioningserver/tests/test_start_cluster_controller.py'
--- src/provisioningserver/tests/test_start_cluster_controller.py	2012-10-23 12:48:31 +0000
+++ src/provisioningserver/tests/test_start_cluster_controller.py	2012-11-16 14:58:20 +0000
@@ -25,7 +25,10 @@
 
 from apiclient.maas_client import MAASDispatcher
 from apiclient.testing.django import parse_headers_and_body_with_django
-from fixtures import EnvironmentVariableFixture
+from fixtures import (
+    EnvironmentVariableFixture,
+    FakeLogger,
+    )
 from maastesting.factory import factory
 from mock import (
     call,
@@ -84,6 +87,10 @@
 
     def setUp(self):
         super(TestStartClusterController, self).setUp()
+
+        self.useFixture(FakeLogger())
+        self.patch(start_cluster_controller, 'set_up_logging')
+
         # Patch out anything that could be remotely harmful if we did it
         # accidentally in the test.  Make the really outrageous ones
         # raise exceptions.

=== modified file 'src/provisioningserver/tests/test_tags.py'
--- src/provisioningserver/tests/test_tags.py	2012-10-24 16:29:02 +0000
+++ src/provisioningserver/tests/test_tags.py	2012-11-16 14:58:20 +0000
@@ -12,21 +12,21 @@
 __metaclass__ = type
 __all__ = []
 
+import httplib
+import urllib2
+
 from apiclient.maas_client import MAASClient
-import httplib
+from fixtures import FakeLogger
 from lxml import etree
 from maastesting.factory import factory
 from maastesting.fakemethod import (
     FakeMethod,
     MultiFakeMethod,
     )
-import urllib2
 from mock import MagicMock
-from provisioningserver.auth import (
-    get_recorded_nodegroup_uuid,
-    )
+from provisioningserver import tags
+from provisioningserver.auth import get_recorded_nodegroup_uuid
 from provisioningserver.testing.testcase import PservTestCase
-from provisioningserver import tags
 
 
 class FakeResponse:
@@ -41,6 +41,10 @@
 
 class TestTagUpdating(PservTestCase):
 
+    def setUp(self):
+        super(TestTagUpdating, self).setUp()
+        self.useFixture(FakeLogger())
+
     def test_get_cached_knowledge_knows_nothing(self):
         # If we haven't given it any secrets, we should get back nothing
         self.assertEqual((None, None), tags.get_cached_knowledge())