[Merge] lp:~rvb/maas/share-mem into lp:maas
Raphaël Badin has proposed merging lp:~rvb/maas/share-mem into lp:maas.
Requested reviews:
Launchpad code reviewers (launchpad-reviewers)
For more details, see:
https://code.launchpad.net/~rvb/maas/share-mem/+merge/119570
This branch uses the process-safe data containers provided by Python's multiprocessing module to share objects across Celery's workers.
This was discussed with Jeroen.
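As background, here is a minimal sketch (not part of the branch; the names are made up) of the multiprocessing primitive this relies on: a plain module-level dict is private to each worker process, while Manager().dict() returns a proxy whose updates are visible to every process that holds it.

    from multiprocessing import Manager, Process

    def record(shared, key, value):
        # The write goes through the proxy to the manager process, so it is
        # visible to every other process holding the same proxy.
        shared[key] = value

    if __name__ == '__main__':
        manager = Manager()
        shared = manager.dict()  # proxy to a dict living in the manager process
        worker = Process(target=record, args=(shared, 'nodegroup_name', 'cluster1'))
        worker.start()
        worker.join()
        print(shared.get('nodegroup_name'))  # prints 'cluster1'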
= Notes =
- The main change in this branch is the addition of the cache.py module. All the modules that previously used (non-process-safe) global objects now use this module as their backend storage. I could have exposed an object created by Manager().dict() directly, but I thought it was better to have a tiny layer around it in case we need to extend the cache later (a short usage sketch follows these notes).
- The diff is a little messed up because the first version of this branch renamed auth.py to cache.py, but the second version (the one you're about to review) reinstated auth.py (simply delegating all the caching work to the new cache.py module).
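To make the intended usage concrete, here is a short sketch based on the new cache.py module in the diff below (the key and value are only examples; 'api_credentials' is the key auth.py records):

    from provisioningserver.cache import cache

    # One Celery worker (a separate process) records a value...
    cache.set('api_credentials', 'consumer-key:resource-token:resource-secret')

    # ...and any other worker can read it back through the shared proxy.
    credentials = cache.get('api_credentials')

    # Tests call clear() to reset the shared state between test cases.
    cache.clear()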
--
https://code.launchpad.net/~rvb/maas/share-mem/+merge/119570
Your team Launchpad code reviewers is requested to review the proposed merge of lp:~rvb/maas/share-mem into lp:maas.
=== modified file 'src/maasserver/testing/testcase.py'
--- src/maasserver/testing/testcase.py 2012-06-07 11:44:14 +0000
+++ src/maasserver/testing/testcase.py 2012-08-14 15:08:44 +0000
@@ -17,18 +17,20 @@
'TestModelTestCase',
]
-from django.core.cache import cache
+from django.core.cache import cache as django_cache
from maasserver.testing import reset_fake_provisioning_api_proxy
from maasserver.testing.factory import factory
from maastesting.celery import CeleryFixture
import maastesting.djangotestcase
+from provisioningserver.cache import cache as pserv_cache
class TestCase(maastesting.djangotestcase.DjangoTestCase):
def setUp(self):
super(TestCase, self).setUp()
- self.addCleanup(cache.clear)
+ self.addCleanup(django_cache.clear)
+ self.addCleanup(pserv_cache.clear)
self.addCleanup(reset_fake_provisioning_api_proxy)
self.celery = self.useFixture(CeleryFixture())
=== added file 'src/provisioningserver/auth.py'
--- src/provisioningserver/auth.py 1970-01-01 00:00:00 +0000
+++ src/provisioningserver/auth.py 2012-08-14 15:08:44 +0000
@@ -0,0 +1,72 @@
+# Copyright 2012 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""API credentials for node-group workers."""
+
+from __future__ import (
+ absolute_import,
+ print_function,
+ unicode_literals,
+ )
+
+__metaclass__ = type
+__all__ = [
+ 'get_recorded_api_credentials',
+ 'get_recorded_nodegroup_name',
+ 'locate_maas_api',
+ 'record_api_credentials',
+ 'record_nodegroup_name',
+ ]
+
+from apiclient.creds import convert_string_to_tuple
+from provisioningserver.cache import cache
+
+
+# Cache key for the API credentials as last sent by the server.
+API_CREDENTIALS_KEY_CACHE_NAME = 'api_credentials'
+
+# Cache key for the name of the nodegroup that this worker manages.
+RECORDED_NODEGROUP_NAME_KEY_CACHE_NAME = 'nodegroup_name'
+
+
+def locate_maas_api():
+ """Return the base URL for the MAAS API."""
+# TODO: Configure this somehow. What you see here is a placeholder.
+ return "http://localhost/MAAS/"
+
+
+def record_api_credentials(api_credentials):
+ """Update the recorded API credentials.
+
+ :param api_credentials: Newly received API credentials, in the form of
+ a single string: consumer key, resource token, and resource secret
+ separated by colons.
+ """
+ cache.set(API_CREDENTIALS_KEY_CACHE_NAME, api_credentials)
+
+
+def get_recorded_api_credentials():
+ """Return API credentials as last received from the server.
+
+ :return: If credentials have been received, a tuple of
+ (consumer_key, resource_token, resource_secret) as expected by
+ :class:`MAASOauth`. Otherwise, None.
+ """
+ credentials_string = cache.get(API_CREDENTIALS_KEY_CACHE_NAME)
+ if credentials_string is None:
+ return None
+ else:
+ return convert_string_to_tuple(credentials_string)
+
+
+def record_nodegroup_name(nodegroup_name):
+ """Record the name of the nodegroup we manage, as sent by the server."""
+ cache.set(RECORDED_NODEGROUP_NAME_KEY_CACHE_NAME, nodegroup_name)
+
+
+def get_recorded_nodegroup_name():
+ """Return the name of this worker's nodegroup, as sent by the server.
+
+ If the server has not sent the name yet, returns None.
+ """
+ return cache.get(RECORDED_NODEGROUP_NAME_KEY_CACHE_NAME)
=== renamed file 'src/provisioningserver/auth.py' => 'src/provisioningserver/cache.py'
--- src/provisioningserver/auth.py 2012-08-13 05:41:02 +0000
+++ src/provisioningserver/cache.py 2012-08-14 15:08:44 +0000
@@ -11,66 +11,31 @@
__metaclass__ = type
__all__ = [
- 'get_recorded_api_credentials',
- 'get_recorded_nodegroup_name',
- 'locate_maas_api',
- 'record_api_credentials',
- 'record_nodegroup_name',
+ 'cache',
]
-from apiclient.creds import convert_string_to_tuple
-
-# API credentials as last sent by the server. The worker uses these
-# credentials to access the MAAS API.
-# Shared between threads.
-recorded_api_credentials = None
-
-
-def locate_maas_api():
- """Return the base URL for the MAAS API."""
-# TODO: Configure this somehow. What you see here is a placeholder.
- return "http://localhost/MAAS/"
-
-
-# The name of the nodegroup that this worker manages.
-# Shared between threads.
-recorded_nodegroup_name = None
-
-
-def record_api_credentials(api_credentials):
- """Update the recorded API credentials.
-
- :param api_credentials: Newly received API credentials, in the form of
- a single string: consumer key, resource token, and resource seret
- separated by colons.
- """
- global recorded_api_credentials
- recorded_api_credentials = api_credentials
-
-
-def get_recorded_api_credentials():
- """Return API credentials as last received from the server.
-
- :return: If credentials have been received, a tuple of
- (consumer_key, resource_token, resource_secret) as expected by
- :class:`MAASOauth`. Otherwise, None.
- """
- credentials_string = recorded_api_credentials
- if credentials_string is None:
- return None
- else:
- return convert_string_to_tuple(credentials_string)
-
-
-def record_nodegroup_name(nodegroup_name):
- """Record the name of the nodegroup we manage, as sent by the server."""
- global recorded_nodegroup_name
- recorded_nodegroup_name = nodegroup_name
-
-
-def get_recorded_nodegroup_name():
- """Return the name of this worker's nodegroup, as sent by the server.
-
- If the server has not sent the name yet, returns None.
- """
- return recorded_nodegroup_name
+
+from multiprocessing import Manager
+
+
+class Cache(object):
+ """A process-safe dict-like cache."""
+
+ def __init__(self, cache_backend):
+ self.cache_backend = cache_backend
+
+ def set(self, key, value):
+ self.cache_backend[key] = value
+
+ def get(self, key):
+ return self.cache_backend.get(key, None)
+
+ def clear(self):
+ self.cache_backend.clear()
+
+
+_manager = Manager()
+
+
+# Initialize the process-safe singleton cache.
+cache = Cache(_manager.dict())
=== modified file 'src/provisioningserver/dhcp/leases.py'
--- src/provisioningserver/dhcp/leases.py 2012-08-10 14:15:23 +0000
+++ src/provisioningserver/dhcp/leases.py 2012-08-14 15:08:44 +0000
@@ -48,29 +48,25 @@
get_recorded_nodegroup_name,
locate_maas_api,
)
+from provisioningserver.cache import cache
from provisioningserver.dhcp.leases_parser import parse_leases
from provisioningserver.logging import task_logger
-# Modification time on last-processed leases file.
-# Shared between celery threads.
-recorded_leases_time = None
-
-# Leases as last parsed.
-# Shared between celery threads.
-recorded_leases = None
-
-# Shared key for use with omshell. We don't store this key
-# persistently, but when the server sends it, we keep a copy in memory
-# so that celerybeat jobs (which do not originate with the server and
-# therefore don't receive this argument) can make use of it.
-# Shared between celery threads.
-recorded_omapi_shared_key = None
+# Cache key for the modification time on last-processed leases file.
+LEASES_TIME_KEY_CACHE_NAME = 'leases_time'
+
+
+# Cache key for the leases as last parsed.
+LEASES_KEY_CACHE_NAME = 'recorded_leases'
+
+
+# Cache key for the shared key for use with omshell.
+OMAPI_SHARED_KEY_CACHE_NAME = 'omapi_shared_key'
def record_omapi_shared_key(shared_key):
"""Record the OMAPI shared key as received from the server."""
- global recorded_omapi_shared_key
- recorded_omapi_shared_key = shared_key
+ cache.set(OMAPI_SHARED_KEY_CACHE_NAME, shared_key)
def get_leases_timestamp():
@@ -95,8 +91,8 @@
# These variables are shared between threads. A bit of
# inconsistency due to concurrent updates is not a problem, but read
# them both at once here to reduce the scope for trouble.
- previous_leases = recorded_leases
- previous_leases_time = recorded_leases_time
+ previous_leases = cache.get(LEASES_KEY_CACHE_NAME)
+ previous_leases_time = cache.get(LEASES_TIME_KEY_CACHE_NAME)
if get_leases_timestamp() == previous_leases_time:
return None
@@ -115,10 +111,8 @@
:param leases: A dict mapping each leased IP address to the MAC address
that it has been assigned to.
"""
- global recorded_leases_time
- global recorded_leases
- recorded_leases_time = last_change
- recorded_leases = leases
+ cache.set(LEASES_TIME_KEY_CACHE_NAME, last_change)
+ cache.set(LEASES_KEY_CACHE_NAME, leases)
def identify_new_leases(current_leases):
@@ -127,9 +121,9 @@
:param current_leases: A dict mapping IP addresses to the respective
MAC addresses that own them.
"""
- # The recorded_leases reference is shared between threads. Read it
+ # The recorded leases are shared between threads. Read them
# just once to reduce the impact of concurrent changes.
- previous_leases = recorded_leases
+ previous_leases = cache.get(LEASES_KEY_CACHE_NAME)
if previous_leases is None:
return current_leases
else:
@@ -149,7 +143,7 @@
# The recorded_omapi_shared_key is shared between threads, so read
# it just once, atomically.
- omapi_key = recorded_omapi_shared_key
+ omapi_key = cache.get(OMAPI_SHARED_KEY_CACHE_NAME)
if omapi_key is None:
task_logger.info(
"Not registering new leases: "
@@ -177,7 +171,7 @@
return
api_path = 'nodegroups/%s/' % nodegroup_name
- oauth = MAASOAuth(*get_recorded_api_credentials())
+ oauth = MAASOAuth(*api_credentials)
MAASClient(oauth, MAASDispatcher(), locate_maas_api()).post(
api_path, 'update_leases', leases=leases)
=== modified file 'src/provisioningserver/dhcp/tests/test_leases.py'
--- src/provisioningserver/dhcp/tests/test_leases.py 2012-08-10 13:11:19 +0000
+++ src/provisioningserver/dhcp/tests/test_leases.py 2012-08-14 15:08:44 +0000
@@ -21,16 +21,18 @@
from apiclient.maas_client import MAASClient
from maastesting.factory import factory
from maastesting.fakemethod import FakeMethod
-from maastesting.testcase import TestCase
from maastesting.utils import (
age_file,
get_write_time,
)
-from provisioningserver import auth
+from provisioningserver.cache import cache
from provisioningserver.dhcp import leases as leases_module
from provisioningserver.dhcp.leases import (
check_lease_changes,
identify_new_leases,
+ LEASES_KEY_CACHE_NAME,
+ LEASES_TIME_KEY_CACHE_NAME,
+ OMAPI_SHARED_KEY_CACHE_NAME,
parse_leases_file,
process_leases,
record_lease_state,
@@ -41,27 +43,25 @@
upload_leases,
)
from provisioningserver.omshell import Omshell
+from provisioningserver.testing.testcase import TestCase
from testtools.testcase import ExpectedException
class TestHelpers(TestCase):
def test_record_omapi_shared_key_records_shared_key(self):
- self.patch(leases_module, 'recorded_omapi_shared_key', None)
key = factory.getRandomString()
record_omapi_shared_key(key)
- self.assertEqual(key, leases_module.recorded_omapi_shared_key)
+ self.assertEqual(key, cache.get(OMAPI_SHARED_KEY_CACHE_NAME))
def test_record_lease_state_records_time_and_leases(self):
time = datetime.utcnow()
leases = {factory.getRandomIPAddress(): factory.getRandomMACAddress()}
- self.patch(leases_module, 'recorded_leases_time', None)
- self.patch(leases_module, 'recorded_leases', None)
record_lease_state(time, leases)
self.assertEqual(
(time, leases), (
- leases_module.recorded_leases_time,
- leases_module.recorded_leases,
+ cache.get(LEASES_TIME_KEY_CACHE_NAME),
+ cache.get(LEASES_KEY_CACHE_NAME),
))
@@ -124,31 +124,19 @@
"""Set a recorded omapi key for the duration of this test."""
if key is None:
key = factory.getRandomString()
- self.patch(leases_module, 'recorded_omapi_shared_key', key)
-
- def clear_omapi_key(self):
- """Clear the recorded omapi key for the duration of this test."""
- self.patch(leases_module, 'omapi_shared_key', None)
+ cache.set(OMAPI_SHARED_KEY_CACHE_NAME, key)
def set_nodegroup_name(self):
"""Set the recorded nodegroup name for the duration of this test."""
name = factory.make_name('nodegroup')
- self.patch(auth, 'recorded_nodegroup_name', name)
+ cache.set('nodegroup_name', name)
return name
def set_api_credentials(self):
"""Set recorded API credentials for the duration of this test."""
creds_string = ':'.join(
factory.getRandomString() for counter in range(3))
- self.patch(auth, 'recorded_api_credentials', creds_string)
-
- def clear_api_credentials(self):
- """Clear recorded API credentials for the duration of this test."""
- self.patch(auth, 'recorded_api_credentials', None)
-
- def clear_nodegroup_name(self):
- """Set the recorded nodegroup name for the duration of this test."""
- self.patch(auth, 'recorded_nodegroup_name', None)
+ cache.set('api_credentials', creds_string)
def set_lease_state(self, time=None, leases=None):
"""Set the recorded state of DHCP leases.
@@ -157,8 +145,8 @@
state so that it gets reset at the end of the test. Using this will
prevent recorded lease state from leaking into other tests.
"""
- self.patch(leases_module, 'recorded_leases_time', time)
- self.patch(leases_module, 'recorded_leases', leases)
+ cache.set(LEASES_TIME_KEY_CACHE_NAME, time)
+ cache.set(LEASES_KEY_CACHE_NAME, leases)
def test_record_lease_state_sets_leases_and_timestamp(self):
time = datetime.utcnow()
@@ -167,8 +155,8 @@
record_lease_state(time, leases)
self.assertEqual(
(time, leases), (
- leases_module.recorded_leases_time,
- leases_module.recorded_leases,
+ cache.get(LEASES_TIME_KEY_CACHE_NAME),
+ cache.get(LEASES_KEY_CACHE_NAME),
))
def test_check_lease_changes_returns_tuple_if_no_state_cached(self):
@@ -249,7 +237,6 @@
def test_process_leases_records_update(self):
self.set_lease_state()
- self.clear_omapi_key()
self.patch(leases_module, 'send_leases', FakeMethod())
new_leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
@@ -349,7 +336,7 @@
old_leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
}
- self.patch(leases_module, 'recorded_leases', old_leases)
+ cache.set(LEASES_KEY_CACHE_NAME, old_leases)
new_leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
}
@@ -381,7 +368,6 @@
def test_register_new_leases_does_nothing_without_omapi_key(self):
self.patch(Omshell, 'create', FakeMethod())
self.set_lease_state()
- self.clear_omapi_key()
self.set_nodegroup_name()
new_leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
@@ -392,8 +378,6 @@
def test_register_new_leases_does_nothing_without_nodegroup_name(self):
self.patch(Omshell, 'create', FakeMethod())
self.set_lease_state()
- self.clear_omapi_key()
- self.clear_nodegroup_name()
new_leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
}
@@ -416,7 +400,6 @@
MAASClient.post.calls)
def test_send_leases_does_nothing_without_credentials(self):
- self.clear_api_credentials()
self.patch(MAASClient, 'post', FakeMethod())
leases = {
factory.getRandomIPAddress(): factory.getRandomMACAddress(),
=== added file 'src/provisioningserver/testing/testcase.py'
--- src/provisioningserver/testing/testcase.py 1970-01-01 00:00:00 +0000
+++ src/provisioningserver/testing/testcase.py 2012-08-14 15:08:44 +0000
@@ -0,0 +1,25 @@
+# Copyright 2012 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Provisioningserver-specific test-case classes."""
+
+from __future__ import (
+ absolute_import,
+ print_function,
+ unicode_literals,
+ )
+
+__metaclass__ = type
+__all__ = [
+ 'TestCase',
+ ]
+
+from maastesting import testcase
+from provisioningserver.cache import cache as pserv_cache
+
+
+class TestCase(testcase.TestCase):
+
+ def setUp(self):
+ super(TestCase, self).setUp()
+ self.addCleanup(pserv_cache.clear)
=== added file 'src/provisioningserver/tests/test_auth.py'
--- src/provisioningserver/tests/test_auth.py 1970-01-01 00:00:00 +0000
+++ src/provisioningserver/tests/test_auth.py 2012-08-14 15:08:44 +0000
@@ -0,0 +1,50 @@
+# Copyright 2012 Canonical Ltd. This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+
+"""Tests for management of node-group workers' API credentials."""
+
+from __future__ import (
+ absolute_import,
+ print_function,
+ unicode_literals,
+ )
+
+__metaclass__ = type
+__all__ = []
+
+from apiclient.creds import convert_tuple_to_string
+from maastesting.factory import factory
+from maastesting.testcase import TestCase
+from provisioningserver import auth
+from provisioningserver.cache import cache
+
+
+def make_credentials():
+ """Produce a tuple of API credentials."""
+ return (
+ factory.make_name('consumer-key'),
+ factory.make_name('resource-token'),
+ factory.make_name('resource-secret'),
+ )
+
+
+class TestAuth(TestCase):
+
+ def test_record_api_credentials_records_credentials_string(self):
+ creds_string = convert_tuple_to_string(make_credentials())
+ auth.record_api_credentials(creds_string)
+ self.assertEqual(
+ creds_string, cache.get(auth.API_CREDENTIALS_KEY_CACHE_NAME))
+
+ def test_get_recorded_api_credentials_returns_credentials_as_tuple(self):
+ creds = make_credentials()
+ auth.record_api_credentials(convert_tuple_to_string(creds))
+ self.assertEqual(creds, auth.get_recorded_api_credentials())
+
+ def test_get_recorded_api_credentials_returns_None_without_creds(self):
+ self.assertIsNone(auth.get_recorded_api_credentials())
+
+ def test_get_recorded_nodegroup_name_vs_record_nodegroup_name(self):
+ nodegroup_name = factory.make_name('nodegroup')
+ auth.record_nodegroup_name(nodegroup_name)
+ self.assertEqual(nodegroup_name, auth.get_recorded_nodegroup_name())
=== renamed file 'src/provisioningserver/tests/test_auth.py' => 'src/provisioningserver/tests/test_cache.py'
--- src/provisioningserver/tests/test_auth.py 2012-08-13 05:41:02 +0000
+++ src/provisioningserver/tests/test_cache.py 2012-08-14 15:08:44 +0000
@@ -1,7 +1,7 @@
# Copyright 2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
-"""Tests for management of node-group workers' API credentials."""
+"""Tests cache."""
from __future__ import (
absolute_import,
@@ -12,41 +12,25 @@
__metaclass__ = type
__all__ = []
-from apiclient.creds import convert_tuple_to_string
+from multiprocessing.managers import DictProxy
+
from maastesting.factory import factory
-from maastesting.testcase import TestCase
-from provisioningserver import auth
-
-
-def make_credentials():
- """Produce a tuple of API credentials."""
- return (
- factory.make_name('consumer-key'),
- factory.make_name('resource-token'),
- factory.make_name('resource-secret'),
- )
-
-
-class TestAuth(TestCase):
-
- def test_record_api_credentials_records_credentials_string(self):
- self.patch(auth, 'recorded_api_credentials', None)
- creds_string = convert_tuple_to_string(make_credentials())
- auth.record_api_credentials(creds_string)
- self.assertEqual(creds_string, auth.recorded_api_credentials)
-
- def test_get_recorded_api_credentials_returns_credentials_as_tuple(self):
- self.patch(auth, 'recorded_api_credentials', None)
- creds = make_credentials()
- auth.record_api_credentials(convert_tuple_to_string(creds))
- self.assertEqual(creds, auth.get_recorded_api_credentials())
-
- def test_get_recorded_api_credentials_returns_None_without_creds(self):
- self.patch(auth, 'recorded_api_credentials', None)
- self.assertIsNone(auth.get_recorded_api_credentials())
-
- def test_get_recorded_nodegroup_name_vs_record_nodegroup_name(self):
- self.patch(auth, 'recorded_nodegroup_name', None)
- nodegroup_name = factory.make_name('nodegroup')
- auth.record_nodegroup_name(nodegroup_name)
- self.assertEqual(nodegroup_name, auth.get_recorded_nodegroup_name())
+from provisioningserver.cache import cache
+from provisioningserver.testing.testcase import TestCase
+
+
+class TestCache(TestCase):
+
+ def test_cache_has_initialized_backend(self):
+ self.assertIsInstance(cache.cache_backend, DictProxy)
+
+ def test_cache_stores_value(self):
+ key = factory.getRandomString()
+ value = factory.getRandomString()
+ cache.set(key, value)
+ self.assertEqual(value, cache.get(key))
+
+ def test_cache_clears_cache(self):
+ cache.set(factory.getRandomString(), factory.getRandomString())
+ cache.clear()
+ self.assertEqual(0, len(cache.cache_backend))
=== modified file 'src/provisioningserver/tests/test_tasks.py'
--- src/provisioningserver/tests/test_tasks.py 2012-08-13 05:41:02 +0000
+++ src/provisioningserver/tests/test_tasks.py 2012-08-14 15:08:44 +0000
@@ -24,13 +24,12 @@
MultiFakeMethod,
)
from maastesting.matchers import ContainsAll
-from maastesting.testcase import TestCase
from netaddr import IPNetwork
from provisioningserver import (
auth,
tasks,
)
-from provisioningserver.dhcp import leases
+from provisioningserver.cache import cache
from provisioningserver.dns.config import (
conf,
DNSZoneConfig,
@@ -57,6 +56,7 @@
write_full_dns_config,
)
from provisioningserver.testing import network_infos
+from provisioningserver.testing.testcase import TestCase
from testresources import FixtureResource
from testtools.matchers import (
Equals,
@@ -82,25 +82,6 @@
# Nothing is refreshed, but there is no error either.
pass
- def test_calls_refresh_function(self):
- value = factory.make_name('new-value')
- refresh_function = FakeMethod()
- self.patch(tasks, 'refresh_functions', {'my_item': refresh_function})
- refresh_secrets(my_item=value)
- self.assertEqual([(value, )], refresh_function.extract_args())
-
- def test_refreshes_even_if_None(self):
- refresh_function = FakeMethod()
- self.patch(tasks, 'refresh_functions', {'my_item': refresh_function})
- refresh_secrets(my_item=None)
- self.assertEqual([(None, )], refresh_function.extract_args())
-
- def test_does_not_refresh_if_omitted(self):
- refresh_function = FakeMethod()
- self.patch(tasks, 'refresh_functions', {'my_item': refresh_function})
- refresh_secrets()
- self.assertEqual([], refresh_function.extract_args())
-
def test_breaks_on_unknown_item(self):
self.assertRaises(AssertionError, refresh_secrets, not_an_item=None)
@@ -113,22 +94,19 @@
factory.make_name('token'),
factory.make_name('secret'),
)
- self.patch(auth, 'recorded_api_credentials', None)
refresh_secrets(
api_credentials=convert_tuple_to_string(credentials))
self.assertEqual(credentials, auth.get_recorded_api_credentials())
def test_updates_nodegroup_name(self):
nodegroup_name = factory.make_name('nodegroup')
- self.patch(auth, 'recorded_nodegroup_name', None)
refresh_secrets(nodegroup_name=nodegroup_name)
- self.assertEqual(nodegroup_name, auth.get_recorded_nodegroup_name())
+ self.assertEqual(nodegroup_name, cache.get('nodegroup_name'))
def test_updates_omapi_shared_key(self):
- self.patch(leases, 'recorded_omapi_shared_key', None)
key = factory.make_name('omapi-shared-key')
refresh_secrets(omapi_shared_key=key)
- self.assertEqual(key, leases.recorded_omapi_shared_key)
+ self.assertEqual(key, cache.get('omapi_shared_key'))
class TestPowerTasks(TestCase):
@@ -199,7 +177,7 @@
key = factory.getRandomString()
self.patch(Omshell, '_run', FakeMethod())
add_new_dhcp_host_map({}, factory.make_name('server'), key)
- self.assertEqual(key, leases.recorded_omapi_shared_key)
+ self.assertEqual(key, cache.get('omapi_shared_key'))
def test_remove_dhcp_host_map(self):
# We don't want to actually run omshell in the task, so we stub
@@ -230,7 +208,7 @@
self.patch(Omshell, '_run', FakeMethod((0, "obj: <null>")))
remove_dhcp_host_map(
factory.getRandomIPAddress(), factory.make_name('server'), key)
- self.assertEqual(key, leases.recorded_omapi_shared_key)
+ self.assertEqual(key, cache.get('omapi_shared_key'))
def test_write_dhcp_config_writes_config(self):
conf_file = self.make_file(contents=factory.getRandomString())