cloud-init-dev team mailing list archive
Message #00374
[Merge] lp:~harlowja/cloud-init/not-found-userdata into lp:cloud-init
Joshua Harlow has proposed merging lp:~harlowja/cloud-init/not-found-userdata into lp:cloud-init.
Requested reviews:
cloud init development team (cloud-init-dev)
For more details, see:
https://code.launchpad.net/~harlowja/cloud-init/not-found-userdata/+merge/202963
Skip retrying and continued fetching of userdata when NOT_FOUND

When a 404 HTTP status is returned while fetching EC2 user-data, stop the
fetch immediately instead of retrying, and have the userdata fetching
function treat this as the special case of no userdata being available
(returning an empty string).
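
For reviewers, a minimal sketch of the callback contract this adds
(illustration only, not part of the patch): readurl() consults an optional
exception_cb(filtered_request_args, exception) when a try fails and gives up
as soon as the callback returns False. The names below are simplified, and
the .code attribute on the exception is assumed from url_helper.UrlError.

# Illustration only (Python 2, matching the httplib import in the patch).
import functools
import httplib

SKIP_CODES = frozenset([httplib.NOT_FOUND])

def skip_retry_on_codes(status_codes, request_args, cause):
    # Returning False tells readurl's manual retry loop to stop immediately.
    return cause.code not in status_codes

# Bind the code set, leaving (request_args, cause) for readurl to supply.
exception_cb = functools.partial(skip_retry_on_codes, SKIP_CODES)
# e.g. url_helper.readurl(url, retries=5, exception_cb=exception_cb) would
# then stop after the first 404 instead of sleeping and retrying.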
--
https://code.launchpad.net/~harlowja/cloud-init/not-found-userdata/+merge/202963
Your team cloud init development team is requested to review the proposed merge of lp:~harlowja/cloud-init/not-found-userdata into lp:cloud-init.
=== modified file 'cloudinit/ec2_utils.py'
--- cloudinit/ec2_utils.py 2014-01-17 22:08:58 +0000
+++ cloudinit/ec2_utils.py 2014-01-23 22:43:32 +0000
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import httplib
from urlparse import (urlparse, urlunparse)
import functools
@@ -23,9 +24,11 @@
import urllib
from cloudinit import log as logging
+from cloudinit import url_helper
from cloudinit import util
LOG = logging.getLogger(__name__)
+SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND])
def maybe_json_object(text):
@@ -138,20 +141,38 @@
return joined
+def _skip_retry_on_codes(status_codes, request_args, cause):
+ """Returns if a request should retry based on a given set of codes that
+ cause retrying to be stopped/skipped.
+ """
+ if cause.code in status_codes:
+ return False
+ return True
+
+
def get_instance_userdata(api_version='latest',
metadata_address='http://169.254.169.254',
ssl_details=None, timeout=5, retries=5):
ud_url = combine_url(metadata_address, api_version)
ud_url = combine_url(ud_url, 'user-data')
+ user_data = ''
try:
+ # It is ok for userdata to not exist (that's why we are stopping if
+ # NOT_FOUND occurs) and in that case just returning an empty string.
+ exception_cb = functools.partial(_skip_retry_on_codes,
+ SKIP_USERDATA_CODES)
response = util.read_file_or_url(ud_url,
ssl_details=ssl_details,
timeout=timeout,
- retries=retries)
- return str(response)
+ retries=retries,
+ exception_cb=exception_cb)
+ user_data = str(response)
+ except url_helper.UrlError as e:
+ if e.code not in SKIP_USERDATA_CODES:
+ util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
except Exception:
util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
- return ''
+ return user_data
def get_instance_metadata(api_version='latest',
=== modified file 'cloudinit/url_helper.py'
--- cloudinit/url_helper.py 2013-05-03 22:11:32 +0000
+++ cloudinit/url_helper.py 2014-01-23 22:43:32 +0000
@@ -103,7 +103,7 @@
def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
headers=None, headers_cb=None, ssl_details=None,
- check_status=True, allow_redirects=True):
+ check_status=True, allow_redirects=True, exception_cb=None):
url = _cleanurl(url)
req_args = {
'url': url,
@@ -163,14 +163,13 @@
# Handle retrying ourselves since the built-in support
# doesn't handle sleeping between tries...
for i in range(0, manual_tries):
+ req_args['headers'] = headers_cb(url)
+ filtered_req_args = {}
+ for (k, v) in req_args.items():
+ if k == 'data':
+ continue
+ filtered_req_args[k] = v
try:
- req_args['headers'] = headers_cb(url)
- filtered_req_args = {}
- for (k, v) in req_args.items():
- if k == 'data':
- continue
- filtered_req_args[k] = v
-
LOG.debug("[%s/%s] open '%s' with %s configuration", i,
manual_tries, url, filtered_req_args)
@@ -196,6 +195,8 @@
# ssl exceptions are not going to get fixed by waiting a
# few seconds
break
+ if exception_cb and not exception_cb(filtered_req_args, e):
+ break
if i + 1 < manual_tries and sec_between > 0:
LOG.debug("Please wait %s seconds while we wait to try again",
sec_between)
=== modified file 'cloudinit/util.py'
--- cloudinit/util.py 2014-01-23 21:48:12 +0000
+++ cloudinit/util.py 2014-01-23 22:43:32 +0000
@@ -691,7 +691,7 @@
def read_file_or_url(url, timeout=5, retries=10,
headers=None, data=None, sec_between=1, ssl_details=None,
- headers_cb=None):
+ headers_cb=None, exception_cb=None):
url = url.lstrip()
if url.startswith("/"):
url = "file://%s" % url
@@ -708,7 +708,8 @@
headers_cb=headers_cb,
data=data,
sec_between=sec_between,
- ssl_details=ssl_details)
+ ssl_details=ssl_details,
+ exception_cb=exception_cb)
def load_yaml(blob, default=None, allowed=(dict,)):
=== modified file 'tests/unittests/test_datasource/test_maas.py'
--- tests/unittests/test_datasource/test_maas.py 2013-04-25 16:13:08 +0000
+++ tests/unittests/test_datasource/test_maas.py 2014-01-23 22:43:32 +0000
@@ -119,7 +119,8 @@
mock_request(url, headers=None, timeout=mocker.ANY,
data=mocker.ANY, sec_between=mocker.ANY,
ssl_details=mocker.ANY, retries=mocker.ANY,
- headers_cb=my_headers_cb)
+ headers_cb=my_headers_cb,
+ exception_cb=mocker.ANY)
resp = valid.get(key)
self.mocker.result(util.StringResponse(resp))
self.mocker.replay()
=== modified file 'tests/unittests/test_ec2_util.py'
--- tests/unittests/test_ec2_util.py 2014-01-17 21:34:53 +0000
+++ tests/unittests/test_ec2_util.py 2014-01-23 22:43:32 +0000
@@ -34,6 +34,14 @@
self.assertEquals('', userdata)
@hp.activate
+ def test_userdata_fetch_fail_server_not_found(self):
+ hp.register_uri(hp.GET,
+ 'http://169.254.169.254/%s/user-data' % (self.VERSION),
+ status=404)
+ userdata = eu.get_instance_userdata(self.VERSION)
+ self.assertEquals('', userdata)
+
+ @hp.activate
def test_metadata_fetch_no_keys(self):
base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
hp.register_uri(hp.GET, base_url, status=200,