
cloud-init-dev team mailing list archive

[Merge] lp:~harlowja/cloud-init/early-abort into lp:cloud-init

 

Joshua Harlow has proposed merging lp:~harlowja/cloud-init/early-abort into lp:cloud-init.

Requested reviews:
  cloud init development team (cloud-init-dev)

For more details, see:
https://code.launchpad.net/~harlowja/cloud-init/early-abort/+merge/215036

Abort early if all metadata URLs share the same net location
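
The change in DataSourceOpenStack.py below boils down to the following idea: collect the network location (host:port) of every candidate metadata URL, and only allow an early abort when they all point at a single endpoint. A minimal sketch of that decision, with simplified stand-ins for OS_VERSIONS and combine_url (not the real cloud-init helpers):

    # Sketch only: OS_VERSIONS and combine_url below are simplified
    # placeholders, not cloud-init's actual definitions.
    try:
        import urlparse                       # Python 2, as used by the patch
    except ImportError:
        from urllib import parse as urlparse  # Python 3 fallback

    OS_VERSIONS = ('2012-08-10', 'latest')    # illustrative versions only

    def combine_url(base, *parts):
        # naive join; url_helper.combine_url in cloud-init is more careful
        return '/'.join([base.rstrip('/')] + [p.strip('/') for p in parts])

    def build_md_urls(urls):
        md_urls = []
        netlocs = set()
        for url in urls:
            for version in OS_VERSIONS:
                md_url = combine_url(url, 'openstack', version,
                                     'meta_data.json')
                md_urls.append(md_url)
                parsed = urlparse.urlparse(md_url)
                if parsed.netloc:
                    netlocs.add(parsed.netloc)
        # With a single netloc, a timeout against one URL implies the whole
        # endpoint is unreachable, so waiting on the remaining URLs is
        # pointless.
        abort_early = (len(netlocs) == 1)
        return md_urls, abort_early
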
=== modified file 'cloudinit/sources/DataSourceOpenStack.py'
--- cloudinit/sources/DataSourceOpenStack.py	2014-02-24 22:41:42 +0000
+++ cloudinit/sources/DataSourceOpenStack.py	2014-04-09 21:12:22 +0000
@@ -17,6 +17,7 @@
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import time
+import urlparse
 
 from cloudinit import log as logging
 from cloudinit import sources
@@ -86,6 +87,7 @@
             urls = [DEF_MD_URL]
 
         md_urls = []
+        netlocs = set()
         url2base = {}
         for url in urls:
             for version in openstack.OS_VERSIONS + (openstack.OS_LATEST,):
@@ -93,11 +95,18 @@
                                                 version, 'meta_data.json')
                 md_urls.append(md_url)
                 url2base[md_url] = url
+                url_parsed = urlparse.urlparse(md_url)
+                if url_parsed.netloc:
+                    netlocs.add(url_parsed.netloc)
 
+        abort_early = False
+        if len(netlocs) == 1:
+            abort_early = True
         (max_wait, timeout) = self._get_url_settings()
         start_time = time.time()
         avail_url = url_helper.wait_for_url(urls=md_urls, max_wait=max_wait,
-                                            timeout=timeout)
+                                            timeout=timeout,
+                                            abort_early=abort_early)
         if avail_url:
             LOG.debug("Using metadata source: '%s'", url2base[avail_url])
         else:

=== modified file 'cloudinit/url_helper.py'
--- cloudinit/url_helper.py	2014-02-13 17:13:42 +0000
+++ cloudinit/url_helper.py	2014-04-09 21:12:22 +0000
@@ -260,7 +260,7 @@
 
 def wait_for_url(urls, max_wait=None, timeout=None,
                  status_cb=None, headers_cb=None, sleep_time=1,
-                 exception_cb=None):
+                 exception_cb=None, abort_early=False):
     """
     urls:      a list of urls to try
     max_wait:  roughly the maximum time to wait before giving up
@@ -273,6 +273,7 @@
                 for request.
     exception_cb: call method with 2 arguments 'msg' (per status_cb) and
                   'exception', the exception that occurred.
+    abort_early: abort waiting early if any url fails with a timeout error
 
     the idea of this routine is to wait for the EC2 metdata service to
     come up.  On both Eucalyptus and EC2 we have seen the case where
@@ -313,6 +314,7 @@
                     # shorten timeout to not run way over max_time
                     timeout = int((start_time + max_wait) - now)
 
+            maybe_abort = False
             reason = ""
             e = None
             try:
@@ -335,6 +337,8 @@
                     return url
             except UrlError as e:
                 reason = "request error [%s]" % e
+                if isinstance(e.cause, exceptions.Timeout):
+                    maybe_abort = True
             except Exception as e:
                 reason = "unexpected error [%s]" % e
 
@@ -350,6 +354,9 @@
                 # does.
                 exception_cb(msg=status_msg, exception=e)
 
+            if maybe_abort and abort_early:
+                return False
+
         if timeup(max_wait, start_time):
             break
 


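For context, a stripped-down sketch of how the new abort_early flag is meant to behave inside the retry loop: on a timeout-style failure the loop returns False immediately instead of sleeping and retrying until max_wait expires. This is not the real url_helper.wait_for_url; fetch() and TimeoutError_ are hypothetical stand-ins for the request call and for a UrlError whose cause is a requests Timeout.

    import time

    class TimeoutError_(Exception):
        """Stand-in for a UrlError caused by a request timeout."""

    def wait_for_url_sketch(urls, fetch, max_wait=120, sleep_time=1,
                            abort_early=False):
        start_time = time.time()
        while True:
            for url in urls:
                maybe_abort = False
                try:
                    if fetch(url):
                        return url
                except TimeoutError_:
                    # the endpoint did not answer at all
                    maybe_abort = True
                except Exception:
                    pass
                if maybe_abort and abort_early:
                    # all URLs share one netloc, so give up right away
                    return False
            if time.time() - start_time > max_wait:
                return False
            time.sleep(sleep_time)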