
cloud-init-dev team mailing list archive

[Merge] lp:~harlowja/cloud-init/query-tool-is-back into lp:cloud-init

Scott Moser has proposed merging lp:~harlowja/cloud-init/query-tool-is-back into lp:cloud-init.

Requested reviews:
  cloud init development team (cloud-init-dev)

For more details, see:
https://code.launchpad.net/~harlowja/cloud-init/query-tool-is-back/+merge/123394
=== modified file 'Requires'
--- Requires	2012-07-09 20:41:45 +0000
+++ Requires	2012-09-08 01:32:17 +0000
@@ -26,3 +26,6 @@
 
 # The new main entrypoint uses argparse instead of optparse
 argparse
+
+# Used for AES encryption...
+pycrypto

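The new dependency backs the util.encrypt/util.decrypt helpers added to cloudinit/util.py at the end of this diff. A minimal sketch of the round trip they implement, assuming PyCrypto's AES (AES.new(key) defaults to ECB mode; it is spelled out here):

    import base64
    from Crypto.Cipher import AES

    # Stand-in values; the real secret is the raw SHA256 digest of a
    # private SSH key (see cloudinit/sources/__init__.py below).
    secret = b'x' * 32
    text = b'some user-data'
    # Pad to a multiple of CRYPT_BLOCK_SIZE (32), as util.encrypt does.
    padded = text + b' ' * (32 - len(text) % 32)
    token = base64.b64encode(AES.new(secret, AES.MODE_ECB).encrypt(padded))
    back = AES.new(secret, AES.MODE_ECB).decrypt(base64.b64decode(token))
    assert back.rstrip(b' ') == text
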
=== modified file 'bin/cloud-init'
--- bin/cloud-init	2012-08-10 03:48:01 +0000
+++ bin/cloud-init	2012-09-08 01:32:17 +0000
@@ -53,11 +53,17 @@
 MOD_SECTION_TPL = "cloud_%s_modules"
 
 # Things you can query on
-QUERY_DATA_TYPES = [
-    'data',
-    'data_raw',
-    'instance_id',
-]
+QUERY_DATA_TYPES = {
+    'ud': (lambda ds: ds.userdata),
+    'ud_raw': (lambda ds: ds.userdata_raw),
+    'iid': (lambda ds: ds.get_instance_id()),
+    'metadata': (lambda ds: ds.metadata),
+    'ds_config': (lambda ds: ds.ds_cfg),
+    'launch_idx': (lambda ds: ds.launch_index),
+    'locale': (lambda ds: ds.get_locale()),
+    'avail_zone': (lambda ds: ds.availability_zone()),
+    'hostname': (lambda ds: ds.get_hostname()),
+}
 
 # Frequency shortname to full name
 # (so users don't have to remember the full name...)
@@ -342,9 +348,30 @@
     return run_module_section(mods, name, name)
 
 
-def main_query(name, _args):
-    raise NotImplementedError(("Action '%s' is not"
-                               " currently implemented") % (name))
+def main_query(name, args):
+    w_msg = welcome_format(name)
+    welcome(name, msg=w_msg)
+    items = args.what
+    if not items:
+        return 1
+    init = stages.Init()
+    ds = None
+    try:
+        # Try the 'privileged' datasource first
+        ds = init.fetch(attempt_find=False)
+    except Exception:
+        pass
+    if not ds:
+        # Use the safer version (if it's there)
+        ds = init.fetch(safe=True)
+    if not ds:
+        print("No datasource available for querying!")
+        return 1
+    for i in items:
+        how_get_func = QUERY_DATA_TYPES[i]
+        print("%s =>" % (i.title()))
+        print("%s" % (how_get_func(ds)))
+    return 0
 
 
 def main_single(name, args):
@@ -464,10 +491,10 @@
     parser_query = subparsers.add_parser('query',
                                          help=('query information stored '
                                                'in cloud-init'))
-    parser_query.add_argument("--name", '-n', action="store",
+    parser_query.add_argument("--what", '-w', action="store", nargs='+',
                               help="item name to query on",
                               required=True,
-                              choices=QUERY_DATA_TYPES)
+                              choices=sorted(QUERY_DATA_TYPES.keys()))
     parser_query.set_defaults(action=('query', main_query))
 
     # This subcommand allows you to run a single module

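With the table-based rewrite above, the query action is just a lookup plus a call per requested item. A toy illustration (FakeDataSource is invented here for demonstration, not part of cloud-init):

    # Each --what choice maps to a one-argument accessor, as in the
    # QUERY_DATA_TYPES dict above.
    class FakeDataSource(object):
        userdata_raw = '#cloud-config\npackages: [htop]'

        def get_instance_id(self):
            return 'i-0000000a'

    QUERY_DATA_TYPES = {
        'ud_raw': (lambda ds: ds.userdata_raw),
        'iid': (lambda ds: ds.get_instance_id()),
    }

    ds = FakeDataSource()
    for what in ('iid', 'ud_raw'):
        print("%s =>\n%s" % (what, QUERY_DATA_TYPES[what](ds)))

On an installed system this corresponds to an invocation along the lines of: cloud-init query --what iid
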
=== modified file 'cloudinit/sources/__init__.py'
--- cloudinit/sources/__init__.py	2012-08-28 03:51:00 +0000
+++ cloudinit/sources/__init__.py	2012-09-08 01:32:17 +0000
@@ -23,9 +23,11 @@
 from email.mime.multipart import MIMEMultipart
 
 import abc
+import copy
 
 from cloudinit import importer
 from cloudinit import log as logging
+from cloudinit import ssh_util
 from cloudinit import user_data as ud
 from cloudinit import util
 
@@ -52,6 +54,7 @@
         self.paths = paths
         self.userdata = None
         self.metadata = None
+        self.encrypted = False
         self.userdata_raw = None
         name = util.obj_name(self)
         if name.startswith(DS_PREFIX):
@@ -63,6 +66,33 @@
         else:
             self.ud_proc = ud_proc
 
+    def copy(self, safe=False):
+        if not safe:
+            return self
+        nds = copy.deepcopy(self)
+        nds.ud_proc = None
+        # TODO(harlowja): Get a user...
+        user_keys = ssh_util.find_keys('root', nds.paths, private=True)
+        enc_with = None
+        if user_keys:
+            contents = util.load_file(user_keys[0], quiet=True)
+            if contents:
+                enc_with = util.hash_blob(contents, 'SHA256', give_hex=False)
+        if not enc_with:
+            # Clear it out, nothing to see here...
+            nds.userdata = None
+            nds.userdata_raw = None
+            nds.metadata = None
+            return nds
+        # Encrypt using the private key + aes
+        if nds.userdata_raw:
+            nds.userdata_raw = util.encrypt(str(nds.userdata_raw), enc_with)
+            nds.encrypted = True
+        if nds.userdata:
+            nds.userdata = util.encrypt(str(nds.userdata), enc_with)
+            nds.encrypted = True
+        return nds
+
     def get_userdata(self, apply_filter=False):
         if self.userdata is None:
             self.userdata = self.ud_proc.process(self.get_userdata_raw())

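The safe copy above derives its AES secret from the raw SHA256 digest of the first private key found for root, so only a reader with the same key material can recover the encrypted user-data. A self-contained sketch of that key derivation (hashlib stands in for util.hash_blob; the key text is a made-up placeholder):

    import hashlib

    # Placeholder contents; in the patch this comes from load_file()
    # on the first key returned by ssh_util.find_keys('root', ...).
    key_contents = b'-----BEGIN RSA PRIVATE KEY-----\n...'
    enc_with = hashlib.new('SHA256', key_contents).digest()
    assert len(enc_with) == 32  # raw digest: exactly an AES-256 key
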
=== modified file 'cloudinit/ssh_util.py'
--- cloudinit/ssh_util.py	2012-08-19 04:15:52 +0000
+++ cloudinit/ssh_util.py	2012-09-08 01:32:17 +0000
@@ -298,6 +298,23 @@
             return v
 
 
+def find_keys(username, paths, private=True):
+    (ssh_dir, _pwent) = users_ssh_info(username, paths)
+    keys = []
+    look_for = []
+    if not private:
+        look_for.append('id_rsa.pub')
+        look_for.append('id_dsa.pub')
+    else:
+        look_for.append('id_rsa')
+        look_for.append('id_dsa')
+    for fn in look_for:
+        key_fn = os.path.join(ssh_dir, fn)
+        if os.path.isfile(key_fn):
+            keys.append(key_fn)
+    return keys
+
+
 def parse_ssh_config(fname):
     # See: man sshd_config
     # The file contains keyword-argument pairs, one per line.

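find_keys only reports key files that actually exist, so callers like DataSource.copy() above can treat an empty list as "no key material, scrub instead". A condensed standalone equivalent (taking the directory directly instead of going through users_ssh_info):

    import os

    def find_private_keys(ssh_dir):
        # Probe the same fixed candidates and keep only existing files.
        names = ['id_rsa', 'id_dsa']
        return [os.path.join(ssh_dir, fn) for fn in names
                if os.path.isfile(os.path.join(ssh_dir, fn))]

    print(find_private_keys('/root/.ssh'))  # [] when no keys are present
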
=== modified file 'cloudinit/stages.py'
--- cloudinit/stages.py	2012-08-26 22:04:06 +0000
+++ cloudinit/stages.py	2012-09-08 01:32:17 +0000
@@ -43,6 +43,7 @@
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import sources
+from cloudinit import ssh_util
 from cloudinit import util
 
 LOG = logging.getLogger(__name__)
@@ -60,6 +61,8 @@
         self._distro = None
         # Created only when a fetch occurs
         self.datasource = None
+        # Only created if asked and available
+        self.safe_datasource = None
 
     @property
     def distro(self):
@@ -170,11 +173,11 @@
                                       base_cfg=self._read_base_cfg())
         return merger.cfg
 
-    def _restore_from_cache(self):
+    def _restore_from_cache(self, name):
         # We try to restore from a current link and static path
         # by using the instance link; if purge_cache was called,
         # the file won't exist.
-        pickled_fn = self.paths.get_ipath_cur('obj_pkl')
+        pickled_fn = self.paths.get_ipath_cur(name)
         pickle_contents = None
         try:
             pickle_contents = util.load_file(pickled_fn)
@@ -190,21 +193,18 @@
             util.logexc(LOG, "Failed loading pickled blob from %s", pickled_fn)
             return None
 
-    def _write_to_cache(self):
-        if not self.datasource:
-            return False
-        pickled_fn = self.paths.get_ipath_cur("obj_pkl")
-        try:
-            pk_contents = pickle.dumps(self.datasource)
-        except Exception:
-            util.logexc(LOG, "Failed pickling datasource %s", self.datasource)
-            return False
-        try:
-            util.write_file(pickled_fn, pk_contents, mode=0400)
+    def _write_to_cache(self, safe):
+        if not safe:
+            fmode = 0400
+            pickled_fn = self.paths.get_ipath_cur("obj_pkl")
+        else:
+            fmode = 0644
+            pickled_fn = self.paths.get_ipath_cur("safe_obj_pkl")
+        try:
+            pk_contents = pickle.dumps(self.datasource.copy(safe))
+            util.write_file(pickled_fn, pk_contents, mode=fmode)
         except Exception:
             util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)
-            return False
-        return True
 
     def _get_datasources(self):
         # Any config provided???
@@ -216,12 +216,23 @@
         cfg_list = self.cfg.get('datasource_list') or []
         return (cfg_list, pkg_list)
 
-    def _get_data_source(self):
+    def _get_safe_datasource(self):
+        if self.safe_datasource:
+            return self.safe_datasource
+        ds = self._restore_from_cache('safe_obj_pkl')
+        if ds:
+            LOG.debug("Restored from cache, safe datasource: %s", ds)
+            self.safe_datasource = ds
+        return ds
+
+    def _get_data_source(self, attempt_find):
         if self.datasource:
             return self.datasource
-        ds = self._restore_from_cache()
+        ds = self._restore_from_cache('obj_pkl')
         if ds:
             LOG.debug("Restored from cache, datasource: %s", ds)
+        if not ds and not attempt_find:
+            return None
         if not ds:
             (cfg_list, pkg_list) = self._get_datasources()
             # Deep copy so that user-data handlers can not modify
@@ -298,8 +309,11 @@
                         "%s\n" % (previous_iid))
         return iid
 
-    def fetch(self):
-        return self._get_data_source()
+    def fetch(self, safe=False, attempt_find=True):
+        if not safe:
+            return self._get_data_source(attempt_find)
+        else:
+            return self._get_safe_datasource()
 
     def instancify(self):
         return self._reflect_cur_instance()
@@ -311,8 +325,10 @@
                            self.distro, helpers.Runners(self.paths))
 
     def update(self):
-        if not self._write_to_cache():
-            return
+        if not self.datasource:
+            raise RuntimeError("Unable to update with the given datasource,"
+                               " no datasource fetched!")
+        self._write_to_cache(False)
+        self._write_to_cache(True)
         self._store_userdata()
 
     def _store_userdata(self):

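After this change update() always writes two pickles: the full datasource stays root-only, while the safe (scrubbed or encrypted) copy is world-readable, which is what lets an unprivileged cloud-init query fall back to it. The two flavors, names and modes as in _write_to_cache above:

    CACHES = {
        False: ('obj_pkl', 0o400),      # full datasource, root-only
        True: ('safe_obj_pkl', 0o644),  # scrubbed/encrypted, world-readable
    }
    for safe in (False, True):
        name, mode = CACHES[safe]
        print("safe=%s -> %s (mode %o)" % (safe, name, mode))
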
=== modified file 'cloudinit/util.py'
--- cloudinit/util.py	2012-08-28 03:51:00 +0000
+++ cloudinit/util.py	2012-09-08 01:32:17 +0000
@@ -24,6 +24,7 @@
 
 from StringIO import StringIO
 
+import base64
 import contextlib
 import copy as obj_copy
 import errno
@@ -46,6 +47,8 @@
 import types
 import urlparse
 
+from Crypto.Cipher import AES
+
 import yaml
 
 from cloudinit import importer
@@ -67,6 +70,9 @@
 # Helper utils to see if running in a container
 CONTAINER_TESTS = ['running-in-container', 'lxc-is-container']
 
+# AES plaintext will be padded to a multiple of this size
+CRYPT_BLOCK_SIZE = 32
+
 
 class ProcessExecutionError(IOError):
 
@@ -261,6 +267,19 @@
     return fn
 
 
+def encrypt(text, secret, pad=' '):
+    text_padding = ((CRYPT_BLOCK_SIZE - len(text) % CRYPT_BLOCK_SIZE) * pad)
+    text = text + text_padding
+    cipher = AES.new(secret)
+    return base64.b64encode(cipher.encrypt(text))
+
+
+def decrypt(text, secret, pad=' '):
+    cipher = AES.new(secret)
+    text = cipher.decrypt(base64.b64decode(text))
+    return text.rstrip(pad)
+
+
 def decomp_gzip(data, quiet=True):
     try:
         buf = StringIO(str(data))
@@ -1093,10 +1112,13 @@
     log.debug(msg, exc_info=1, *args)
 
 
-def hash_blob(blob, routine, mlen=None):
+def hash_blob(blob, routine, mlen=None, give_hex=True):
     hasher = hashlib.new(routine)
     hasher.update(blob)
-    digest = hasher.hexdigest()
+    if give_hex:
+        digest = hasher.hexdigest()
+    else:
+        digest = hasher.digest()
     # Don't get too long now
     if mlen is not None:
         return digest[0:mlen]

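The give_hex flag exists because the AES code path needs the raw 32-byte digest, while existing callers keep getting the 64-character hex string. For example:

    import hashlib

    h = hashlib.new('SHA256', b'example blob')
    print(len(h.hexdigest()))  # 64 -- hex string, the old behaviour
    print(len(h.digest()))     # 32 -- raw bytes, a valid AES-256 key
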
