cloud-init-dev team mailing list archive
-
cloud-init-dev team
-
Mailing list archive
-
Message #00182
[Merge] lp:~harlowja/cloud-init/query-back-duo into lp:cloud-init
Joshua Harlow has proposed merging lp:~harlowja/cloud-init/query-back-duo into lp:cloud-init.
Requested reviews:
cloud init development team (cloud-init-dev)
For more details, see:
https://code.launchpad.net/~harlowja/cloud-init/query-back-duo/+merge/141198
Reworking the addition that brings the query tool back to life. This approach is a little less intrusive.
--
https://code.launchpad.net/~harlowja/cloud-init/query-back-duo/+merge/141198
Your team cloud init development team is requested to review the proposed merge of lp:~harlowja/cloud-init/query-back-duo into lp:cloud-init.
=== modified file 'bin/cloud-init'
--- bin/cloud-init 2012-09-20 01:19:43 +0000
+++ bin/cloud-init 2012-12-24 04:32:20 +0000
@@ -26,6 +26,8 @@
import sys
import traceback
+import cPickle as pickle
+
# This is more just for running from the bin folder so that
# cloud-init binary can find the cloudinit module
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(
@@ -45,6 +47,8 @@
from cloudinit import util
from cloudinit import version
+from cloudinit.sources import DataSource
+
from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,
CLOUD_CONFIG)
@@ -56,12 +60,18 @@
# Module section template
MOD_SECTION_TPL = "cloud_%s_modules"
-# Things u can query on
-QUERY_DATA_TYPES = [
- 'data',
- 'data_raw',
- 'instance_id',
-]
+# Things u can query on and the lambda to fetch that
+# item when given a datasource to apply on
+QUERY_DATA_TYPES = {
+ 'metadata': lambda ds: ds.metadata,
+ 'userdata': lambda ds: ds.userdata,
+ 'iid': lambda ds: ds.get_instance_id(),
+ 'hostname': lambda ds: ds.get_hostname(),
+ 'locale': lambda ds: ds.get_locale(),
+ 'az': lambda ds: ds.availability_zone,
+ 'launch_index': lambda ds: ds.launch_index,
+ 'public_keys': lambda ds: ds.get_public_ssh_keys(),
+}
# Frequency shortname to full name
# (so users don't have to remember the full name...)
@@ -86,6 +96,10 @@
sys.stderr.write("\n")
+def is_root():
+ return os.geteuid() == 0
+
+
def welcome(action, msg=None):
if not msg:
msg = welcome_format(action)
@@ -346,9 +360,48 @@
return run_module_section(mods, name, name)
-def main_query(name, _args):
- raise NotImplementedError(("Action '%s' is not"
- " currently implemented") % (name))
+def main_query(name, args):
+ # TODO(harlowja) refactor init so that
+ # it is not the sole entrypoint for so many operations
+ # perhaps split into
+ init = stages.Init(ds_deps=[])
+ safe_ds_where = init.paths.get_ipath_cur("obj_pkl_safe")
+ if not os.path.isfile(safe_ds_where):
+ if is_root() and args.populate:
+ # At this point we can make a safe datasource from whatever
+ # may exist locally (if anything)
+ ds = init.fetch(True)
+ if not ds:
+ raise NotImplementedError(("No datasource found to "
+ "convert into a 'safe' datasource"))
+ else:
+ # Keep only the basics, ignoring the userdata since typically
+ # said userdata contains contents and data that users should
+ # not always be able to see.
+ safe_ds = DataSource(ds.sys_cfg, ds.distro, ds.paths)
+ safe_ds.metadata = ds.metadata
+ pk_contents = pickle.dumps(safe_ds)
+ util.write_file(safe_ds_where, pk_contents, mode=0644)
+ if args.what:
+ ds = None
+ if is_root():
+ # Use a full datasource if running as root
+ ds = init.fetch(True)
+ if not ds:
+ try:
+ pk_contents = util.load_file(safe_ds_where)
+ ds = pickle.loads(pk_contents)
+ except:
+ pass
+ if not ds:
+ raise NotImplementedError("No datasource found for querying.")
+ for i in args.what:
+ i_canon = i.lower().strip()
+ if i_canon not in QUERY_DATA_TYPES:
+ print("Unknown how to query on %s!" % (i))
+ else:
+ func = QUERY_DATA_TYPES[i_canon]
+ print("%s: %s" % (i, func(ds)))
def main_single(name, args):
@@ -468,10 +521,14 @@
parser_query = subparsers.add_parser('query',
help=('query information stored '
'in cloud-init'))
- parser_query.add_argument("--name", '-n', action="store",
- help="item name to query on",
- required=True,
- choices=QUERY_DATA_TYPES)
+ if is_root():
+ parser_query.add_argument("--populate", '-p', action="store_true",
+ help=("populate the 'safe' "
+ "datasource that users can query"),
+ default=False)
+ query_help = "query the given datasource field (%s)"
+ query_help = query_help % (", ".join(sorted(QUERY_DATA_TYPES.keys())))
+ parser_query.add_argument('what', nargs='*', help=query_help)
parser_query.set_defaults(action=('query', main_query))
# This subcommand allows you to run a single module
=== modified file 'cloudinit/sources/__init__.py'
--- cloudinit/sources/__init__.py 2012-11-12 17:26:49 +0000
+++ cloudinit/sources/__init__.py 2012-12-24 04:32:20 +0000
@@ -20,7 +20,6 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import abc
import os
from cloudinit import importer
@@ -42,9 +41,6 @@
class DataSource(object):
-
- __metaclass__ = abc.ABCMeta
-
def __init__(self, sys_cfg, distro, paths, ud_proc=None):
self.sys_cfg = sys_cfg
self.distro = distro
=== modified file 'cloudinit/stages.py'
--- cloudinit/stages.py 2012-12-17 13:41:11 +0000
+++ cloudinit/stages.py 2012-12-24 04:32:20 +0000
@@ -217,13 +217,14 @@
cfg_list = self.cfg.get('datasource_list') or []
return (cfg_list, pkg_list)
- def _get_data_source(self):
+ def _get_data_source(self, local_only):
if self.datasource is not NULL_DATA_SOURCE:
return self.datasource
ds = self._restore_from_cache()
if ds:
LOG.debug("Restored from cache, datasource: %s", ds)
- if not ds:
+ # Try to find one that will work
+ if not ds and not local_only:
(cfg_list, pkg_list) = self._get_datasources()
# Deep copy so that user-data handlers can not modify
# (which will affect user-data handlers down the line...)
@@ -303,8 +304,8 @@
self._reset()
return iid
- def fetch(self):
- return self._get_data_source()
+ def fetch(self, local_only=False):
+ return self._get_data_source(local_only)
def instancify(self):
return self._reflect_cur_instance()
Follow ups