cloud-init-dev team mailing list archive

[Merge] lp:~barry/cloud-init/py2-3 into lp:cloud-init

 

Barry Warsaw has proposed merging lp:~barry/cloud-init/py2-3 into lp:cloud-init.

Requested reviews:
  cloud init development team (cloud-init-dev)

For more details, see:
https://code.launchpad.net/~barry/cloud-init/py2-3/+merge/247239

Please note that this is a work in progress.  I'm mostly creating this MP to gather feedback and contributions.  While the test suite passes against Python 2.7 for me on Ubuntu 15.04, it does not yet pass with Python 3.4, nor has it even been run against Python 2.6.  Stay tuned.
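
For reviewers skimming the diff below: the port leans on the same few six-based idioms throughout.  The snippet here is only an illustrative sketch of those idioms, not code from the branch (describe() and MODE_SSH_KEY are made-up names for the example): dict iteration via .items() instead of .iteritems(), six.string_types in place of basestring, six.StringIO and six.moves for relocated stdlib modules, and 0o-prefixed octal literals for file modes.

    # Illustrative sketch only -- not code from this branch.
    import six
    from six import StringIO
    from six.moves.urllib.parse import quote as urlquote  # replaces urllib.quote

    MODE_SSH_KEY = 0o600          # 0o600 replaces the Python-2-only literal 0600

    def describe(cfg):
        # .items() works on both Python 2 and 3; .iteritems() is gone in 3.
        buf = StringIO()
        for key, val in cfg.items():
            # six.string_types covers str on 3 and str/unicode on 2,
            # replacing isinstance(val, (basestring, str)) checks.
            if isinstance(val, six.string_types):
                buf.write("%s: %s\n" % (key, val))
            else:
                buf.write("%s: %r\n" % (key, val))
        return buf.getvalue()

    print(urlquote("a key/name with spaces", safe="/:"))
    print(describe({"name": "demo", "retries": 3}))

The bulk of the diff is these substitutions applied file by file, plus a few targeted changes (httplib -> six/http.client in url_helper and ec2_utils, bytes/text helpers in util).
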
-- 
Your team cloud init development team is requested to review the proposed merge of lp:~barry/cloud-init/py2-3 into lp:cloud-init.
=== added file '.bzrignore'
--- .bzrignore	1970-01-01 00:00:00 +0000
+++ .bzrignore	2015-01-21 23:02:49 +0000
@@ -0,0 +1,4 @@
+.tox
+dist
+cloud_init.egg-info
+__pycache__

=== added file 'MANIFEST.in'
--- MANIFEST.in	1970-01-01 00:00:00 +0000
+++ MANIFEST.in	2015-01-21 23:02:49 +0000
@@ -0,0 +1,8 @@
+include *.py MANIFEST.in ChangeLog
+global-include *.txt *.rst *.ini *.in *.conf *.cfg *.sh
+graft tools
+prune build
+prune dist
+prune .tox
+prune .bzr
+exclude .bzrignore

=== modified file 'cloudinit/config/cc_apt_configure.py'
--- cloudinit/config/cc_apt_configure.py	2014-08-26 18:50:11 +0000
+++ cloudinit/config/cc_apt_configure.py	2015-01-21 23:02:49 +0000
@@ -126,7 +126,7 @@
 
 
 def rename_apt_lists(old_mirrors, new_mirrors, lists_d="/var/lib/apt/lists"):
-    for (name, omirror) in old_mirrors.iteritems():
+    for (name, omirror) in old_mirrors.items():
         nmirror = new_mirrors.get(name)
         if not nmirror:
             continue

=== modified file 'cloudinit/config/cc_debug.py'
--- cloudinit/config/cc_debug.py	2014-11-22 02:10:16 +0000
+++ cloudinit/config/cc_debug.py	2015-01-21 23:02:49 +0000
@@ -34,7 +34,8 @@
 """
 
 import copy
-from StringIO import StringIO
+
+from six import StringIO
 
 from cloudinit import type_utils
 from cloudinit import util
@@ -77,7 +78,7 @@
     dump_cfg = copy.deepcopy(cfg)
     for k in SKIP_KEYS:
         dump_cfg.pop(k, None)
-    all_keys = list(dump_cfg.keys())
+    all_keys = list(dump_cfg)
     for k in all_keys:
         if k.startswith("_"):
             dump_cfg.pop(k, None)
@@ -103,6 +104,6 @@
         line = "ci-info: %s\n" % (line)
         content_to_file.append(line)
     if out_file:
-        util.write_file(out_file, "".join(content_to_file), 0644, "w")
+        util.write_file(out_file, "".join(content_to_file), 0o644, "w")
     else:
         util.multi_log("".join(content_to_file), console=True, stderr=False)

=== modified file 'cloudinit/config/cc_landscape.py'
--- cloudinit/config/cc_landscape.py	2014-01-27 22:34:35 +0000
+++ cloudinit/config/cc_landscape.py	2015-01-21 23:02:49 +0000
@@ -20,7 +20,7 @@
 
 import os
 
-from StringIO import StringIO
+from six import StringIO
 
 from configobj import ConfigObj
 

=== modified file 'cloudinit/config/cc_mcollective.py'
--- cloudinit/config/cc_mcollective.py	2014-01-27 22:34:35 +0000
+++ cloudinit/config/cc_mcollective.py	2015-01-21 23:02:49 +0000
@@ -19,7 +19,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+import six
+from six import StringIO
 
 # Used since this can maintain comments
 # and doesn't need a top level section
@@ -51,17 +52,17 @@
         # original file in order to be able to mix the rest up
         mcollective_config = ConfigObj(SERVER_CFG)
         # See: http://tiny.cc/jh9agw
-        for (cfg_name, cfg) in mcollective_cfg['conf'].iteritems():
+        for (cfg_name, cfg) in mcollective_cfg['conf'].items():
             if cfg_name == 'public-cert':
-                util.write_file(PUBCERT_FILE, cfg, mode=0644)
+                util.write_file(PUBCERT_FILE, cfg, mode=0o644)
                 mcollective_config['plugin.ssl_server_public'] = PUBCERT_FILE
                 mcollective_config['securityprovider'] = 'ssl'
             elif cfg_name == 'private-cert':
-                util.write_file(PRICERT_FILE, cfg, mode=0600)
+                util.write_file(PRICERT_FILE, cfg, mode=0o600)
                 mcollective_config['plugin.ssl_server_private'] = PRICERT_FILE
                 mcollective_config['securityprovider'] = 'ssl'
             else:
-                if isinstance(cfg, (basestring, str)):
+                if isinstance(cfg, six.string_types):
                     # Just set it in the 'main' section
                     mcollective_config[cfg_name] = cfg
                 elif isinstance(cfg, (dict)):
@@ -69,7 +70,7 @@
                     # if it is needed and then add/or create items as needed
                     if cfg_name not in mcollective_config.sections:
                         mcollective_config[cfg_name] = {}
-                    for (o, v) in cfg.iteritems():
+                    for (o, v) in cfg.items():
                         mcollective_config[cfg_name][o] = v
                 else:
                     # Otherwise just try to convert it to a string
@@ -81,7 +82,7 @@
         contents = StringIO()
         mcollective_config.write(contents)
         contents = contents.getvalue()
-        util.write_file(SERVER_CFG, contents, mode=0644)
+        util.write_file(SERVER_CFG, contents, mode=0o644)
 
     # Start mcollective
     util.subp(['service', 'mcollective', 'start'], capture=False)

=== modified file 'cloudinit/config/cc_phone_home.py'
--- cloudinit/config/cc_phone_home.py	2014-08-26 18:50:11 +0000
+++ cloudinit/config/cc_phone_home.py	2015-01-21 23:02:49 +0000
@@ -81,7 +81,7 @@
         'pub_key_ecdsa': '/etc/ssh/ssh_host_ecdsa_key.pub',
     }
 
-    for (n, path) in pubkeys.iteritems():
+    for (n, path) in pubkeys.items():
         try:
             all_keys[n] = util.load_file(path)
         except:
@@ -99,7 +99,7 @@
 
     # Get them read to be posted
     real_submit_keys = {}
-    for (k, v) in submit_keys.iteritems():
+    for (k, v) in submit_keys.items():
         if v is None:
             real_submit_keys[k] = 'N/A'
         else:

=== modified file 'cloudinit/config/cc_puppet.py'
--- cloudinit/config/cc_puppet.py	2014-02-05 15:36:47 +0000
+++ cloudinit/config/cc_puppet.py	2015-01-21 23:02:49 +0000
@@ -18,7 +18,7 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+from six import StringIO
 
 import os
 import socket
@@ -81,13 +81,13 @@
         cleaned_contents = '\n'.join(cleaned_lines)
         puppet_config.readfp(StringIO(cleaned_contents),
                              filename=PUPPET_CONF_PATH)
-        for (cfg_name, cfg) in puppet_cfg['conf'].iteritems():
+        for (cfg_name, cfg) in puppet_cfg['conf'].items():
             # Cert configuration is a special case
             # Dump the puppet master ca certificate in the correct place
             if cfg_name == 'ca_cert':
                 # Puppet ssl sub-directory isn't created yet
                 # Create it with the proper permissions and ownership
-                util.ensure_dir(PUPPET_SSL_DIR, 0771)
+                util.ensure_dir(PUPPET_SSL_DIR, 0o771)
                 util.chownbyname(PUPPET_SSL_DIR, 'puppet', 'root')
                 util.ensure_dir(PUPPET_SSL_CERT_DIR)
                 util.chownbyname(PUPPET_SSL_CERT_DIR, 'puppet', 'root')
@@ -96,7 +96,7 @@
             else:
                 # Iterate throug the config items, we'll use ConfigParser.set
                 # to overwrite or create new items as needed
-                for (o, v) in cfg.iteritems():
+                for (o, v) in cfg.items():
                     if o == 'certname':
                         # Expand %f as the fqdn
                         # TODO(harlowja) should this use the cloud fqdn??

=== modified file 'cloudinit/config/cc_resolv_conf.py'
--- cloudinit/config/cc_resolv_conf.py	2014-08-21 20:26:43 +0000
+++ cloudinit/config/cc_resolv_conf.py	2015-01-21 23:02:49 +0000
@@ -66,8 +66,8 @@
     false_flags = []
 
     if 'options' in params:
-        for key, val in params['options'].iteritems():
-            if type(val) == bool:
+        for key, val in params['options'].items():
+            if isinstance(val, bool):
                 if val:
                     flags.append(key)
                 else:

=== modified file 'cloudinit/config/cc_seed_random.py'
--- cloudinit/config/cc_seed_random.py	2014-03-04 19:35:09 +0000
+++ cloudinit/config/cc_seed_random.py	2015-01-21 23:02:49 +0000
@@ -21,7 +21,8 @@
 
 import base64
 import os
-from StringIO import StringIO
+
+from six import StringIO
 
 from cloudinit.settings import PER_INSTANCE
 from cloudinit import log as logging

=== modified file 'cloudinit/config/cc_ssh.py'
--- cloudinit/config/cc_ssh.py	2014-08-26 18:50:11 +0000
+++ cloudinit/config/cc_ssh.py	2015-01-21 23:02:49 +0000
@@ -34,12 +34,12 @@
 "rather than the user \\\"root\\\".\';echo;sleep 10\"")
 
 KEY_2_FILE = {
-    "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0600),
-    "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0644),
-    "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0600),
-    "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0644),
-    "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0600),
-    "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0644),
+    "rsa_private": ("/etc/ssh/ssh_host_rsa_key", 0o600),
+    "rsa_public": ("/etc/ssh/ssh_host_rsa_key.pub", 0o644),
+    "dsa_private": ("/etc/ssh/ssh_host_dsa_key", 0o600),
+    "dsa_public": ("/etc/ssh/ssh_host_dsa_key.pub", 0o644),
+    "ecdsa_private": ("/etc/ssh/ssh_host_ecdsa_key", 0o600),
+    "ecdsa_public": ("/etc/ssh/ssh_host_ecdsa_key.pub", 0o644),
 }
 
 PRIV_2_PUB = {
@@ -68,13 +68,13 @@
 
     if "ssh_keys" in cfg:
         # if there are keys in cloud-config, use them
-        for (key, val) in cfg["ssh_keys"].iteritems():
+        for (key, val) in cfg["ssh_keys"].items():
             if key in KEY_2_FILE:
                 tgt_fn = KEY_2_FILE[key][0]
                 tgt_perms = KEY_2_FILE[key][1]
                 util.write_file(tgt_fn, val, tgt_perms)
 
-        for (priv, pub) in PRIV_2_PUB.iteritems():
+        for (priv, pub) in PRIV_2_PUB.items():
             if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']:
                 continue
             pair = (KEY_2_FILE[priv][0], KEY_2_FILE[pub][0])

=== modified file 'cloudinit/config/cc_yum_add_repo.py'
--- cloudinit/config/cc_yum_add_repo.py	2014-08-26 18:50:11 +0000
+++ cloudinit/config/cc_yum_add_repo.py	2015-01-21 23:02:49 +0000
@@ -18,10 +18,11 @@
 
 import os
 
+import configobj
+import six
+
 from cloudinit import util
 
-import configobj
-
 
 def _canonicalize_id(repo_id):
     repo_id = repo_id.lower().replace("-", "_")
@@ -37,7 +38,7 @@
         # Can handle 'lists' in certain cases
         # See: http://bit.ly/Qqrf1t
         return "\n    ".join([_format_repo_value(v) for v in val])
-    if not isinstance(val, (basestring, str)):
+    if not isinstance(val, six.string_types):
         return str(val)
     return val
 

=== modified file 'cloudinit/distros/__init__.py'
--- cloudinit/distros/__init__.py	2015-01-16 19:29:48 +0000
+++ cloudinit/distros/__init__.py	2015-01-21 23:02:49 +0000
@@ -21,7 +21,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+import six
+from six import StringIO
 
 import abc
 import itertools
@@ -272,7 +273,7 @@
             if header:
                 contents.write("%s\n" % (header))
             contents.write("%s\n" % (eh))
-            util.write_file(self.hosts_fn, contents.getvalue(), mode=0644)
+            util.write_file(self.hosts_fn, contents.getvalue(), mode=0o644)
 
     def _bring_up_interface(self, device_name):
         cmd = ['ifup', device_name]
@@ -334,7 +335,7 @@
         redact_opts = ['passwd']
 
         # Check the values and create the command
-        for key, val in kwargs.iteritems():
+        for key, val in kwargs.items():
 
             if key in adduser_opts and val and isinstance(val, str):
                 adduser_cmd.extend([adduser_opts[key], val])
@@ -393,7 +394,7 @@
         if 'ssh_authorized_keys' in kwargs:
             # Try to handle this in a smart manner.
             keys = kwargs['ssh_authorized_keys']
-            if isinstance(keys, (basestring, str)):
+            if isinstance(keys, six.string_types):
                 keys = [keys]
             if isinstance(keys, dict):
                 keys = list(keys.values())
@@ -468,7 +469,7 @@
                              util.make_header(base="added"),
                              "#includedir %s" % (path), '']
                     sudoers_contents = "\n".join(lines)
-                    util.write_file(sudo_base, sudoers_contents, 0440)
+                    util.write_file(sudo_base, sudoers_contents, 0o440)
                 else:
                     lines = ['', util.make_header(base="added"),
                              "#includedir %s" % (path), '']
@@ -478,7 +479,7 @@
             except IOError as e:
                 util.logexc(LOG, "Failed to write %s", sudo_base)
                 raise e
-        util.ensure_dir(path, 0750)
+        util.ensure_dir(path, 0o750)
 
     def write_sudo_rules(self, user, rules, sudo_file=None):
         if not sudo_file:
@@ -491,7 +492,7 @@
         if isinstance(rules, (list, tuple)):
             for rule in rules:
                 lines.append("%s %s" % (user, rule))
-        elif isinstance(rules, (basestring, str)):
+        elif isinstance(rules, six.string_types):
             lines.append("%s %s" % (user, rules))
         else:
             msg = "Can not create sudoers rule addition with type %r"
@@ -506,7 +507,7 @@
                 content,
             ]
             try:
-                util.write_file(sudo_file, "\n".join(contents), 0440)
+                util.write_file(sudo_file, "\n".join(contents), 0o440)
             except IOError as e:
                 util.logexc(LOG, "Failed to write sudoers file %s", sudo_file)
                 raise e
@@ -561,10 +562,10 @@
         subst['ec2_region'] = "%s" % availability_zone[0:-1]
 
     results = {}
-    for (name, mirror) in mirror_info.get('failsafe', {}).iteritems():
+    for (name, mirror) in mirror_info.get('failsafe', {}).items():
         results[name] = mirror
 
-    for (name, searchlist) in mirror_info.get('search', {}).iteritems():
+    for (name, searchlist) in mirror_info.get('search', {}).items():
         mirrors = []
         for tmpl in searchlist:
             try:
@@ -604,30 +605,30 @@
 # is the standard form used in the rest
 # of cloud-init
 def _normalize_groups(grp_cfg):
-    if isinstance(grp_cfg, (str, basestring)):
+    if isinstance(grp_cfg, six.string_types):
         grp_cfg = grp_cfg.strip().split(",")
-    if isinstance(grp_cfg, (list)):
+    if isinstance(grp_cfg, list):
         c_grp_cfg = {}
         for i in grp_cfg:
-            if isinstance(i, (dict)):
+            if isinstance(i, dict):
                 for k, v in i.items():
                     if k not in c_grp_cfg:
-                        if isinstance(v, (list)):
+                        if isinstance(v, list):
                             c_grp_cfg[k] = list(v)
-                        elif isinstance(v, (basestring, str)):
+                        elif isinstance(v, six.string_types):
                             c_grp_cfg[k] = [v]
                         else:
                             raise TypeError("Bad group member type %s" %
                                             type_utils.obj_name(v))
                     else:
-                        if isinstance(v, (list)):
+                        if isinstance(v, list):
                             c_grp_cfg[k].extend(v)
-                        elif isinstance(v, (basestring, str)):
+                        elif isinstance(v, six.string_types):
                             c_grp_cfg[k].append(v)
                         else:
                             raise TypeError("Bad group member type %s" %
                                             type_utils.obj_name(v))
-            elif isinstance(i, (str, basestring)):
+            elif isinstance(i, six.string_types):
                 if i not in c_grp_cfg:
                     c_grp_cfg[i] = []
             else:
@@ -635,7 +636,7 @@
                                 type_utils.obj_name(i))
         grp_cfg = c_grp_cfg
     groups = {}
-    if isinstance(grp_cfg, (dict)):
+    if isinstance(grp_cfg, dict):
         for (grp_name, grp_members) in grp_cfg.items():
             groups[grp_name] = util.uniq_merge_sorted(grp_members)
     else:
@@ -661,29 +662,29 @@
 # entry 'default' which will be marked as true
 # all other users will be marked as false.
 def _normalize_users(u_cfg, def_user_cfg=None):
-    if isinstance(u_cfg, (dict)):
+    if isinstance(u_cfg, dict):
         ad_ucfg = []
         for (k, v) in u_cfg.items():
-            if isinstance(v, (bool, int, basestring, str, float)):
+            if isinstance(v, (bool, int, float) + six.string_types):
                 if util.is_true(v):
                     ad_ucfg.append(str(k))
-            elif isinstance(v, (dict)):
+            elif isinstance(v, dict):
                 v['name'] = k
                 ad_ucfg.append(v)
             else:
                 raise TypeError(("Unmappable user value type %s"
                                  " for key %s") % (type_utils.obj_name(v), k))
         u_cfg = ad_ucfg
-    elif isinstance(u_cfg, (str, basestring)):
+    elif isinstance(u_cfg, six.string_types):
         u_cfg = util.uniq_merge_sorted(u_cfg)
 
     users = {}
     for user_config in u_cfg:
-        if isinstance(user_config, (str, basestring, list)):
+        if isinstance(user_config, (list,) + six.string_types):
             for u in util.uniq_merge(user_config):
                 if u and u not in users:
                     users[u] = {}
-        elif isinstance(user_config, (dict)):
+        elif isinstance(user_config, dict):
             if 'name' in user_config:
                 n = user_config.pop('name')
                 prev_config = users.get(n) or {}
@@ -784,11 +785,11 @@
         old_user = cfg['user']
         # Translate it into the format that is more useful
         # going forward
-        if isinstance(old_user, (basestring, str)):
+        if isinstance(old_user, six.string_types):
             old_user = {
                 'name': old_user,
             }
-        if not isinstance(old_user, (dict)):
+        if not isinstance(old_user, dict):
             LOG.warn(("Format for 'user' key must be a string or "
                       "dictionary and not %s"), type_utils.obj_name(old_user))
             old_user = {}
@@ -813,7 +814,7 @@
     default_user_config = util.mergemanydict([old_user, distro_user_config])
 
     base_users = cfg.get('users', [])
-    if not isinstance(base_users, (list, dict, str, basestring)):
+    if not isinstance(base_users, (list, dict) + six.string_types):
         LOG.warn(("Format for 'users' key must be a comma separated string"
                   " or a dictionary or a list and not %s"),
                  type_utils.obj_name(base_users))
@@ -822,12 +823,12 @@
     if old_user:
         # Ensure that when user: is provided that this user
         # always gets added (as the default user)
-        if isinstance(base_users, (list)):
+        if isinstance(base_users, list):
             # Just add it on at the end...
             base_users.append({'name': 'default'})
-        elif isinstance(base_users, (dict)):
+        elif isinstance(base_users, dict):
             base_users['default'] = dict(base_users).get('default', True)
-        elif isinstance(base_users, (str, basestring)):
+        elif isinstance(base_users, six.string_types):
             # Just append it on to be re-parsed later
             base_users += ",default"
 

=== modified file 'cloudinit/distros/arch.py'
--- cloudinit/distros/arch.py	2015-01-16 19:29:48 +0000
+++ cloudinit/distros/arch.py	2015-01-21 23:02:49 +0000
@@ -66,7 +66,7 @@
                   settings, entries)
         dev_names = entries.keys()
         # Format for netctl
-        for (dev, info) in entries.iteritems():
+        for (dev, info) in entries.items():
             nameservers = []
             net_fn = self.network_conf_dir + dev
             net_cfg = {

=== modified file 'cloudinit/distros/freebsd.py'
--- cloudinit/distros/freebsd.py	2015-01-16 19:29:48 +0000
+++ cloudinit/distros/freebsd.py	2015-01-21 23:02:49 +0000
@@ -16,7 +16,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+import six
+from six import StringIO
 
 import re
 
@@ -203,8 +204,9 @@
 
         redact_opts = ['passwd']
 
-        for key, val in kwargs.iteritems():
-            if key in adduser_opts and val and isinstance(val, basestring):
+        for key, val in kwargs.items():
+            if (key in adduser_opts and val
+                    and isinstance(val, six.string_types)):
                 adduser_cmd.extend([adduser_opts[key], val])
 
                 # Redact certain fields from the logs
@@ -271,7 +273,7 @@
         nameservers = []
         searchdomains = []
         dev_names = entries.keys()
-        for (device, info) in entries.iteritems():
+        for (device, info) in entries.items():
             # Skip the loopback interface.
             if device.startswith('lo'):
                 continue
@@ -323,7 +325,7 @@
                 resolvconf.add_search_domain(domain)
             except ValueError:
                 util.logexc(LOG, "Failed to add search domain %s", domain)
-        util.write_file(self.resolv_conf_fn, str(resolvconf), 0644)
+        util.write_file(self.resolv_conf_fn, str(resolvconf), 0o644)
 
         return dev_names
 

=== modified file 'cloudinit/distros/net_util.py'
--- cloudinit/distros/net_util.py	2015-01-06 17:02:38 +0000
+++ cloudinit/distros/net_util.py	2015-01-21 23:02:49 +0000
@@ -103,7 +103,7 @@
             consume[cmd] = args
     # Check if anything left over to consume
     absorb = False
-    for (cmd, args) in consume.iteritems():
+    for (cmd, args) in consume.items():
         if cmd == 'iface':
             absorb = True
     if absorb:

=== modified file 'cloudinit/distros/parsers/hostname.py'
--- cloudinit/distros/parsers/hostname.py	2012-11-12 22:30:08 +0000
+++ cloudinit/distros/parsers/hostname.py	2015-01-21 23:02:49 +0000
@@ -16,7 +16,7 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit.distros.parsers import chop_comment
 

=== modified file 'cloudinit/distros/parsers/hosts.py'
--- cloudinit/distros/parsers/hosts.py	2012-11-13 06:14:31 +0000
+++ cloudinit/distros/parsers/hosts.py	2015-01-21 23:02:49 +0000
@@ -16,7 +16,7 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit.distros.parsers import chop_comment
 

=== modified file 'cloudinit/distros/parsers/resolv_conf.py'
--- cloudinit/distros/parsers/resolv_conf.py	2014-08-26 19:53:41 +0000
+++ cloudinit/distros/parsers/resolv_conf.py	2015-01-21 23:02:49 +0000
@@ -16,7 +16,7 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit import util
 

=== modified file 'cloudinit/distros/parsers/sys_conf.py'
--- cloudinit/distros/parsers/sys_conf.py	2012-11-12 22:30:08 +0000
+++ cloudinit/distros/parsers/sys_conf.py	2015-01-21 23:02:49 +0000
@@ -16,7 +16,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
+import six
+from six import StringIO
 
 import pipes
 import re
@@ -69,7 +70,7 @@
         return out_contents.getvalue()
 
     def _quote(self, value, multiline=False):
-        if not isinstance(value, (str, basestring)):
+        if not isinstance(value, six.string_types):
             raise ValueError('Value "%s" is not a string' % (value))
         if len(value) == 0:
             return ''

=== modified file 'cloudinit/distros/rhel.py'
--- cloudinit/distros/rhel.py	2015-01-06 17:02:38 +0000
+++ cloudinit/distros/rhel.py	2015-01-21 23:02:49 +0000
@@ -73,7 +73,7 @@
         searchservers = []
         dev_names = entries.keys()
         use_ipv6 = False
-        for (dev, info) in entries.iteritems():
+        for (dev, info) in entries.items():
             net_fn = self.network_script_tpl % (dev)
             net_cfg = {
                 'DEVICE': dev,

=== modified file 'cloudinit/distros/sles.py'
--- cloudinit/distros/sles.py	2015-01-16 19:29:48 +0000
+++ cloudinit/distros/sles.py	2015-01-21 23:02:49 +0000
@@ -62,7 +62,7 @@
         nameservers = []
         searchservers = []
         dev_names = entries.keys()
-        for (dev, info) in entries.iteritems():
+        for (dev, info) in entries.items():
             net_fn = self.network_script_tpl % (dev)
             mode = info.get('auto')
             if mode and mode.lower() == 'true':

=== modified file 'cloudinit/ec2_utils.py'
--- cloudinit/ec2_utils.py	2014-09-05 17:24:19 +0000
+++ cloudinit/ec2_utils.py	2015-01-21 23:02:49 +0000
@@ -17,7 +17,6 @@
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
 import functools
-import httplib
 import json
 
 from cloudinit import log as logging
@@ -25,7 +24,7 @@
 from cloudinit import util
 
 LOG = logging.getLogger(__name__)
-SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND])
+SKIP_USERDATA_CODES = frozenset([url_helper.NOT_FOUND])
 
 
 class MetadataLeafDecoder(object):
@@ -123,7 +122,7 @@
         leaf_contents = {}
         for (field, resource) in leaves.items():
             leaf_url = url_helper.combine_url(base_url, resource)
-            leaf_blob = str(self._caller(leaf_url))
+            leaf_blob = self._caller(leaf_url).contents
             leaf_contents[field] = self._leaf_decoder(field, leaf_blob)
         joined = {}
         joined.update(child_contents)
@@ -160,7 +159,7 @@
                                          timeout=timeout,
                                          retries=retries,
                                          exception_cb=exception_cb)
-        user_data = str(response)
+        user_data = response.contents
     except url_helper.UrlError as e:
         if e.code not in SKIP_USERDATA_CODES:
             util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
@@ -183,7 +182,7 @@
 
     try:
         response = caller(md_url)
-        materializer = MetadataMaterializer(str(response),
+        materializer = MetadataMaterializer(response.contents,
                                             md_url, caller,
                                             leaf_decoder=leaf_decoder)
         md = materializer.materialize()

=== modified file 'cloudinit/handlers/__init__.py'
--- cloudinit/handlers/__init__.py	2014-01-16 21:57:21 +0000
+++ cloudinit/handlers/__init__.py	2015-01-21 23:02:49 +0000
@@ -147,7 +147,7 @@
     if not modfname.endswith(".py"):
         modfname = "%s.py" % (modfname)
     # TODO(harlowja): Check if path exists??
-    util.write_file(modfname, payload, 0600)
+    util.write_file(modfname, payload, 0o600)
     handlers = pdata['handlers']
     try:
         mod = fixup_handler(importer.import_module(modname))

=== modified file 'cloudinit/handlers/boot_hook.py'
--- cloudinit/handlers/boot_hook.py	2014-08-26 19:53:41 +0000
+++ cloudinit/handlers/boot_hook.py	2015-01-21 23:02:49 +0000
@@ -50,7 +50,7 @@
         filepath = os.path.join(self.boothook_dir, filename)
         contents = util.strip_prefix_suffix(util.dos2unix(payload),
                                             prefix=BOOTHOOK_PREFIX)
-        util.write_file(filepath, contents.lstrip(), 0700)
+        util.write_file(filepath, contents.lstrip(), 0o700)
         return filepath
 
     def handle_part(self, data, ctype, filename, payload, frequency):

=== modified file 'cloudinit/handlers/cloud_config.py'
--- cloudinit/handlers/cloud_config.py	2014-08-26 19:53:41 +0000
+++ cloudinit/handlers/cloud_config.py	2015-01-21 23:02:49 +0000
@@ -95,7 +95,7 @@
             lines.append(util.yaml_dumps(self.cloud_buf))
         else:
             lines = []
-        util.write_file(self.cloud_fn, "\n".join(lines), 0600)
+        util.write_file(self.cloud_fn, "\n".join(lines), 0o600)
 
     def _extract_mergers(self, payload, headers):
         merge_header_headers = ''

=== modified file 'cloudinit/handlers/shell_script.py'
--- cloudinit/handlers/shell_script.py	2014-08-26 19:53:41 +0000
+++ cloudinit/handlers/shell_script.py	2015-01-21 23:02:49 +0000
@@ -52,4 +52,4 @@
         filename = util.clean_filename(filename)
         payload = util.dos2unix(payload)
         path = os.path.join(self.script_dir, filename)
-        util.write_file(path, payload, 0700)
+        util.write_file(path, payload, 0o700)

=== modified file 'cloudinit/handlers/upstart_job.py'
--- cloudinit/handlers/upstart_job.py	2014-08-26 19:53:41 +0000
+++ cloudinit/handlers/upstart_job.py	2015-01-21 23:02:49 +0000
@@ -65,7 +65,7 @@
 
         payload = util.dos2unix(payload)
         path = os.path.join(self.upstart_dir, filename)
-        util.write_file(path, payload, 0644)
+        util.write_file(path, payload, 0o644)
 
         if SUITABLE_UPSTART:
             util.subp(["initctl", "reload-configuration"], capture=False)

=== modified file 'cloudinit/helpers.py'
--- cloudinit/helpers.py	2014-01-17 20:12:31 +0000
+++ cloudinit/helpers.py	2015-01-21 23:02:49 +0000
@@ -23,10 +23,11 @@
 from time import time
 
 import contextlib
-import io
 import os
 
-from ConfigParser import (NoSectionError, NoOptionError, RawConfigParser)
+import six
+from six.moves.configparser import (
+    NoSectionError, NoOptionError, RawConfigParser)
 
 from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,
                                 CFG_ENV_NAME)
@@ -318,10 +319,10 @@
         return self.registered[content_type]
 
     def items(self):
-        return self.registered.items()
+        return list(self.registered.items())
 
-    def iteritems(self):
-        return self.registered.iteritems()
+    # XXX This should really go away.
+    iteritems = items
 
 
 class Paths(object):
@@ -449,7 +450,7 @@
 
     def stringify(self, header=None):
         contents = ''
-        with io.BytesIO() as outputstream:
+        with six.StringIO() as outputstream:
             self.write(outputstream)
             outputstream.flush()
             contents = outputstream.getvalue()

=== modified file 'cloudinit/log.py'
--- cloudinit/log.py	2013-04-17 16:42:55 +0000
+++ cloudinit/log.py	2015-01-21 23:02:49 +0000
@@ -28,7 +28,8 @@
 import os
 import sys
 
-from StringIO import StringIO
+import six
+from six import StringIO
 
 # Logging levels for easy access
 CRITICAL = logging.CRITICAL
@@ -72,13 +73,13 @@
 
     log_cfgs = []
     log_cfg = cfg.get('logcfg')
-    if log_cfg and isinstance(log_cfg, (str, basestring)):
+    if log_cfg and isinstance(log_cfg, six.string_types):
         # If there is a 'logcfg' entry in the config,
         # respect it, it is the old keyname
         log_cfgs.append(str(log_cfg))
     elif "log_cfgs" in cfg:
         for a_cfg in cfg['log_cfgs']:
-            if isinstance(a_cfg, (basestring, str)):
+            if isinstance(a_cfg, six.string_types):
                 log_cfgs.append(a_cfg)
             elif isinstance(a_cfg, (collections.Iterable)):
                 cfg_str = [str(c) for c in a_cfg]

=== modified file 'cloudinit/mergers/__init__.py'
--- cloudinit/mergers/__init__.py	2014-09-02 20:31:18 +0000
+++ cloudinit/mergers/__init__.py	2015-01-21 23:02:49 +0000
@@ -18,6 +18,8 @@
 
 import re
 
+import six
+
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import type_utils
@@ -95,7 +97,7 @@
         raw_mergers = config.pop('merge_type', None)
     if raw_mergers is None:
         return parsed_mergers
-    if isinstance(raw_mergers, (str, basestring)):
+    if isinstance(raw_mergers, six.string_types):
         return string_extract_mergers(raw_mergers)
     for m in raw_mergers:
         if isinstance(m, (dict)):

=== modified file 'cloudinit/mergers/m_dict.py'
--- cloudinit/mergers/m_dict.py	2013-05-03 22:05:45 +0000
+++ cloudinit/mergers/m_dict.py	2015-01-21 23:02:49 +0000
@@ -16,6 +16,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 DEF_MERGE_TYPE = 'no_replace'
 MERGE_TYPES = ('replace', DEF_MERGE_TYPE,)
 
@@ -57,7 +59,7 @@
                 return new_v
             if isinstance(new_v, (list, tuple)) and self._recurse_array:
                 return self._merger.merge(old_v, new_v)
-            if isinstance(new_v, (basestring)) and self._recurse_str:
+            if isinstance(new_v, six.string_types) and self._recurse_str:
                 return self._merger.merge(old_v, new_v)
             if isinstance(new_v, (dict)) and self._recurse_dict:
                 return self._merger.merge(old_v, new_v)

=== modified file 'cloudinit/mergers/m_list.py'
--- cloudinit/mergers/m_list.py	2014-08-26 18:50:11 +0000
+++ cloudinit/mergers/m_list.py	2015-01-21 23:02:49 +0000
@@ -16,6 +16,8 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 DEF_MERGE_TYPE = 'replace'
 MERGE_TYPES = ('append', 'prepend', DEF_MERGE_TYPE, 'no_replace')
 
@@ -73,7 +75,7 @@
                 return old_v
             if isinstance(new_v, (list, tuple)) and self._recurse_array:
                 return self._merger.merge(old_v, new_v)
-            if isinstance(new_v, (str, basestring)) and self._recurse_str:
+            if isinstance(new_v, six.string_types) and self._recurse_str:
                 return self._merger.merge(old_v, new_v)
             if isinstance(new_v, (dict)) and self._recurse_dict:
                 return self._merger.merge(old_v, new_v)
@@ -82,6 +84,6 @@
         # Ok now we are replacing same indexes
         merged_list.extend(value)
         common_len = min(len(merged_list), len(merge_with))
-        for i in xrange(0, common_len):
+        for i in range(0, common_len):
             merged_list[i] = merge_same_index(merged_list[i], merge_with[i])
         return merged_list

=== modified file 'cloudinit/mergers/m_str.py'
--- cloudinit/mergers/m_str.py	2013-05-03 21:41:28 +0000
+++ cloudinit/mergers/m_str.py	2015-01-21 23:02:49 +0000
@@ -17,6 +17,8 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import six
+
 
 class Merger(object):
     def __init__(self, _merger, opts):
@@ -34,11 +36,11 @@
     # perform the following action, if appending we will
     # merge them together, otherwise we will just return value.
     def _on_str(self, value, merge_with):
-        if not isinstance(value, (basestring)):
+        if not isinstance(value, six.string_types):
             return merge_with
         if not self._append:
             return merge_with
-        if isinstance(value, unicode):
-            return value + unicode(merge_with)
+        if isinstance(value, six.text_type):
+            return value + six.text_type(merge_with)
         else:
-            return value + str(merge_with)
+            return value + six.binary_type(merge_with)

=== modified file 'cloudinit/netinfo.py'
--- cloudinit/netinfo.py	2014-12-01 05:35:38 +0000
+++ cloudinit/netinfo.py	2015-01-21 23:02:49 +0000
@@ -87,7 +87,7 @@
                     devs[curdev][target] = toks[i][len(field) + 1:]
 
     if empty != "":
-        for (_devname, dev) in devs.iteritems():
+        for (_devname, dev) in devs.items():
             for field in dev:
                 if dev[field] == "":
                     dev[field] = empty
@@ -181,7 +181,7 @@
     else:
         fields = ['Device', 'Up', 'Address', 'Mask', 'Scope', 'Hw-Address']
         tbl = PrettyTable(fields)
-        for (dev, d) in netdev.iteritems():
+        for (dev, d) in netdev.items():
             tbl.add_row([dev, d["up"], d["addr"], d["mask"], ".", d["hwaddr"]])
             if d.get('addr6'):
                 tbl.add_row([dev, d["up"],

=== modified file 'cloudinit/signal_handler.py'
--- cloudinit/signal_handler.py	2012-09-19 20:33:56 +0000
+++ cloudinit/signal_handler.py	2015-01-21 23:02:49 +0000
@@ -22,7 +22,7 @@
 import signal
 import sys
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit import log as logging
 from cloudinit import util

=== modified file 'cloudinit/sources/DataSourceConfigDrive.py'
--- cloudinit/sources/DataSourceConfigDrive.py	2015-01-06 17:02:38 +0000
+++ cloudinit/sources/DataSourceConfigDrive.py	2015-01-21 23:02:49 +0000
@@ -216,11 +216,11 @@
     files = data.get('files', {})
     if files:
         LOG.debug("Writing %s injected files", len(files))
-        for (filename, content) in files.iteritems():
+        for (filename, content) in files.items():
             if not filename.startswith(os.sep):
                 filename = os.sep + filename
             try:
-                util.write_file(filename, content, mode=0660)
+                util.write_file(filename, content, mode=0o660)
             except IOError:
                 util.logexc(LOG, "Failed writing file: %s", filename)
 

=== modified file 'cloudinit/sources/DataSourceDigitalOcean.py'
--- cloudinit/sources/DataSourceDigitalOcean.py	2015-01-06 17:02:38 +0000
+++ cloudinit/sources/DataSourceDigitalOcean.py	2015-01-21 23:02:49 +0000
@@ -18,7 +18,7 @@
 from cloudinit import util
 from cloudinit import sources
 from cloudinit import ec2_utils
-from types import StringType
+
 import functools
 
 
@@ -72,10 +72,11 @@
         return "\n".join(self.metadata['vendor-data'])
 
     def get_public_ssh_keys(self):
-        if type(self.metadata['public-keys']) is StringType:
-            return [self.metadata['public-keys']]
+        public_keys = self.metadata['public-keys']
+        if isinstance(public_keys, list):
+            return public_keys
         else:
-            return self.metadata['public-keys']
+            return [public_keys]
 
     @property
     def availability_zone(self):

=== modified file 'cloudinit/sources/DataSourceEc2.py'
--- cloudinit/sources/DataSourceEc2.py	2014-02-01 20:03:32 +0000
+++ cloudinit/sources/DataSourceEc2.py	2015-01-21 23:02:49 +0000
@@ -156,8 +156,8 @@
         # 'ephemeral0': '/dev/sdb',
         # 'root': '/dev/sda1'}
         found = None
-        bdm_items = self.metadata['block-device-mapping'].iteritems()
-        for (entname, device) in bdm_items:
+        bdm = self.metadata['block-device-mapping']
+        for (entname, device) in bdm.items():
             if entname == name:
                 found = device
                 break

=== modified file 'cloudinit/sources/DataSourceMAAS.py'
--- cloudinit/sources/DataSourceMAAS.py	2013-04-25 15:58:38 +0000
+++ cloudinit/sources/DataSourceMAAS.py	2015-01-21 23:02:49 +0000
@@ -262,7 +262,7 @@
 
     userdata = content.get('user-data', "")
     md = {}
-    for (key, val) in content.iteritems():
+    for (key, val) in content.items():
         if key == 'user-data':
             continue
         md[key] = val

=== modified file 'cloudinit/sources/DataSourceOVF.py'
--- cloudinit/sources/DataSourceOVF.py	2014-09-22 18:35:03 +0000
+++ cloudinit/sources/DataSourceOVF.py	2015-01-21 23:02:49 +0000
@@ -66,7 +66,7 @@
             np = {'iso': transport_iso9660,
                   'vmware-guestd': transport_vmware_guestd, }
             name = None
-            for (name, transfunc) in np.iteritems():
+            for (name, transfunc) in np.items():
                 (contents, _dev, _fname) = transfunc()
                 if contents:
                     break
@@ -138,7 +138,7 @@
     ud = ""
     cfg_props = ['password']
     md_props = ['seedfrom', 'local-hostname', 'public-keys', 'instance-id']
-    for (prop, val) in props.iteritems():
+    for (prop, val) in props.items():
         if prop == 'hostname':
             prop = "local-hostname"
         if prop in md_props:
@@ -183,7 +183,7 @@
 
     # Go through mounts to see if it was already mounted
     mounts = util.mounts()
-    for (dev, info) in mounts.iteritems():
+    for (dev, info) in mounts.items():
         fstype = info['fstype']
         if fstype != "iso9660" and require_iso:
             continue

=== modified file 'cloudinit/sources/DataSourceSmartOS.py'
--- cloudinit/sources/DataSourceSmartOS.py	2014-08-26 18:50:11 +0000
+++ cloudinit/sources/DataSourceSmartOS.py	2015-01-21 23:02:49 +0000
@@ -30,12 +30,12 @@
 #       Comments with "@datadictionary" are snippets of the definition
 
 import base64
+import os
+import serial
+
 from cloudinit import log as logging
 from cloudinit import sources
 from cloudinit import util
-import os
-import os.path
-import serial
 
 
 LOG = logging.getLogger(__name__)
@@ -201,7 +201,7 @@
         if b64_all is not None:
             self.b64_all = util.is_true(b64_all)
 
-        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.iteritems():
+        for ci_noun, attribute in SMARTOS_ATTRIB_MAP.items():
             smartos_noun, strip = attribute
             md[ci_noun] = self.query(smartos_noun, strip=strip)
 
@@ -218,11 +218,12 @@
         user_script = os.path.join(data_d, 'user-script')
         u_script_l = "%s/user-script" % LEGACY_USER_D
         write_boot_content(md.get('user-script'), content_f=user_script,
-                           link=u_script_l, shebang=True, mode=0700)
+                           link=u_script_l, shebang=True, mode=0o700)
 
         operator_script = os.path.join(data_d, 'operator-script')
         write_boot_content(md.get('operator-script'),
-                           content_f=operator_script, shebang=False, mode=0700)
+                           content_f=operator_script, shebang=False,
+                           mode=0o700)
 
         # @datadictionary:  This key has no defined format, but its value
         # is written to the file /var/db/mdata-user-data on each boot prior
@@ -381,7 +382,7 @@
 
 
 def write_boot_content(content, content_f, link=None, shebang=False,
-                       mode=0400):
+                       mode=0o400):
     """
     Write the content to content_f. Under the following rules:
         1. If no content, remove the file

=== modified file 'cloudinit/sources/__init__.py'
--- cloudinit/sources/__init__.py	2014-09-03 18:46:20 +0000
+++ cloudinit/sources/__init__.py	2015-01-21 23:02:49 +0000
@@ -23,6 +23,8 @@
 import abc
 import os
 
+import six
+
 from cloudinit import importer
 from cloudinit import log as logging
 from cloudinit import type_utils
@@ -130,7 +132,7 @@
         # we want to return the correct value for what will actually
         # exist in this instance
         mappings = {"sd": ("vd", "xvd", "vtb")}
-        for (nfrom, tlist) in mappings.iteritems():
+        for (nfrom, tlist) in mappings.items():
             if not short_name.startswith(nfrom):
                 continue
             for nto in tlist:
@@ -218,18 +220,18 @@
     if not pubkey_data:
         return keys
 
-    if isinstance(pubkey_data, (basestring, str)):
+    if isinstance(pubkey_data, six.string_types):
         return str(pubkey_data).splitlines()
 
     if isinstance(pubkey_data, (list, set)):
         return list(pubkey_data)
 
     if isinstance(pubkey_data, (dict)):
-        for (_keyname, klist) in pubkey_data.iteritems():
+        for (_keyname, klist) in pubkey_data.items():
             # lp:506332 uec metadata service responds with
             # data that makes boto populate a string for 'klist' rather
             # than a list.
-            if isinstance(klist, (str, basestring)):
+            if isinstance(klist, six.string_types):
                 klist = [klist]
             if isinstance(klist, (list, set)):
                 for pkey in klist:

=== modified file 'cloudinit/sources/helpers/openstack.py'
--- cloudinit/sources/helpers/openstack.py	2014-09-11 14:41:10 +0000
+++ cloudinit/sources/helpers/openstack.py	2015-01-21 23:02:49 +0000
@@ -24,6 +24,8 @@
 import functools
 import os
 
+import six
+
 from cloudinit import ec2_utils
 from cloudinit import log as logging
 from cloudinit import sources
@@ -205,7 +207,7 @@
         """
 
         load_json_anytype = functools.partial(
-            util.load_json, root_types=(dict, basestring, list))
+            util.load_json, root_types=(dict, list) + six.string_types)
 
         def datafiles(version):
             files = {}
@@ -234,7 +236,7 @@
             'version': 2,
         }
         data = datafiles(self._find_working_version())
-        for (name, (path, required, translator)) in data.iteritems():
+        for (name, (path, required, translator)) in data.items():
             path = self._path_join(self.base_path, path)
             data = None
             found = False
@@ -364,7 +366,7 @@
             raise NonReadable("%s: no files found" % (self.base_path))
 
         md = {}
-        for (name, (key, translator, default)) in FILES_V1.iteritems():
+        for (name, (key, translator, default)) in FILES_V1.items():
             if name in found:
                 path = found[name]
                 try:
@@ -478,7 +480,7 @@
     """
     if not data:
         return None
-    if isinstance(data, (str, unicode, basestring)):
+    if isinstance(data, six.string_types):
         return data
     if isinstance(data, list):
         return copy.deepcopy(data)

=== modified file 'cloudinit/ssh_util.py'
--- cloudinit/ssh_util.py	2014-11-12 13:52:28 +0000
+++ cloudinit/ssh_util.py	2015-01-21 23:02:49 +0000
@@ -239,7 +239,7 @@
     # Make sure the users .ssh dir is setup accordingly
     (ssh_dir, pwent) = users_ssh_info(username)
     if not os.path.isdir(ssh_dir):
-        util.ensure_dir(ssh_dir, mode=0700)
+        util.ensure_dir(ssh_dir, mode=0o700)
         util.chownbyid(ssh_dir, pwent.pw_uid, pwent.pw_gid)
 
     # Turn the 'update' keys given into actual entries
@@ -252,8 +252,8 @@
     (auth_key_fn, auth_key_entries) = extract_authorized_keys(username)
     with util.SeLinuxGuard(ssh_dir, recursive=True):
         content = update_authorized_keys(auth_key_entries, key_entries)
-        util.ensure_dir(os.path.dirname(auth_key_fn), mode=0700)
-        util.write_file(auth_key_fn, content, mode=0600)
+        util.ensure_dir(os.path.dirname(auth_key_fn), mode=0o700)
+        util.write_file(auth_key_fn, content, mode=0o600)
         util.chownbyid(auth_key_fn, pwent.pw_uid, pwent.pw_gid)
 
 

=== modified file 'cloudinit/stages.py'
--- cloudinit/stages.py	2014-09-02 20:31:18 +0000
+++ cloudinit/stages.py	2015-01-21 23:02:49 +0000
@@ -20,12 +20,13 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import cPickle as pickle
-
 import copy
 import os
 import sys
 
+import six
+from six.moves import cPickle as pickle
+
 from cloudinit.settings import (PER_INSTANCE, FREQUENCIES, CLOUD_CONFIG)
 
 from cloudinit import handlers
@@ -202,7 +203,7 @@
             util.logexc(LOG, "Failed pickling datasource %s", self.datasource)
             return False
         try:
-            util.write_file(pickled_fn, pk_contents, mode=0400)
+            util.write_file(pickled_fn, pk_contents, mode=0o400)
         except Exception:
             util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)
             return False
@@ -324,15 +325,15 @@
 
     def _store_userdata(self):
         raw_ud = "%s" % (self.datasource.get_userdata_raw())
-        util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0600)
+        util.write_file(self._get_ipath('userdata_raw'), raw_ud, 0o600)
         processed_ud = "%s" % (self.datasource.get_userdata())
-        util.write_file(self._get_ipath('userdata'), processed_ud, 0600)
+        util.write_file(self._get_ipath('userdata'), processed_ud, 0o600)
 
     def _store_vendordata(self):
         raw_vd = "%s" % (self.datasource.get_vendordata_raw())
-        util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0600)
+        util.write_file(self._get_ipath('vendordata_raw'), raw_vd, 0o600)
         processed_vd = "%s" % (self.datasource.get_vendordata())
-        util.write_file(self._get_ipath('vendordata'), processed_vd, 0600)
+        util.write_file(self._get_ipath('vendordata'), processed_vd, 0o600)
 
     def _default_handlers(self, opts=None):
         if opts is None:
@@ -384,7 +385,7 @@
             if not path or not os.path.isdir(path):
                 return
             potential_handlers = util.find_modules(path)
-            for (fname, mod_name) in potential_handlers.iteritems():
+            for (fname, mod_name) in potential_handlers.items():
                 try:
                     mod_locs, looked_locs = importer.find_module(
                         mod_name, [''], ['list_types', 'handle_part'])
@@ -422,7 +423,7 @@
 
         def init_handlers():
             # Init the handlers first
-            for (_ctype, mod) in c_handlers.iteritems():
+            for (_ctype, mod) in c_handlers.items():
                 if mod in c_handlers.initialized:
                     # Avoid initing the same module twice (if said module
                     # is registered to more than one content-type).
@@ -449,7 +450,7 @@
 
         def finalize_handlers():
             # Give callbacks opportunity to finalize
-            for (_ctype, mod) in c_handlers.iteritems():
+            for (_ctype, mod) in c_handlers.items():
                 if mod not in c_handlers.initialized:
                     # Said module was never inited in the first place, so lets
                     # not attempt to finalize those that never got called.
@@ -574,7 +575,7 @@
         for item in cfg_mods:
             if not item:
                 continue
-            if isinstance(item, (str, basestring)):
+            if isinstance(item, six.string_types):
                 module_list.append({
                     'mod': item.strip(),
                 })

=== modified file 'cloudinit/type_utils.py'
--- cloudinit/type_utils.py	2014-08-26 19:53:41 +0000
+++ cloudinit/type_utils.py	2015-01-21 23:02:49 +0000
@@ -22,11 +22,31 @@
 
 import types
 
+import six
+
+
+if six.PY3:
+    _NAME_TYPES = (
+        types.ModuleType,
+        types.FunctionType,
+        types.LambdaType,
+        type,
+    )
+else:
+    _NAME_TYPES = (
+        types.TypeType,
+        types.ModuleType,
+        types.FunctionType,
+        types.LambdaType,
+        types.ClassType,
+    )
+
 
 def obj_name(obj):
-    if isinstance(obj, (types.TypeType,
-                        types.ModuleType,
-                        types.FunctionType,
-                        types.LambdaType)):
-        return str(obj.__name__)
-    return obj_name(obj.__class__)
+    if isinstance(obj, _NAME_TYPES):
+        return six.text_type(obj.__name__)
+    else:
+        if not hasattr(obj, '__class__'):
+            return repr(obj)
+        else:
+            return obj_name(obj.__class__)

=== modified file 'cloudinit/url_helper.py'
--- cloudinit/url_helper.py	2014-08-26 19:53:41 +0000
+++ cloudinit/url_helper.py	2015-01-21 23:02:49 +0000
@@ -20,21 +20,29 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-import httplib
 import time
-import urllib
+
+import six
 
 import requests
 from requests import exceptions
 
-from urlparse import (urlparse, urlunparse)
+from six.moves.urllib.parse import (
+    urlparse, urlunparse,
+    quote as urlquote)
 
 from cloudinit import log as logging
 from cloudinit import version
 
 LOG = logging.getLogger(__name__)
 
-NOT_FOUND = httplib.NOT_FOUND
+if six.PY2:
+    import httplib
+    NOT_FOUND = httplib.NOT_FOUND
+else:
+    import http.client
+    NOT_FOUND = http.client.NOT_FOUND
+
 
 # Check if requests has ssl support (added in requests >= 0.8.8)
 SSL_ENABLED = False
@@ -70,7 +78,7 @@
         path = url_parsed[2]
         if path and not path.endswith("/"):
             path += "/"
-        path += urllib.quote(str(add_on), safe="/:")
+        path += urlquote(str(add_on), safe="/:")
         url_parsed[2] = path
         return urlunparse(url_parsed)
 
@@ -111,7 +119,7 @@
 
     @property
     def contents(self):
-        return self._response.content
+        return self._response.text
 
     @property
     def url(self):
@@ -135,7 +143,7 @@
         return self._response.status_code
 
     def __str__(self):
-        return self.contents
+        return self._response.text
 
 
 class UrlError(IOError):

=== modified file 'cloudinit/user_data.py'
--- cloudinit/user_data.py	2014-01-24 20:29:09 +0000
+++ cloudinit/user_data.py	2015-01-21 23:02:49 +0000
@@ -29,6 +29,8 @@
 from email.mime.nonmultipart import MIMENonMultipart
 from email.mime.text import MIMEText
 
+import six
+
 from cloudinit import handlers
 from cloudinit import log as logging
 from cloudinit import util
@@ -235,7 +237,7 @@
                 resp = util.read_file_or_url(include_url,
                                              ssl_details=self.ssl_details)
                 if include_once_on and resp.ok():
-                    util.write_file(include_once_fn, str(resp), mode=0600)
+                    util.write_file(include_once_fn, str(resp), mode=0o600)
                 if resp.ok():
                     content = str(resp)
                 else:
@@ -256,7 +258,7 @@
             #    filename and type not be present
             # or
             #  scalar(payload)
-            if isinstance(ent, (str, basestring)):
+            if isinstance(ent, six.string_types):
                 ent = {'content': ent}
             if not isinstance(ent, (dict)):
                 # TODO(harlowja) raise?
@@ -337,7 +339,7 @@
     data = util.decomp_gzip(raw_data)
     if "mime-version:" in data[0:4096].lower():
         msg = email.message_from_string(data)
-        for (key, val) in headers.iteritems():
+        for (key, val) in headers.items():
             _replace_header(msg, key, val)
     else:
         mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE)

=== modified file 'cloudinit/util.py'
--- cloudinit/util.py	2015-01-06 17:02:38 +0000
+++ cloudinit/util.py	2015-01-21 23:02:49 +0000
@@ -20,8 +20,6 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
-from StringIO import StringIO
-
 import contextlib
 import copy as obj_copy
 import ctypes
@@ -45,8 +43,10 @@
 import sys
 import tempfile
 import time
-import urlparse
-
+
+from six.moves.urllib import parse as urlparse
+
+import six
 import yaml
 
 from cloudinit import importer
@@ -69,8 +69,26 @@
 }
 FN_ALLOWED = ('_-.()' + string.digits + string.ascii_letters)
 
+TRUE_STRINGS = ('true', '1', 'on', 'yes')
+FALSE_STRINGS = ('off', '0', 'no', 'false')
+
+
 # Helper utils to see if running in a container
-CONTAINER_TESTS = ['running-in-container', 'lxc-is-container']
+CONTAINER_TESTS = ('running-in-container', 'lxc-is-container')
+
+
+def decode_binary(blob, encoding='utf-8'):
+    # Converts a binary type into a text type using given encoding.
+    if isinstance(blob, six.text_type):
+        return blob
+    return blob.decode(encoding)
+
+
+def encode_text(text, encoding='utf-8'):
+    # Converts a text string into a binary type using given encoding.
+    if isinstance(text, six.binary_type):
+        return text
+    return text.encode(encoding)
 
 
 class ProcessExecutionError(IOError):
@@ -95,7 +113,7 @@
         else:
             self.description = description
 
-        if not isinstance(exit_code, (long, int)):
+        if not isinstance(exit_code, six.integer_types):
             self.exit_code = '-'
         else:
             self.exit_code = exit_code
@@ -151,7 +169,8 @@
 
         path = os.path.realpath(self.path)
         # path should be a string, not unicode
-        path = str(path)
+        if six.PY2:
+            path = str(path)
         try:
             stats = os.lstat(path)
             self.selinux.matchpathcon(path, stats[stat.ST_MODE])
@@ -209,10 +228,10 @@
 def is_true(val, addons=None):
     if isinstance(val, (bool)):
         return val is True
-    check_set = ['true', '1', 'on', 'yes']
+    check_set = TRUE_STRINGS
     if addons:
-        check_set = check_set + addons
-    if str(val).lower().strip() in check_set:
+        check_set = list(check_set) + addons
+    if six.text_type(val).lower().strip() in check_set:
         return True
     return False
 
@@ -220,10 +239,10 @@
 def is_false(val, addons=None):
     if isinstance(val, (bool)):
         return val is False
-    check_set = ['off', '0', 'no', 'false']
+    check_set = FALSE_STRINGS
     if addons:
-        check_set = check_set + addons
-    if str(val).lower().strip() in check_set:
+        check_set = list(check_set) + addons
+    if six.text_type(val).lower().strip() in check_set:
         return True
     return False
 
@@ -273,7 +292,7 @@
 def uniq_merge(*lists):
     combined_list = []
     for a_list in lists:
-        if isinstance(a_list, (str, basestring)):
+        if isinstance(a_list, six.string_types):
             a_list = a_list.strip().split(",")
             # Kickout the empty ones
             a_list = [a for a in a_list if len(a)]
@@ -282,7 +301,7 @@
 
 
 def clean_filename(fn):
-    for (k, v) in FN_REPLACEMENTS.iteritems():
+    for (k, v) in FN_REPLACEMENTS.items():
         fn = fn.replace(k, v)
     removals = []
     for k in fn:
@@ -296,14 +315,14 @@
 
 def decomp_gzip(data, quiet=True):
     try:
-        buf = StringIO(str(data))
+        buf = six.BytesIO(encode_text(data))
         with contextlib.closing(gzip.GzipFile(None, "rb", 1, buf)) as gh:
-            return gh.read()
+            return decode_binary(gh.read())
     except Exception as e:
         if quiet:
             return data
         else:
-            raise DecompressionError(str(e))
+            raise DecompressionError(six.text_type(e))
 
 
 def extract_usergroup(ug_pair):
@@ -362,7 +381,7 @@
 
 
 def load_json(text, root_types=(dict,)):
-    decoded = json.loads(text)
+    decoded = json.loads(decode_binary(text))
     if not isinstance(decoded, tuple(root_types)):
         expected_types = ", ".join([str(t) for t in root_types])
         raise TypeError("(%s) root types expected, got %s instead"
@@ -394,7 +413,7 @@
     if key not in yobj:
         return default
     val = yobj[key]
-    if not isinstance(val, (str, basestring)):
+    if not isinstance(val, six.string_types):
         val = str(val)
     return val
 
@@ -433,7 +452,7 @@
     if isinstance(val, (list)):
         cval = [v for v in val]
         return cval
-    if not isinstance(val, (basestring)):
+    if not isinstance(val, six.string_types):
         val = str(val)
     return [val]
 
@@ -708,10 +727,10 @@
 
 def load_yaml(blob, default=None, allowed=(dict,)):
     loaded = default
+    blob = decode_binary(blob)
     try:
-        blob = str(blob)
-        LOG.debug(("Attempting to load yaml from string "
-                 "of length %s with allowed root types %s"),
+        LOG.debug("Attempting to load yaml from string "
+                 "of length %s with allowed root types %s",
                  len(blob), allowed)
         converted = safeyaml.load(blob)
         if not isinstance(converted, allowed):
@@ -746,14 +765,12 @@
     md_resp = read_file_or_url(md_url, timeout, retries, file_retries)
     md = None
     if md_resp.ok():
-        md_str = str(md_resp)
-        md = load_yaml(md_str, default={})
+        md = load_yaml(md_resp.contents, default={})
 
     ud_resp = read_file_or_url(ud_url, timeout, retries, file_retries)
     ud = None
     if ud_resp.ok():
-        ud_str = str(ud_resp)
-        ud = ud_str
+        ud = ud_resp.contents
 
     return (md, ud)
 
@@ -784,7 +801,7 @@
     if "conf_d" in cfg:
         confd = cfg['conf_d']
         if confd:
-            if not isinstance(confd, (str, basestring)):
+            if not isinstance(confd, six.string_types):
                 raise TypeError(("Config file %s contains 'conf_d' "
                                  "with non-string type %s") %
                                  (cfgfile, type_utils.obj_name(confd)))
@@ -921,8 +938,8 @@
         return (None, None, None)
 
     resp = read_file_or_url(url)
-    if resp.contents.startswith(starts) and resp.ok():
-        return (key, url, str(resp))
+    if resp.ok() and resp.contents.startswith(starts):
+        return (key, url, resp.contents)
 
     return (key, url, None)
 
@@ -1076,9 +1093,9 @@
     return out_list
 
 
-def load_file(fname, read_cb=None, quiet=False):
+def load_file(fname, read_cb=None, quiet=False, decode=True):
     LOG.debug("Reading from %s (quiet=%s)", fname, quiet)
-    ofh = StringIO()
+    ofh = six.BytesIO()
     try:
         with open(fname, 'rb') as ifh:
             pipe_in_out(ifh, ofh, chunk_cb=read_cb)
@@ -1089,7 +1106,10 @@
             raise
     contents = ofh.getvalue()
     LOG.debug("Read %s bytes from %s", len(contents), fname)
-    return contents
+    if decode:
+        return decode_binary(contents)
+    else:
+        return contents
 
 
 def get_cmdline():
@@ -1219,7 +1239,7 @@
 
 def hash_blob(blob, routine, mlen=None):
     hasher = hashlib.new(routine)
-    hasher.update(blob)
+    hasher.update(encode_text(blob))
     digest = hasher.hexdigest()
     # Don't get too long now
     if mlen is not None:
@@ -1250,7 +1270,7 @@
     os.rename(src, dest)
 
 
-def ensure_dirs(dirlist, mode=0755):
+def ensure_dirs(dirlist, mode=0o755):
     for d in dirlist:
         ensure_dir(d, mode)
 
@@ -1264,7 +1284,7 @@
             return
         try:
             if key and content:
-                write_file(target_fn, content, mode=0600)
+                write_file(target_fn, content, mode=0o600)
                 LOG.debug(("Wrote to %s with contents of command line"
                           " url %s (len=%s)"), target_fn, url, len(content))
             elif key and not content:
@@ -1280,8 +1300,7 @@
                           indent=4,
                           explicit_start=explicit_start,
                           explicit_end=explicit_end,
-                          default_flow_style=False,
-                          allow_unicode=True)
+                          default_flow_style=False)
 
 
 def ensure_dir(path, mode=None):
@@ -1489,7 +1508,7 @@
     write_file(path, content, omode="ab", mode=None)
 
 
-def ensure_file(path, mode=0644):
+def ensure_file(path, mode=0o644):
     write_file(path, content='', omode="ab", mode=mode)
 
 
@@ -1507,7 +1526,7 @@
             os.chmod(path, real_mode)
 
 
-def write_file(filename, content, mode=0644, omode="wb"):
+def write_file(filename, content, mode=0o644, omode="wb"):
     """
     Writes a file with the given content and sets the file mode as specified.
     Restores the SELinux context if possible.
@@ -1515,11 +1534,17 @@
     @param filename: The full path of the file to write.
     @param content: The content to write to the file.
     @param mode: The filesystem mode to set on the file.
-    @param omode: The open mode used when opening the file (r, rb, a, etc.)
+    @param omode: The open mode used when opening the file (w, wb, a, etc.)
     """
     ensure_dir(os.path.dirname(filename))
-    LOG.debug("Writing to %s - %s: [%s] %s bytes",
-               filename, omode, mode, len(content))
+    if 'b' in omode.lower():
+        content = encode_text(content)
+        write_type = 'bytes'
+    else:
+        content = decode_binary(content)
+        write_type = 'characters'
+    LOG.debug("Writing to %s - %s: [%s] %s %s",
+               filename, omode, mode, len(content), write_type)
     with SeLinuxGuard(path=filename):
         with open(filename, omode) as fh:
             fh.write(content)
@@ -1608,10 +1633,10 @@
         if isinstance(args, list):
             fixed = []
             for f in args:
-                fixed.append("'%s'" % (str(f).replace("'", escaped)))
+                fixed.append("'%s'" % (six.text_type(f).replace("'", escaped)))
             content = "%s%s\n" % (content, ' '.join(fixed))
             cmds_made += 1
-        elif isinstance(args, (str, basestring)):
+        elif isinstance(args, six.string_types):
             content = "%s%s\n" % (content, args)
             cmds_made += 1
         else:
@@ -1722,7 +1747,7 @@
 
     pkglist = []
     for pkg in pkgs:
-        if isinstance(pkg, basestring):
+        if isinstance(pkg, six.string_types):
             pkglist.append(pkg)
             continue
 

=== modified file 'packages/bddeb'
--- packages/bddeb	2014-07-24 12:49:42 +0000
+++ packages/bddeb	2015-01-21 23:02:49 +0000
@@ -38,6 +38,7 @@
     'pyserial': 'python-serial',
     'pyyaml': 'python-yaml',
     'requests': 'python-requests',
+    'six': 'python-six',
 }
 DEBUILD_ARGS = ["-S", "-d"]
 

=== modified file 'packages/brpm'
--- packages/brpm	2014-10-13 22:36:30 +0000
+++ packages/brpm	2015-01-21 23:02:49 +0000
@@ -45,6 +45,7 @@
         'pyserial': 'pyserial',
         'pyyaml': 'PyYAML',
         'requests': 'python-requests',
+        'six': 'python-six',
     },
     'suse': {
         'argparse': 'python-argparse',
@@ -56,6 +57,7 @@
         'pyserial': 'python-pyserial',
         'pyyaml': 'python-yaml',
         'requests': 'python-requests',
+        'six': 'python-six',
     }
 }
 

=== modified file 'requirements.txt'
--- requirements.txt	2014-03-05 23:05:59 +0000
+++ requirements.txt	2015-01-21 23:02:49 +0000
@@ -1,7 +1,6 @@
 # Pypi requirements for cloud-init to work
 
 # Used for untemplating any files or strings with parameters.
-cheetah
 jinja2
 
 # This is used for any pretty printing of tabular data.
@@ -32,3 +31,6 @@
 
 # For patching pieces of cloud-config together
 jsonpatch
+
+# For Python 2/3 compatibility
+six

=== modified file 'setup.py'
--- setup.py	2015-01-06 17:02:38 +0000
+++ setup.py	2015-01-21 23:02:49 +0000
@@ -45,7 +45,8 @@
         stdout = None
         stderr = None
     sp = subprocess.Popen(cmd, stdout=stdout,
-                    stderr=stderr, stdin=None)
+                    stderr=stderr, stdin=None,
+                    universal_newlines=True)
     (out, err) = sp.communicate()
     ret = sp.returncode
     if ret not in [0]:
@@ -144,9 +145,9 @@
             raise DistutilsArgError(
                 "Invalid --init-system: %s" % (','.join(bad)))
 
-        for sys in self.init_system:
+        for system in self.init_system:
             self.distribution.data_files.append(
-                (INITSYS_ROOTS[sys], INITSYS_FILES[sys]))
+                (INITSYS_ROOTS[system], INITSYS_FILES[system]))
         # Force that command to reinitialize (with new file list)
         self.distribution.reinitialize_command('install_data', True)
 
@@ -174,6 +175,11 @@
     }
 
 
+requirements = read_requires()
+if sys.version_info < (3,):
+    requirements.append('cheetah')
+
+
 setuptools.setup(name='cloud-init',
       version=get_version(),
       description='EC2 initialisation magic',
@@ -186,6 +192,6 @@
                ],
       license='GPLv3',
       data_files=data_files,
-      install_requires=read_requires(),
+      install_requires=requirements,
       cmdclass=cmdclass,
       )
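
Two things happen in setup.py: universal_newlines=True makes the subprocess
pipes return text rather than bytes under Python 3, and cheetah (which the
templating test below skips on Python 3) moves out of requirements.txt into
a version-gated install requirement.  A sketch of both, with a stand-in list
instead of read_requires():

    import subprocess
    import sys

    requirements = ['six', 'pyyaml']       # stand-in for read_requires()
    if sys.version_info < (3,):
        requirements.append('cheetah')     # Python 2 only

    out = subprocess.check_output(['echo', 'ok'], universal_newlines=True)
    print(type(out))                       # <class 'str'> on Python 3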

=== modified file 'tests/unittests/test__init__.py'
--- tests/unittests/test__init__.py	2014-08-26 19:53:41 +0000
+++ tests/unittests/test__init__.py	2015-01-21 23:02:49 +0000
@@ -48,7 +48,7 @@
         # Mock the write_file function
         write_file_mock = self.mocker.replace(util.write_file,
                                               passthrough=False)
-        write_file_mock(expected_file_fullname, self.payload, 0600)
+        write_file_mock(expected_file_fullname, self.payload, 0o600)
 
     def test_no_errors(self):
         """Payload gets written to file and added to C{pdata}."""

=== modified file 'tests/unittests/test_data.py'
--- tests/unittests/test_data.py	2014-09-10 18:32:37 +0000
+++ tests/unittests/test_data.py	2015-01-21 23:02:49 +0000
@@ -1,11 +1,11 @@
 """Tests for handling of userdata within cloud init."""
 
-import StringIO
-
 import gzip
 import logging
 import os
 
+from six import BytesIO, StringIO
+
 from email.mime.application import MIMEApplication
 from email.mime.base import MIMEBase
 from email.mime.multipart import MIMEMultipart
@@ -53,7 +53,7 @@
         self.patchUtils(root)
 
     def capture_log(self, lvl=logging.DEBUG):
-        log_file = StringIO.StringIO()
+        log_file = StringIO()
         self._log_handler = logging.StreamHandler(log_file)
         self._log_handler.setLevel(lvl)
         self._log = log.getLogger()
@@ -337,7 +337,7 @@
 
         mock_write = self.mocker.replace("cloudinit.util.write_file",
                                          passthrough=False)
-        mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
+        mock_write(ci.paths.get_ipath("cloud_config"), "", 0o600)
         self.mocker.replay()
 
         log_file = self.capture_log(logging.WARNING)
@@ -351,9 +351,9 @@
         """Tests that individual message gzip encoding works."""
 
         def gzip_part(text):
-            contents = StringIO.StringIO()
-            f = gzip.GzipFile(fileobj=contents, mode='w')
-            f.write(str(text))
+            contents = BytesIO()
+            f = gzip.GzipFile(fileobj=contents, mode='wb')
+            f.write(util.encode_text(text))
             f.flush()
             f.close()
             return MIMEApplication(contents.getvalue(), 'gzip')
@@ -396,7 +396,7 @@
 
         mock_write = self.mocker.replace("cloudinit.util.write_file",
                                          passthrough=False)
-        mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
+        mock_write(ci.paths.get_ipath("cloud_config"), "", 0o600)
         self.mocker.replay()
 
         log_file = self.capture_log(logging.WARNING)
@@ -415,8 +415,8 @@
         outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
         mock_write = self.mocker.replace("cloudinit.util.write_file",
                                          passthrough=False)
-        mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
-        mock_write(outpath, script, 0700)
+        mock_write(ci.paths.get_ipath("cloud_config"), "", 0o600)
+        mock_write(outpath, script, 0o700)
         self.mocker.replay()
 
         log_file = self.capture_log(logging.WARNING)
@@ -435,8 +435,8 @@
         outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
         mock_write = self.mocker.replace("cloudinit.util.write_file",
                                          passthrough=False)
-        mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
-        mock_write(outpath, script, 0700)
+        mock_write(ci.paths.get_ipath("cloud_config"), "", 0o600)
+        mock_write(outpath, script, 0o700)
         self.mocker.replay()
 
         log_file = self.capture_log(logging.WARNING)
@@ -455,8 +455,8 @@
         outpath = os.path.join(ci.paths.get_ipath_cur("scripts"), "part-001")
         mock_write = self.mocker.replace("cloudinit.util.write_file",
                                          passthrough=False)
-        mock_write(outpath, script, 0700)
-        mock_write(ci.paths.get_ipath("cloud_config"), "", 0600)
+        mock_write(outpath, script, 0o700)
+        mock_write(ci.paths.get_ipath("cloud_config"), "", 0o600)
         self.mocker.replay()
 
         log_file = self.capture_log(logging.WARNING)
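
gzip.GzipFile.write() only accepts bytes on Python 3, so the test's
gzip_part() helper now writes encoded text into a BytesIO.  The same idea,
standalone and with plain .encode() instead of util.encode_text():

    import gzip
    from io import BytesIO

    def gzip_text(text):
        buf = BytesIO()
        with gzip.GzipFile(fileobj=buf, mode='wb') as gz:
            gz.write(text.encode('utf-8'))
        return buf.getvalue()

    blob = gzip_text(u'#!/bin/sh\necho hello\n')
    assert gzip.GzipFile(fileobj=BytesIO(blob)).read() == b'#!/bin/sh\necho hello\n'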

=== modified file 'tests/unittests/test_datasource/test_altcloud.py'
--- tests/unittests/test_datasource/test_altcloud.py	2014-02-27 19:12:01 +0000
+++ tests/unittests/test_datasource/test_altcloud.py	2015-01-21 23:02:49 +0000
@@ -45,7 +45,7 @@
     cifile = open(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 'w')
     cifile.write(value)
     cifile.close()
-    os.chmod(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 0664)
+    os.chmod(cloudinit.sources.DataSourceAltCloud.CLOUD_INFO_FILE, 0o664)
 
 
 def _remove_cloud_info_file():
@@ -66,12 +66,12 @@
     udfile = open(deltacloud_user_data_file, 'w')
     udfile.write(value)
     udfile.close()
-    os.chmod(deltacloud_user_data_file, 0664)
+    os.chmod(deltacloud_user_data_file, 0o664)
 
     udfile = open(user_data_file, 'w')
     udfile.write(value)
     udfile.close()
-    os.chmod(user_data_file, 0664)
+    os.chmod(user_data_file, 0o664)
 
 
 def _remove_user_data_files(mount_dir,

=== modified file 'tests/unittests/test_datasource/test_azure.py'
--- tests/unittests/test_datasource/test_azure.py	2014-08-26 18:50:11 +0000
+++ tests/unittests/test_datasource/test_azure.py	2015-01-21 23:02:49 +0000
@@ -153,7 +153,7 @@
         ret = dsrc.get_data()
         self.assertTrue(ret)
         self.assertTrue(os.path.isdir(self.waagent_d))
-        self.assertEqual(stat.S_IMODE(os.stat(self.waagent_d).st_mode), 0700)
+        self.assertEqual(stat.S_IMODE(os.stat(self.waagent_d).st_mode), 0o700)
 
     def test_user_cfg_set_agent_command_plain(self):
         # set dscfg in via plaintext

=== modified file 'tests/unittests/test_datasource/test_nocloud.py'
--- tests/unittests/test_datasource/test_nocloud.py	2014-09-10 18:32:37 +0000
+++ tests/unittests/test_datasource/test_nocloud.py	2015-01-21 23:02:49 +0000
@@ -85,7 +85,7 @@
 
         data = {
             'fs_label': None,
-            'meta-data': {'instance-id': 'IID'},
+            'meta-data': yaml.safe_dump({'instance-id': 'IID'}),
             'user-data': "USER_DATA_RAW",
         }
 

=== modified file 'tests/unittests/test_datasource/test_openstack.py'
--- tests/unittests/test_datasource/test_openstack.py	2014-10-20 18:29:54 +0000
+++ tests/unittests/test_datasource/test_openstack.py	2015-01-21 23:02:49 +0000
@@ -20,12 +20,11 @@
 import json
 import re
 
-from StringIO import StringIO
-
-from urlparse import urlparse
-
 from .. import helpers as test_helpers
 
+from six import StringIO
+from six.moves.urllib.parse import urlparse
+
 from cloudinit import helpers
 from cloudinit import settings
 from cloudinit.sources import DataSourceOpenStack as ds

=== modified file 'tests/unittests/test_distros/test_netconfig.py'
--- tests/unittests/test_distros/test_netconfig.py	2015-01-06 17:02:38 +0000
+++ tests/unittests/test_distros/test_netconfig.py	2015-01-21 23:02:49 +0000
@@ -4,6 +4,8 @@
 
 import os
 
+from six import StringIO
+
 from cloudinit import distros
 from cloudinit import helpers
 from cloudinit import settings
@@ -11,8 +13,6 @@
 
 from cloudinit.distros.parsers.sys_conf import SysConf
 
-from StringIO import StringIO
-
 
 BASE_NET_CFG = '''
 auto lo
@@ -96,7 +96,7 @@
 
         write_bufs = {}
 
-        def replace_write(filename, content, mode=0644, omode="wb"):
+        def replace_write(filename, content, mode=0o644, omode="wb"):
             buf = WriteBuffer()
             buf.mode = mode
             buf.omode = omode
@@ -112,7 +112,7 @@
         self.assertIn('/etc/network/interfaces', write_bufs)
         write_buf = write_bufs['/etc/network/interfaces']
         self.assertEquals(str(write_buf).strip(), BASE_NET_CFG.strip())
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
     def assertCfgEquals(self, blob1, blob2):
         b1 = dict(SysConf(blob1.strip().splitlines()))
@@ -136,7 +136,7 @@
 
         write_bufs = {}
 
-        def replace_write(filename, content, mode=0644, omode="wb"):
+        def replace_write(filename, content, mode=0o644, omode="wb"):
             buf = WriteBuffer()
             buf.mode = mode
             buf.omode = omode
@@ -169,7 +169,7 @@
 ONBOOT=yes
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
         self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0']
@@ -183,7 +183,7 @@
 BROADCAST="192.168.1.0"
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
         self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1']
@@ -193,7 +193,7 @@
 ONBOOT=yes
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
         self.assertIn('/etc/sysconfig/network', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network']
@@ -202,7 +202,7 @@
 NETWORKING=yes
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
     def test_write_ipv6_rhel(self):
         rh_distro = self._get_distro('rhel')
@@ -215,7 +215,7 @@
 
         write_bufs = {}
 
-        def replace_write(filename, content, mode=0644, omode="wb"):
+        def replace_write(filename, content, mode=0o644, omode="wb"):
             buf = WriteBuffer()
             buf.mode = mode
             buf.omode = omode
@@ -248,7 +248,7 @@
 ONBOOT=yes
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
         self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0']
@@ -265,7 +265,7 @@
 IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
         self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1']
         expected_buf = '''
@@ -281,7 +281,7 @@
 IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
         self.assertIn('/etc/sysconfig/network', write_bufs)
         write_buf = write_bufs['/etc/sysconfig/network']
@@ -292,7 +292,7 @@
 IPV6_AUTOCONF=no
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)
 
     def test_simple_write_freebsd(self):
         fbsd_distro = self._get_distro('freebsd')
@@ -319,7 +319,7 @@
             '/etc/resolv.conf': '',
         }
 
-        def replace_write(filename, content, mode=0644, omode="wb"):
+        def replace_write(filename, content, mode=0o644, omode="wb"):
             buf = WriteBuffer()
             buf.mode = mode
             buf.omode = omode
@@ -355,4 +355,4 @@
 defaultrouter="192.168.1.254"
 '''
         self.assertCfgEquals(expected_buf, str(write_buf))
-        self.assertEquals(write_buf.mode, 0644)
+        self.assertEquals(write_buf.mode, 0o644)

=== modified file 'tests/unittests/test_handler/test_handler_apt_configure.py'
--- tests/unittests/test_handler/test_handler_apt_configure.py	2013-08-15 17:21:40 +0000
+++ tests/unittests/test_handler/test_handler_apt_configure.py	2015-01-21 23:02:49 +0000
@@ -16,12 +16,12 @@
         self.cfile = os.path.join(self.tmp, "config.cfg")
 
     def _search_apt_config(self, contents, ptype, value):
-        print(
-            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
-            contents, "flags=re.IGNORECASE")
-        return(re.search(
-            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
-            contents, flags=re.IGNORECASE))
+        ## print(
+        ##     r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+        ##     contents, "flags=re.IGNORECASE")
+        return re.search(
+            r"acquire::%s::proxy\s+[\"']%s[\"'];\n" % (ptype, value),
+            contents, flags=re.IGNORECASE)
 
     def test_apt_proxy_written(self):
         cfg = {'apt_proxy': 'myproxy'}

=== modified file 'tests/unittests/test_handler/test_handler_ca_certs.py'
--- tests/unittests/test_handler/test_handler_ca_certs.py	2012-12-02 02:46:27 +0000
+++ tests/unittests/test_handler/test_handler_ca_certs.py	2015-01-21 23:02:49 +0000
@@ -150,7 +150,7 @@
         mock_load = self.mocker.replace(util.load_file, passthrough=False)
 
         mock_write("/usr/share/ca-certificates/cloud-init-ca-certs.crt",
-                   cert, mode=0644)
+                   cert, mode=0o644)
 
         mock_load("/etc/ca-certificates.conf")
         self.mocker.result(ca_certs_content)
@@ -171,7 +171,7 @@
         mock_load = self.mocker.replace(util.load_file, passthrough=False)
 
         mock_write("/usr/share/ca-certificates/cloud-init-ca-certs.crt",
-                   cert, mode=0644)
+                   cert, mode=0o644)
 
         mock_load("/etc/ca-certificates.conf")
         self.mocker.result(ca_certs_content)
@@ -192,7 +192,7 @@
         mock_load = self.mocker.replace(util.load_file, passthrough=False)
 
         mock_write("/usr/share/ca-certificates/cloud-init-ca-certs.crt",
-                   expected_cert_file, mode=0644)
+                   expected_cert_file, mode=0o644)
 
         ca_certs_content = "line1\nline2\nline3"
         mock_load("/etc/ca-certificates.conf")
@@ -233,7 +233,7 @@
 
         mock_delete_dir_contents("/usr/share/ca-certificates/")
         mock_delete_dir_contents("/etc/ssl/certs/")
-        mock_write("/etc/ca-certificates.conf", "", mode=0644)
+        mock_write("/etc/ca-certificates.conf", "", mode=0o644)
         mock_subp(('debconf-set-selections', '-'),
                   "ca-certificates ca-certificates/trust_new_crts select no")
         self.mocker.replay()

=== modified file 'tests/unittests/test_handler/test_handler_growpart.py'
--- tests/unittests/test_handler/test_handler_growpart.py	2014-08-26 19:53:41 +0000
+++ tests/unittests/test_handler/test_handler_growpart.py	2015-01-21 23:02:49 +0000
@@ -145,7 +145,7 @@
         # this patches out devent2dev, os.stat, and device_part_info
         # so in the end, doesn't test a lot
         devs = ["/dev/XXda1", "/dev/YYda2"]
-        devstat_ret = Bunch(st_mode=25008, st_ino=6078, st_dev=5L,
+        devstat_ret = Bunch(st_mode=25008, st_ino=6078, st_dev=5,
                             st_nlink=1, st_uid=0, st_gid=6, st_size=0,
                             st_atime=0, st_mtime=0, st_ctime=0)
         enoent = ["/dev/NOENT"]

=== modified file 'tests/unittests/test_handler/test_handler_locale.py'
--- tests/unittests/test_handler/test_handler_locale.py	2014-07-23 16:16:07 +0000
+++ tests/unittests/test_handler/test_handler_locale.py	2015-01-21 23:02:49 +0000
@@ -29,7 +29,7 @@
 
 from configobj import ConfigObj
 
-from StringIO import StringIO
+from six import BytesIO
 
 import logging
 
@@ -59,6 +59,6 @@
         cc = self._get_cloud('sles')
         cc_locale.handle('cc_locale', cfg, cc, LOG, [])
 
-        contents = util.load_file('/etc/sysconfig/language')
-        n_cfg = ConfigObj(StringIO(contents))
+        contents = util.load_file('/etc/sysconfig/language', decode=False)
+        n_cfg = ConfigObj(BytesIO(contents))
         self.assertEquals({'RC_LANG': cfg['locale']}, dict(n_cfg))
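
ConfigObj is handed a byte stream here, so the test asks load_file() for
undecoded bytes and wraps them in BytesIO; the set_hostname, timezone and
yum_add_repo tests below get the same treatment.  Roughly, mirroring the
change above with a stand-in for the loaded file:

    from io import BytesIO
    from configobj import ConfigObj

    raw = b'RC_LANG=en_US.UTF-8\n'   # stand-in for load_file(path, decode=False)
    cfg = ConfigObj(BytesIO(raw))
    print(dict(cfg))                 # {'RC_LANG': 'en_US.UTF-8'}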

=== modified file 'tests/unittests/test_handler/test_handler_seed_random.py'
--- tests/unittests/test_handler/test_handler_seed_random.py	2014-07-24 13:06:16 +0000
+++ tests/unittests/test_handler/test_handler_seed_random.py	2015-01-21 23:02:49 +0000
@@ -22,7 +22,7 @@
 import gzip
 import tempfile
 
-from StringIO import StringIO
+from six import StringIO
 
 from cloudinit import cloud
 from cloudinit import distros

=== modified file 'tests/unittests/test_handler/test_handler_set_hostname.py'
--- tests/unittests/test_handler/test_handler_set_hostname.py	2014-10-17 19:32:41 +0000
+++ tests/unittests/test_handler/test_handler_set_hostname.py	2015-01-21 23:02:49 +0000
@@ -9,7 +9,7 @@
 
 import logging
 
-from StringIO import StringIO
+from six import BytesIO
 
 from configobj import ConfigObj
 
@@ -38,8 +38,8 @@
         cc_set_hostname.handle('cc_set_hostname',
                                cfg, cc, LOG, [])
         if not distro.uses_systemd():
-            contents = util.load_file("/etc/sysconfig/network")
-            n_cfg = ConfigObj(StringIO(contents))
+            contents = util.load_file("/etc/sysconfig/network", decode=False)
+            n_cfg = ConfigObj(BytesIO(contents))
             self.assertEquals({'HOSTNAME': 'blah.blah.blah.yahoo.com'},
                               dict(n_cfg))
 

=== modified file 'tests/unittests/test_handler/test_handler_timezone.py'
--- tests/unittests/test_handler/test_handler_timezone.py	2014-07-23 16:16:07 +0000
+++ tests/unittests/test_handler/test_handler_timezone.py	2015-01-21 23:02:49 +0000
@@ -29,7 +29,7 @@
 
 from configobj import ConfigObj
 
-from StringIO import StringIO
+from six import BytesIO
 
 import logging
 
@@ -67,8 +67,8 @@
 
         cc_timezone.handle('cc_timezone', cfg, cc, LOG, [])
 
-        contents = util.load_file('/etc/sysconfig/clock')
-        n_cfg = ConfigObj(StringIO(contents))
+        contents = util.load_file('/etc/sysconfig/clock', decode=False)
+        n_cfg = ConfigObj(BytesIO(contents))
         self.assertEquals({'TIMEZONE': cfg['timezone']}, dict(n_cfg))
 
         contents = util.load_file('/etc/localtime')

=== modified file 'tests/unittests/test_handler/test_handler_yum_add_repo.py'
--- tests/unittests/test_handler/test_handler_yum_add_repo.py	2015-01-06 17:02:38 +0000
+++ tests/unittests/test_handler/test_handler_yum_add_repo.py	2015-01-21 23:02:49 +0000
@@ -6,7 +6,7 @@
 
 import logging
 
-from StringIO import StringIO
+from six import BytesIO
 
 import configobj
 
@@ -52,8 +52,9 @@
         }
         self.patchUtils(self.tmp)
         cc_yum_add_repo.handle('yum_add_repo', cfg, None, LOG, [])
-        contents = util.load_file("/etc/yum.repos.d/epel_testing.repo")
-        contents = configobj.ConfigObj(StringIO(contents))
+        contents = util.load_file("/etc/yum.repos.d/epel_testing.repo",
+                                  decode=False)
+        contents = configobj.ConfigObj(BytesIO(contents))
         expected = {
             'epel_testing': {
                 'name': 'Extra Packages for Enterprise Linux 5 - Testing',

=== modified file 'tests/unittests/test_runs/test_simple_run.py'
--- tests/unittests/test_runs/test_simple_run.py	2014-07-23 16:25:35 +0000
+++ tests/unittests/test_runs/test_simple_run.py	2015-01-21 23:02:49 +0000
@@ -41,7 +41,7 @@
                 {
                     'path': '/etc/blah.ini',
                     'content': 'blah',
-                    'permissions': 0755,
+                    'permissions': 0o755,
                 },
             ],
             'cloud_init_modules': ['write-files'],

=== modified file 'tests/unittests/test_templating.py'
--- tests/unittests/test_templating.py	2015-01-06 17:02:38 +0000
+++ tests/unittests/test_templating.py	2015-01-21 23:02:49 +0000
@@ -16,6 +16,9 @@
 #    You should have received a copy of the GNU General Public License
 #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
 
+import six
+import unittest
+
 from . import helpers as test_helpers
 import textwrap
 
@@ -38,6 +41,7 @@
         out_data = templater.basic_render(in_data, {'b': 2})
         self.assertEqual(expected_data.strip(), out_data)
 
+    @unittest.skipIf(six.PY3, 'Cheetah is not compatible with Python 3')
     def test_detection(self):
         blob = "## template:cheetah"
 

=== modified file 'tests/unittests/test_util.py'
--- tests/unittests/test_util.py	2014-08-26 19:53:41 +0000
+++ tests/unittests/test_util.py	2015-01-21 23:02:49 +0000
@@ -79,7 +79,7 @@
             create_contents = f.read()
             self.assertEqual(contents, create_contents)
         file_stat = os.stat(path)
-        self.assertEqual(0644, stat.S_IMODE(file_stat.st_mode))
+        self.assertEqual(0o644, stat.S_IMODE(file_stat.st_mode))
 
     def test_dir_is_created_if_required(self):
         """Verifiy that directories are created is required."""
@@ -97,12 +97,12 @@
         path = os.path.join(self.tmp, "NewFile.txt")
         contents = "Hey there"
 
-        util.write_file(path, contents, mode=0666)
+        util.write_file(path, contents, mode=0o666)
 
         self.assertTrue(os.path.exists(path))
         self.assertTrue(os.path.isfile(path))
         file_stat = os.stat(path)
-        self.assertEqual(0666, stat.S_IMODE(file_stat.st_mode))
+        self.assertEqual(0o666, stat.S_IMODE(file_stat.st_mode))
 
     def test_custom_omode(self):
         """Verify custom omode works properly."""

=== added file 'tox.ini'
--- tox.ini	1970-01-01 00:00:00 +0000
+++ tox.ini	2015-01-21 23:02:49 +0000
@@ -0,0 +1,13 @@
+[tox]
+envlist = py26,py27,py34
+recreate = True
+
+[testenv]
+commands = python -m nose tests
+deps =
+     httpretty>=0.7.1
+     mock
+     mocker
+     nose
+     pep8==1.5.7
+     pyflakes
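
With this tox.ini in place, running tox from the branch root should create
py26, py27 and py34 virtualenvs (recreated on each run) and execute the nose
suite in each; tox -e py34, for example, limits the run to a single
interpreter.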

