nagios-charmers team mailing list archive
-
nagios-charmers team
-
Mailing list archive
-
Message #00498
[Merge] ~xavpaice/thruk-agent-charm:update_charmhelpers into thruk-agent-charm:master
Xav Paice has proposed merging ~xavpaice/thruk-agent-charm:update_charmhelpers into thruk-agent-charm:master.
Requested reviews:
Nagios Charm developers (nagios-charmers)
For more details, see:
https://code.launchpad.net/~xavpaice/thruk-agent-charm/+git/thruk-agent-charm/+merge/368892
--
Your team Nagios Charm developers is requested to review the proposed merge of ~xavpaice/thruk-agent-charm:update_charmhelpers into thruk-agent-charm:master.
diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py
index f67fdb9..7c0c194 100644
--- a/bin/charm_helpers_sync.py
+++ b/bin/charm_helpers_sync.py
@@ -2,19 +2,17 @@
# Copyright 2014-2015 Canonical Limited.
#
-# This file is part of charm-helpers.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
#
-# charm-helpers is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3 as
-# published by the Free Software Foundation.
+# http://www.apache.org/licenses/LICENSE-2.0
#
-# charm-helpers is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
# Authors:
# Adam Gandelman <adamg@xxxxxxxxxx>
@@ -31,7 +29,7 @@ from fnmatch import fnmatch
import six
-CHARM_HELPERS_BRANCH = 'lp:charm-helpers'
+CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers'
def parse_config(conf_file):
@@ -41,10 +39,16 @@ def parse_config(conf_file):
return yaml.load(open(conf_file).read())
-def clone_helpers(work_dir, branch):
+def clone_helpers(work_dir, repo):
dest = os.path.join(work_dir, 'charm-helpers')
- logging.info('Checking out %s to %s.' % (branch, dest))
- cmd = ['bzr', 'checkout', '--lightweight', branch, dest]
+ logging.info('Cloning out %s to %s.' % (repo, dest))
+ branch = None
+ if '@' in repo:
+ repo, branch = repo.split('@', 1)
+ cmd = ['git', 'clone', '--depth=1']
+ if branch is not None:
+ cmd += ['--branch', branch]
+ cmd += [repo, dest]
subprocess.check_call(cmd)
return dest
@@ -176,6 +180,9 @@ def extract_options(inc, global_options=None):
def sync_helpers(include, src, dest, options=None):
+ if os.path.exists(dest):
+ logging.debug('Removing existing directory: %s' % dest)
+ shutil.rmtree(dest)
if not os.path.isdir(dest):
os.makedirs(dest)
@@ -193,14 +200,15 @@ def sync_helpers(include, src, dest, options=None):
inc, opts = extract_options(m, global_options)
sync(src, dest, '%s.%s' % (k, inc), opts)
+
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option('-c', '--config', action='store', dest='config',
default=None, help='helper config file')
parser.add_option('-D', '--debug', action='store_true', dest='debug',
default=False, help='debug')
- parser.add_option('-b', '--branch', action='store', dest='branch',
- help='charm-helpers bzr branch (overrides config)')
+ parser.add_option('-r', '--repository', action='store', dest='repo',
+ help='charm-helpers git repository (overrides config)')
parser.add_option('-d', '--destination', action='store', dest='dest_dir',
help='sync destination dir (overrides config)')
(opts, args) = parser.parse_args()
@@ -219,10 +227,10 @@ if __name__ == '__main__':
else:
config = {}
- if 'branch' not in config:
- config['branch'] = CHARM_HELPERS_BRANCH
- if opts.branch:
- config['branch'] = opts.branch
+ if 'repo' not in config:
+ config['repo'] = CHARM_HELPERS_REPO
+ if opts.repo:
+ config['repo'] = opts.repo
if opts.dest_dir:
config['destination'] = opts.dest_dir
@@ -242,7 +250,7 @@ if __name__ == '__main__':
sync_options = config['options']
tmpd = tempfile.mkdtemp()
try:
- checkout = clone_helpers(tmpd, config['branch'])
+ checkout = clone_helpers(tmpd, config['repo'])
sync_helpers(config['include'], checkout, config['destination'],
options=sync_options)
except Exception as e:
diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py
index e7aa471..61ef907 100644
--- a/hooks/charmhelpers/__init__.py
+++ b/hooks/charmhelpers/__init__.py
@@ -23,22 +23,22 @@ import subprocess
import sys
try:
- import six # flake8: noqa
+ import six # NOQA:F401
except ImportError:
if sys.version_info.major == 2:
subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
else:
subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
- import six # flake8: noqa
+ import six # NOQA:F401
try:
- import yaml # flake8: noqa
+ import yaml # NOQA:F401
except ImportError:
if sys.version_info.major == 2:
subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
else:
subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
- import yaml # flake8: noqa
+ import yaml # NOQA:F401
# Holds a list of mapping of mangled function names that have been deprecated
diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py
index e44e22b..4744eb4 100644
--- a/hooks/charmhelpers/core/hookenv.py
+++ b/hooks/charmhelpers/core/hookenv.py
@@ -22,10 +22,12 @@ from __future__ import print_function
import copy
from distutils.version import LooseVersion
from functools import wraps
+from collections import namedtuple
import glob
import os
import json
import yaml
+import re
import subprocess
import sys
import errno
@@ -38,12 +40,20 @@ if not six.PY3:
else:
from collections import UserDict
+
CRITICAL = "CRITICAL"
ERROR = "ERROR"
WARNING = "WARNING"
INFO = "INFO"
DEBUG = "DEBUG"
+TRACE = "TRACE"
MARKER = object()
+SH_MAX_ARG = 131071
+
+
+RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
+ 'This may not be compatible with software you are '
+ 'running in your shell.')
cache = {}
@@ -64,7 +74,7 @@ def cached(func):
@wraps(func)
def wrapper(*args, **kwargs):
global cache
- key = str((func, args, kwargs))
+ key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
try:
return cache[key]
except KeyError:
@@ -94,7 +104,7 @@ def log(message, level=None):
command += ['-l', level]
if not isinstance(message, six.string_types):
message = repr(message)
- command += [message]
+ command += [message[:SH_MAX_ARG]]
# Missing juju-log should not cause failures in unit tests
# Send log output to stderr
try:
@@ -197,11 +207,58 @@ def remote_unit():
return os.environ.get('JUJU_REMOTE_UNIT', None)
-def service_name():
- """The name service group this unit belongs to"""
+def application_name():
+ """
+ The name of the deployed application this unit belongs to.
+ """
return local_unit().split('/')[0]
+def service_name():
+ """
+ .. deprecated:: 0.19.1
+ Alias for :func:`application_name`.
+ """
+ return application_name()
+
+
+def model_name():
+ """
+ Name of the model that this unit is deployed in.
+ """
+ return os.environ['JUJU_MODEL_NAME']
+
+
+def model_uuid():
+ """
+ UUID of the model that this unit is deployed in.
+ """
+ return os.environ['JUJU_MODEL_UUID']
+
+
+def principal_unit():
+ """Returns the principal unit of this unit, otherwise None"""
+ # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
+ principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
+ # If it's empty, then this unit is the principal
+ if principal_unit == '':
+ return os.environ['JUJU_UNIT_NAME']
+ elif principal_unit is not None:
+ return principal_unit
+ # For Juju 2.1 and below, let's try work out the principle unit by
+ # the various charms' metadata.yaml.
+ for reltype in relation_types():
+ for rid in relation_ids(reltype):
+ for unit in related_units(rid):
+ md = _metadata_unit(unit)
+ if not md:
+ continue
+ subordinate = md.pop('subordinate', None)
+ if not subordinate:
+ return unit
+ return None
+
+
@cached
def remote_service_name(relid=None):
"""The remote service name for a given relation-id (or the current relation)"""
@@ -263,7 +320,7 @@ class Config(dict):
self.implicit_save = True
self._prev_dict = None
self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
- if os.path.exists(self.path):
+ if os.path.exists(self.path) and os.stat(self.path).st_size:
self.load_previous()
atexit(self._implicit_save)
@@ -283,7 +340,11 @@ class Config(dict):
"""
self.path = path or self.path
with open(self.path) as f:
- self._prev_dict = json.load(f)
+ try:
+ self._prev_dict = json.load(f)
+ except ValueError as e:
+ log('Unable to parse previous config data - {}'.format(str(e)),
+ level=ERROR)
for k, v in copy.deepcopy(self._prev_dict).items():
if k not in self:
self[k] = v
@@ -319,6 +380,7 @@ class Config(dict):
"""
with open(self.path, 'w') as f:
+ os.fchmod(f.fileno(), 0o600)
json.dump(self, f)
def _implicit_save(self):
@@ -326,22 +388,40 @@ class Config(dict):
self.save()
-@cached
+_cache_config = None
+
+
def config(scope=None):
- """Juju charm configuration"""
- config_cmd_line = ['config-get']
- if scope is not None:
- config_cmd_line.append(scope)
- else:
- config_cmd_line.append('--all')
- config_cmd_line.append('--format=json')
+ """
+ Get the juju charm configuration (scope==None) or individual key,
+ (scope=str). The returned value is a Python data structure loaded as
+ JSON from the Juju config command.
+
+ :param scope: If set, return the value for the specified key.
+ :type scope: Optional[str]
+ :returns: Either the whole config as a Config, or a key from it.
+ :rtype: Any
+ """
+ global _cache_config
+ config_cmd_line = ['config-get', '--all', '--format=json']
try:
- config_data = json.loads(
- subprocess.check_output(config_cmd_line).decode('UTF-8'))
+ # JSON Decode Exception for Python3.5+
+ exc_json = json.decoder.JSONDecodeError
+ except AttributeError:
+ # JSON Decode Exception for Python2.7 through Python3.4
+ exc_json = ValueError
+ try:
+ if _cache_config is None:
+ config_data = json.loads(
+ subprocess.check_output(config_cmd_line).decode('UTF-8'))
+ _cache_config = Config(config_data)
if scope is not None:
- return config_data
- return Config(config_data)
- except ValueError:
+ return _cache_config.get(scope)
+ return _cache_config
+ except (exc_json, UnicodeDecodeError) as e:
+ log('Unable to parse output from config-get: config_cmd_line="{}" '
+ 'message="{}"'
+ .format(config_cmd_line, str(e)), level=ERROR)
return None
@@ -435,6 +515,67 @@ def related_units(relid=None):
subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
+def expected_peer_units():
+ """Get a generator for units we expect to join peer relation based on
+ goal-state.
+
+ The local unit is excluded from the result to make it easy to gauge
+ completion of all peers joining the relation with existing hook tools.
+
+ Example usage:
+ log('peer {} of {} joined peer relation'
+ .format(len(related_units()),
+ len(list(expected_peer_units()))))
+
+ This function will raise NotImplementedError if used with juju versions
+ without goal-state support.
+
+ :returns: iterator
+ :rtype: types.GeneratorType
+ :raises: NotImplementedError
+ """
+ if not has_juju_version("2.4.0"):
+ # goal-state first appeared in 2.4.0.
+ raise NotImplementedError("goal-state")
+ _goal_state = goal_state()
+ return (key for key in _goal_state['units']
+ if '/' in key and key != local_unit())
+
+
+def expected_related_units(reltype=None):
+ """Get a generator for units we expect to join relation based on
+ goal-state.
+
+ Note that you can not use this function for the peer relation, take a look
+ at expected_peer_units() for that.
+
+ This function will raise KeyError if you request information for a
+ relation type for which juju goal-state does not have information. It will
+ raise NotImplementedError if used with juju versions without goal-state
+ support.
+
+ Example usage:
+ log('participant {} of {} joined relation {}'
+ .format(len(related_units()),
+ len(list(expected_related_units())),
+ relation_type()))
+
+ :param reltype: Relation type to list data for, default is to list data for
+                    the relation type we are currently executing a hook for.
+ :type reltype: str
+ :returns: iterator
+ :rtype: types.GeneratorType
+ :raises: KeyError, NotImplementedError
+ """
+ if not has_juju_version("2.4.4"):
+ # goal-state existed in 2.4.0, but did not list individual units to
+ # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
+ raise NotImplementedError("goal-state relation unit count")
+ reltype = reltype or relation_type()
+ _goal_state = goal_state()
+ return (key for key in _goal_state['relations'][reltype] if '/' in key)
+
+
@cached
def relation_for_unit(unit=None, rid=None):
"""Get the json represenation of a unit's relation"""
@@ -478,6 +619,24 @@ def metadata():
return yaml.safe_load(md)
+def _metadata_unit(unit):
+ """Given the name of a unit (e.g. apache2/0), get the unit charm's
+ metadata.yaml. Very similar to metadata() but allows us to inspect
+ other units. Unit needs to be co-located, such as a subordinate or
+ principal/primary.
+
+ :returns: metadata.yaml as a python object.
+
+ """
+ basedir = os.sep.join(charm_dir().split(os.sep)[:-2])
+ unitdir = 'unit-{}'.format(unit.replace(os.sep, '-'))
+ joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')
+ if not os.path.exists(joineddir):
+ return None
+ with open(joineddir) as md:
+ return yaml.safe_load(md)
+
+
@cached
def relation_types():
"""Get a list of relation types supported by this charm"""
@@ -602,18 +761,31 @@ def is_relation_made(relation, keys='private-address'):
return False
+def _port_op(op_name, port, protocol="TCP"):
+ """Open or close a service network port"""
+ _args = [op_name]
+ icmp = protocol.upper() == "ICMP"
+ if icmp:
+ _args.append(protocol)
+ else:
+ _args.append('{}/{}'.format(port, protocol))
+ try:
+ subprocess.check_call(_args)
+ except subprocess.CalledProcessError:
+ # Older Juju pre 2.3 doesn't support ICMP
+ # so treat it as a no-op if it fails.
+ if not icmp:
+ raise
+
+
def open_port(port, protocol="TCP"):
"""Open a service network port"""
- _args = ['open-port']
- _args.append('{}/{}'.format(port, protocol))
- subprocess.check_call(_args)
+ _port_op('open-port', port, protocol)
def close_port(port, protocol="TCP"):
"""Close a service network port"""
- _args = ['close-port']
- _args.append('{}/{}'.format(port, protocol))
- subprocess.check_call(_args)
+ _port_op('close-port', port, protocol)
def open_ports(start, end, protocol="TCP"):
@@ -630,6 +802,17 @@ def close_ports(start, end, protocol="TCP"):
subprocess.check_call(_args)
+def opened_ports():
+ """Get the opened ports
+
+ *Note that this will only show ports opened in a previous hook*
+
+ :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
+ """
+ _args = ['opened-ports', '--format=json']
+ return json.loads(subprocess.check_output(_args).decode('UTF-8'))
+
+
@cached
def unit_get(attribute):
"""Get the unit ID for the remote unit"""
@@ -751,8 +934,15 @@ class Hooks(object):
return wrapper
+class NoNetworkBinding(Exception):
+ pass
+
+
def charm_dir():
"""Return the root directory of the current charm"""
+ d = os.environ.get('JUJU_CHARM_DIR')
+ if d is not None:
+ return d
return os.environ.get('CHARM_DIR')
@@ -874,6 +1064,14 @@ def application_version_set(version):
@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
+@cached
+def goal_state():
+ """Juju goal state values"""
+ cmd = ['goal-state', '--format=json']
+ return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
+
+
+@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
def is_leader():
"""Does the current unit hold the juju leadership
@@ -967,7 +1165,6 @@ def juju_version():
universal_newlines=True).strip()
-@cached
def has_juju_version(minimum_version):
"""Return True if the Juju version is at least the provided version"""
return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
@@ -1027,6 +1224,8 @@ def _run_atexit():
@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
def network_get_primary_address(binding):
'''
+ Deprecated since Juju 2.3; use network_get()
+
Retrieve the primary network address for a named binding
:param binding: string. The name of a relation of extra-binding
@@ -1034,7 +1233,41 @@ def network_get_primary_address(binding):
:raise: NotImplementedError if run on Juju < 2.0
'''
cmd = ['network-get', '--primary-address', binding]
- return subprocess.check_output(cmd).decode('UTF-8').strip()
+ try:
+ response = subprocess.check_output(
+ cmd,
+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
+ except CalledProcessError as e:
+ if 'no network config found for binding' in e.output.decode('UTF-8'):
+ raise NoNetworkBinding("No network binding for {}"
+ .format(binding))
+ else:
+ raise
+ return response
+
+
+def network_get(endpoint, relation_id=None):
+ """
+ Retrieve the network details for a relation endpoint
+
+ :param endpoint: string. The name of a relation endpoint
+ :param relation_id: int. The ID of the relation for the current context.
+ :return: dict. The loaded YAML output of the network-get query.
+ :raise: NotImplementedError if request not supported by the Juju version.
+ """
+ if not has_juju_version('2.2'):
+ raise NotImplementedError(juju_version()) # earlier versions require --primary-address
+ if relation_id and not has_juju_version('2.3'):
+ raise NotImplementedError # 2.3 added the -r option
+
+ cmd = ['network-get', endpoint, '--format', 'yaml']
+ if relation_id:
+ cmd.append('-r')
+ cmd.append(relation_id)
+ response = subprocess.check_output(
+ cmd,
+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
+ return yaml.safe_load(response)
def add_metric(*args, **kwargs):
@@ -1066,3 +1299,192 @@ def meter_info():
"""Get the meter status information, if running in the meter-status-changed
hook."""
return os.environ.get('JUJU_METER_INFO')
+
+
+def iter_units_for_relation_name(relation_name):
+ """Iterate through all units in a relation
+
+ Generator that iterates through all the units in a relation and yields
+ a named tuple with rid and unit field names.
+
+ Usage:
+ data = [(u.rid, u.unit)
+ for u in iter_units_for_relation_name(relation_name)]
+
+ :param relation_name: string relation name
+ :yield: Named Tuple with rid and unit field names
+ """
+ RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
+ for rid in relation_ids(relation_name):
+ for unit in related_units(rid):
+ yield RelatedUnit(rid, unit)
+
+
+def ingress_address(rid=None, unit=None):
+ """
+ Retrieve the ingress-address from a relation when available.
+ Otherwise, return the private-address.
+
+ When used on the consuming side of the relation (unit is a remote
+ unit), the ingress-address is the IP address that this unit needs
+ to use to reach the provided service on the remote unit.
+
+ When used on the providing side of the relation (unit == local_unit()),
+ the ingress-address is the IP address that is advertised to remote
+ units on this relation. Remote units need to use this address to
+ reach the local provided service on this unit.
+
+ Note that charms may document some other method to use in
+ preference to the ingress_address(), such as an address provided
+ on a different relation attribute or a service discovery mechanism.
+ This allows charms to redirect inbound connections to their peers
+ or different applications such as load balancers.
+
+ Usage:
+ addresses = [ingress_address(rid=u.rid, unit=u.unit)
+ for u in iter_units_for_relation_name(relation_name)]
+
+ :param rid: string relation id
+ :param unit: string unit name
+ :side effect: calls relation_get
+ :return: string IP address
+ """
+ settings = relation_get(rid=rid, unit=unit)
+ return (settings.get('ingress-address') or
+ settings.get('private-address'))
+
+
+def egress_subnets(rid=None, unit=None):
+ """
+ Retrieve the egress-subnets from a relation.
+
+ This function is to be used on the providing side of the
+ relation, and provides the ranges of addresses that client
+ connections may come from. The result is uninteresting on
+ the consuming side of a relation (unit == local_unit()).
+
+ Returns a stable list of subnets in CIDR format.
+ eg. ['192.168.1.0/24', '2001::F00F/128']
+
+ If egress-subnets is not available, falls back to using the published
+ ingress-address, or finally private-address.
+
+ :param rid: string relation id
+ :param unit: string unit name
+ :side effect: calls relation_get
+ :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
+ """
+ def _to_range(addr):
+ if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
+ addr += '/32'
+ elif ':' in addr and '/' not in addr: # IPv6
+ addr += '/128'
+ return addr
+
+ settings = relation_get(rid=rid, unit=unit)
+ if 'egress-subnets' in settings:
+ return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
+ if 'ingress-address' in settings:
+ return [_to_range(settings['ingress-address'])]
+ if 'private-address' in settings:
+ return [_to_range(settings['private-address'])]
+ return [] # Should never happen
+
+
+def unit_doomed(unit=None):
+ """Determines if the unit is being removed from the model
+
+ Requires Juju 2.4.1.
+
+ :param unit: string unit name, defaults to local_unit
+ :side effect: calls goal_state
+ :side effect: calls local_unit
+ :side effect: calls has_juju_version
+ :return: True if the unit is being removed, already gone, or never existed
+ """
+ if not has_juju_version("2.4.1"):
+ # We cannot risk blindly returning False for 'we don't know',
+ # because that could cause data loss; if call sites don't
+ # need an accurate answer, they likely don't need this helper
+ # at all.
+ # goal-state existed in 2.4.0, but did not handle removals
+ # correctly until 2.4.1.
+ raise NotImplementedError("is_doomed")
+ if unit is None:
+ unit = local_unit()
+ gs = goal_state()
+ units = gs.get('units', {})
+ if unit not in units:
+ return True
+ # I don't think 'dead' units ever show up in the goal-state, but
+ # check anyway in addition to 'dying'.
+ return units[unit]['status'] in ('dying', 'dead')
+
+
+def env_proxy_settings(selected_settings=None):
+ """Get proxy settings from process environment variables.
+
+ Get charm proxy settings from environment variables that correspond to
+ juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2,
+ see lp:1782236) in a format suitable for passing to an application that
+ reacts to proxy settings passed as environment variables. Some applications
+ support lowercase or uppercase notation (e.g. curl), some support only
+ lowercase (e.g. wget), there are also subjectively rare cases of only
+ uppercase notation support. no_proxy CIDR and wildcard support also varies
+ between runtimes and applications as there is no enforced standard.
+
+ Some applications may connect to multiple destinations and expose config
+ options that would affect only proxy settings for a specific destination
+ these should be handled in charms in an application-specific manner.
+
+ :param selected_settings: format only a subset of possible settings
+ :type selected_settings: list
+ :rtype: Option(None, dict[str, str])
+ """
+ SUPPORTED_SETTINGS = {
+ 'http': 'HTTP_PROXY',
+ 'https': 'HTTPS_PROXY',
+ 'no_proxy': 'NO_PROXY',
+ 'ftp': 'FTP_PROXY'
+ }
+ if selected_settings is None:
+ selected_settings = SUPPORTED_SETTINGS
+
+ selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
+ if k in selected_settings]
+ proxy_settings = {}
+ for var in selected_vars:
+ var_val = os.getenv(var)
+ if var_val:
+ proxy_settings[var] = var_val
+ proxy_settings[var.lower()] = var_val
+ # Now handle juju-prefixed environment variables. The legacy vs new
+ # environment variable usage is mutually exclusive
+ charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
+ if charm_var_val:
+ proxy_settings[var] = charm_var_val
+ proxy_settings[var.lower()] = charm_var_val
+ if 'no_proxy' in proxy_settings:
+ if _contains_range(proxy_settings['no_proxy']):
+ log(RANGE_WARNING, level=WARNING)
+ return proxy_settings if proxy_settings else None
+
+
+def _contains_range(addresses):
+ """Check for cidr or wildcard domain in a string.
+
+    Given a string comprising a comma separated list of ip addresses
+ and domain names, determine whether the string contains IP ranges
+ or wildcard domains.
+
+    :param addresses: comma separated list of domains and ip addresses.
+ :type addresses: str
+ """
+ return (
+ # Test for cidr (e.g. 10.20.20.0/24)
+ "/" in addresses or
+ # Test for wildcard domains (*.foo.com or .foo.com)
+ "*" in addresses or
+ addresses.startswith(".") or
+ ",." in addresses or
+ " ." in addresses)
diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py
index b0043cb..32754ff 100644
--- a/hooks/charmhelpers/core/host.py
+++ b/hooks/charmhelpers/core/host.py
@@ -34,21 +34,23 @@ import six
from contextlib import contextmanager
from collections import OrderedDict
-from .hookenv import log
+from .hookenv import log, INFO, DEBUG, local_unit, charm_name
from .fstab import Fstab
from charmhelpers.osplatform import get_platform
__platform__ = get_platform()
if __platform__ == "ubuntu":
- from charmhelpers.core.host_factory.ubuntu import (
+ from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
service_available,
add_new_group,
lsb_release,
cmp_pkgrevno,
CompareHostReleases,
+ get_distrib_codename,
+ arch
) # flake8: noqa -- ignore F401 for this import
elif __platform__ == "centos":
- from charmhelpers.core.host_factory.centos import (
+ from charmhelpers.core.host_factory.centos import ( # NOQA:F401
service_available,
add_new_group,
lsb_release,
@@ -58,6 +60,7 @@ elif __platform__ == "centos":
UPDATEDB_PATH = '/etc/updatedb.conf'
+
def service_start(service_name, **kwargs):
"""Start a system service.
@@ -287,8 +290,8 @@ def service_running(service_name, **kwargs):
for key, value in six.iteritems(kwargs):
parameter = '%s=%s' % (key, value)
cmd.append(parameter)
- output = subprocess.check_output(cmd,
- stderr=subprocess.STDOUT).decode('UTF-8')
+ output = subprocess.check_output(
+ cmd, stderr=subprocess.STDOUT).decode('UTF-8')
except subprocess.CalledProcessError:
return False
else:
@@ -441,6 +444,51 @@ def add_user_to_group(username, group):
subprocess.check_call(cmd)
+def chage(username, lastday=None, expiredate=None, inactive=None,
+ mindays=None, maxdays=None, root=None, warndays=None):
+ """Change user password expiry information
+
+ :param str username: User to update
+ :param str lastday: Set when password was changed in YYYY-MM-DD format
+ :param str expiredate: Set when user's account will no longer be
+ accessible in YYYY-MM-DD format.
+ -1 will remove an account expiration date.
+ :param str inactive: Set the number of days of inactivity after a password
+ has expired before the account is locked.
+ -1 will remove an account's inactivity.
+ :param str mindays: Set the minimum number of days between password
+ changes to MIN_DAYS.
+ 0 indicates the password can be changed anytime.
+ :param str maxdays: Set the maximum number of days during which a
+ password is valid.
+ -1 as MAX_DAYS will remove checking maxdays
+ :param str root: Apply changes in the CHROOT_DIR directory
+ :param str warndays: Set the number of days of warning before a password
+ change is required
+ :raises subprocess.CalledProcessError: if call to chage fails
+ """
+ cmd = ['chage']
+ if root:
+ cmd.extend(['--root', root])
+ if lastday:
+ cmd.extend(['--lastday', lastday])
+ if expiredate:
+ cmd.extend(['--expiredate', expiredate])
+ if inactive:
+ cmd.extend(['--inactive', inactive])
+ if mindays:
+ cmd.extend(['--mindays', mindays])
+ if maxdays:
+ cmd.extend(['--maxdays', maxdays])
+ if warndays:
+ cmd.extend(['--warndays', warndays])
+ cmd.append(username)
+ subprocess.check_call(cmd)
+
+
+remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
+
+
def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
"""Replicate the contents of a path"""
options = options or ['--delete', '--executability']
@@ -487,13 +535,45 @@ def mkdir(path, owner='root', group='root', perms=0o555, force=False):
def write_file(path, content, owner='root', group='root', perms=0o444):
"""Create or overwrite a file with the contents of a byte string."""
- log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
uid = pwd.getpwnam(owner).pw_uid
gid = grp.getgrnam(group).gr_gid
- with open(path, 'wb') as target:
- os.fchown(target.fileno(), uid, gid)
- os.fchmod(target.fileno(), perms)
- target.write(content)
+ # lets see if we can grab the file and compare the context, to avoid doing
+ # a write.
+ existing_content = None
+ existing_uid, existing_gid, existing_perms = None, None, None
+ try:
+ with open(path, 'rb') as target:
+ existing_content = target.read()
+ stat = os.stat(path)
+ existing_uid, existing_gid, existing_perms = (
+ stat.st_uid, stat.st_gid, stat.st_mode
+ )
+ except Exception:
+ pass
+ if content != existing_content:
+ log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
+ level=DEBUG)
+ with open(path, 'wb') as target:
+ os.fchown(target.fileno(), uid, gid)
+ os.fchmod(target.fileno(), perms)
+ if six.PY3 and isinstance(content, six.string_types):
+ content = content.encode('UTF-8')
+ target.write(content)
+ return
+ # the contents were the same, but we might still need to change the
+ # ownership or permissions.
+ if existing_uid != uid:
+ log("Changing uid on already existing content: {} -> {}"
+ .format(existing_uid, uid), level=DEBUG)
+ os.chown(path, uid, -1)
+ if existing_gid != gid:
+ log("Changing gid on already existing content: {} -> {}"
+ .format(existing_gid, gid), level=DEBUG)
+ os.chown(path, -1, gid)
+ if existing_perms != perms:
+ log("Changing permissions on existing content: {} -> {}"
+ .format(existing_perms, perms), level=DEBUG)
+ os.chmod(path, perms)
def fstab_remove(mp):
@@ -758,7 +838,7 @@ def list_nics(nic_type=None):
ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
ip_output = (line.strip() for line in ip_output if line)
- key = re.compile('^[0-9]+:\s+(.+):')
+ key = re.compile(r'^[0-9]+:\s+(.+):')
for line in ip_output:
matched = re.search(key, line)
if matched:
@@ -903,6 +983,20 @@ def is_container():
def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
+ """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list.
+
+ This method has no effect if the path specified by updatedb_path does not
+ exist or is not a file.
+
+ @param path: string the path to add to the updatedb.conf PRUNEPATHS value
+    @param updatedb_path: the path to the updatedb.conf file
+ """
+ if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
+ # If the updatedb.conf file doesn't exist then don't attempt to update
+ # the file as the package providing mlocate may not be installed on
+ # the local system
+ return
+
with open(updatedb_path, 'r+') as f_id:
updatedb_text = f_id.read()
output = updatedb(updatedb_text, path)
@@ -922,3 +1016,62 @@ def updatedb(updatedb_text, new_path):
lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
output = "\n".join(lines)
return output
+
+
+def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
+ """ Modulo distribution
+
+ This helper uses the unit number, a modulo value and a constant wait time
+ to produce a calculated wait time distribution. This is useful in large
+ scale deployments to distribute load during an expensive operation such as
+ service restarts.
+
+    If you have 1000 nodes that need to restart 100 at a time, 1 minute
+    apart:
+
+        time.sleep(modulo_distribution(modulo=100, wait=60))
+ restart()
+
+ If you need restarts to happen serially set modulo to the exact number of
+ nodes and set a high constant wait time:
+
+        time.sleep(modulo_distribution(modulo=10, wait=120))
+ restart()
+
+ @param modulo: int The modulo number creates the group distribution
+ @param wait: int The constant time wait value
+ @param non_zero_wait: boolean Override unit % modulo == 0,
+ return modulo * wait. Used to avoid collisions with
+ leader nodes which are often given priority.
+ @return: int Calculated time to wait for unit operation
+ """
+ unit_number = int(local_unit().split('/')[1])
+ calculated_wait_time = (unit_number % modulo) * wait
+ if non_zero_wait and calculated_wait_time == 0:
+ return modulo * wait
+ else:
+ return calculated_wait_time
+
+
+def install_ca_cert(ca_cert, name=None):
+ """
+ Install the given cert as a trusted CA.
+
+ The ``name`` is the stem of the filename where the cert is written, and if
+ not provided, it will default to ``juju-{charm_name}``.
+
+ If the cert is empty or None, or is unchanged, nothing is done.
+ """
+ if not ca_cert:
+ return
+ if not isinstance(ca_cert, bytes):
+ ca_cert = ca_cert.encode('utf8')
+ if not name:
+ name = 'juju-{}'.format(charm_name())
+ cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
+ new_hash = hashlib.md5(ca_cert).hexdigest()
+ if file_hash(cert_file) == new_hash:
+ return
+ log("Installing new CA cert at: {}".format(cert_file), level=INFO)
+ write_file(cert_file, ca_cert)
+ subprocess.check_call(['update-ca-certificates', '--fresh'])
diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py
index d8dc378..0ee2b66 100644
--- a/hooks/charmhelpers/core/host_factory/ubuntu.py
+++ b/hooks/charmhelpers/core/host_factory/ubuntu.py
@@ -1,5 +1,6 @@
import subprocess
+from charmhelpers.core.hookenv import cached
from charmhelpers.core.strutils import BasicStringComparator
@@ -20,6 +21,9 @@ UBUNTU_RELEASES = (
'yakkety',
'zesty',
'artful',
+ 'bionic',
+ 'cosmic',
+ 'disco',
)
@@ -70,6 +74,14 @@ def lsb_release():
return d
+def get_distrib_codename():
+ """Return the codename of the distribution
+ :returns: The codename
+ :rtype: str
+ """
+ return lsb_release()['DISTRIB_CODENAME'].lower()
+
+
def cmp_pkgrevno(package, revno, pkgcache=None):
"""Compare supplied revno with the revno of the installed package.
@@ -87,3 +99,16 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
pkgcache = apt_cache()
pkg = pkgcache[package]
return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
+
+
+@cached
+def arch():
+ """Return the package architecture as a string.
+
+ :returns: the architecture
+ :rtype: str
+ :raises: subprocess.CalledProcessError if dpkg command fails
+ """
+ return subprocess.check_output(
+ ['dpkg', '--print-architecture']
+ ).rstrip().decode('UTF-8')
diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py
index 2d40452..e01f4f8 100644
--- a/hooks/charmhelpers/core/kernel.py
+++ b/hooks/charmhelpers/core/kernel.py
@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
__platform__ = get_platform()
if __platform__ == "ubuntu":
- from charmhelpers.core.kernel_factory.ubuntu import (
+ from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
persistent_modprobe,
update_initramfs,
) # flake8: noqa -- ignore F401 for this import
elif __platform__ == "centos":
- from charmhelpers.core.kernel_factory.centos import (
+ from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
persistent_modprobe,
update_initramfs,
) # flake8: noqa -- ignore F401 for this import
diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py
index ca9dc99..179ad4f 100644
--- a/hooks/charmhelpers/core/services/base.py
+++ b/hooks/charmhelpers/core/services/base.py
@@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback):
"""
def __call__(self, manager, service_name, event_name):
service = manager.get_service(service_name)
- new_ports = service.get('ports', [])
+ # turn this generator into a list,
+ # as we'll be going over it multiple times
+ new_ports = list(service.get('ports', []))
port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
if os.path.exists(port_file):
with open(port_file) as fp:
old_ports = fp.read().split(',')
for old_port in old_ports:
- if bool(old_port):
- old_port = int(old_port)
- if old_port not in new_ports:
- hookenv.close_port(old_port)
+ if bool(old_port) and not self.ports_contains(old_port, new_ports):
+ hookenv.close_port(old_port)
with open(port_file, 'w') as fp:
fp.write(','.join(str(port) for port in new_ports))
for port in new_ports:
+ # A port is either a number or 'ICMP'
+ protocol = 'TCP'
+ if str(port).upper() == 'ICMP':
+ protocol = 'ICMP'
if event_name == 'start':
- hookenv.open_port(port)
+ hookenv.open_port(port, protocol)
elif event_name == 'stop':
- hookenv.close_port(port)
+ hookenv.close_port(port, protocol)
+
+ def ports_contains(self, port, ports):
+ if not bool(port):
+ return False
+ if str(port).upper() != 'ICMP':
+ port = int(port)
+ return port in ports
def service_stop(service_name):
diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py
index 685dabd..e8df045 100644
--- a/hooks/charmhelpers/core/strutils.py
+++ b/hooks/charmhelpers/core/strutils.py
@@ -61,13 +61,19 @@ def bytes_from_string(value):
if isinstance(value, six.string_types):
value = six.text_type(value)
else:
- msg = "Unable to interpret non-string value '%s' as boolean" % (value)
+ msg = "Unable to interpret non-string value '%s' as bytes" % (value)
raise ValueError(msg)
matches = re.match("([0-9]+)([a-zA-Z]+)", value)
- if not matches:
- msg = "Unable to interpret string value '%s' as bytes" % (value)
- raise ValueError(msg)
- return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
+ if matches:
+ size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
+ else:
+ # Assume that value passed in is bytes
+ try:
+ size = int(value)
+ except ValueError:
+ msg = "Unable to interpret string value '%s' as bytes" % (value)
+ raise ValueError(msg)
+ return size
class BasicStringComparator(object):
diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py
index 6e413e3..f1f4a28 100644
--- a/hooks/charmhelpers/core/sysctl.py
+++ b/hooks/charmhelpers/core/sysctl.py
@@ -28,27 +28,38 @@ from charmhelpers.core.hookenv import (
__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@xxxxxxxxxxxxx>'
-def create(sysctl_dict, sysctl_file):
+def create(sysctl_dict, sysctl_file, ignore=False):
"""Creates a sysctl.conf file from a YAML associative array
- :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
+ :param sysctl_dict: a dict or YAML-formatted string of sysctl
+ options eg "{ 'kernel.max_pid': 1337 }"
:type sysctl_dict: str
:param sysctl_file: path to the sysctl file to be saved
:type sysctl_file: str or unicode
+ :param ignore: If True, ignore "unknown variable" errors.
+ :type ignore: bool
:returns: None
"""
- try:
- sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
- except yaml.YAMLError:
- log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
- level=ERROR)
- return
+ if type(sysctl_dict) is not dict:
+ try:
+ sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
+ except yaml.YAMLError:
+ log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
+ level=ERROR)
+ return
+ else:
+ sysctl_dict_parsed = sysctl_dict
with open(sysctl_file, "w") as fd:
for key, value in sysctl_dict_parsed.items():
fd.write("{}={}\n".format(key, value))
- log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),
+ log("Updating sysctl_file: {} values: {}".format(sysctl_file,
+ sysctl_dict_parsed),
level=DEBUG)
- check_call(["sysctl", "-p", sysctl_file])
+ call = ["sysctl", "-p", sysctl_file]
+ if ignore:
+ call.append("-e")
+
+ check_call(call)
diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py
index 7b801a3..9014015 100644
--- a/hooks/charmhelpers/core/templating.py
+++ b/hooks/charmhelpers/core/templating.py
@@ -20,7 +20,8 @@ from charmhelpers.core import hookenv
def render(source, target, context, owner='root', group='root',
- perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
+ perms=0o444, templates_dir=None, encoding='UTF-8',
+ template_loader=None, config_template=None):
"""
Render a template.
@@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root',
The context should be a dict containing the values to be replaced in the
template.
+ config_template may be provided to render from a provided template instead
+ of loading from a file.
+
The `owner`, `group`, and `perms` options will be passed to `write_file`.
If omitted, `templates_dir` defaults to the `templates` folder in the charm.
@@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root',
if templates_dir is None:
templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
template_env = Environment(loader=FileSystemLoader(templates_dir))
- try:
- source = source
- template = template_env.get_template(source)
- except exceptions.TemplateNotFound as e:
- hookenv.log('Could not load template %s from %s.' %
- (source, templates_dir),
- level=hookenv.ERROR)
- raise e
+
+ # load from a string if provided explicitly
+ if config_template is not None:
+ template = template_env.from_string(config_template)
+ else:
+ try:
+ source = source
+ template = template_env.get_template(source)
+ except exceptions.TemplateNotFound as e:
+ hookenv.log('Could not load template %s from %s.' %
+ (source, templates_dir),
+ level=hookenv.ERROR)
+ raise e
content = template.render(context)
if target is not None:
target_dir = os.path.dirname(target)
diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py
index 54ec969..ab55432 100644
--- a/hooks/charmhelpers/core/unitdata.py
+++ b/hooks/charmhelpers/core/unitdata.py
@@ -166,6 +166,10 @@ class Storage(object):
To support dicts, lists, integer, floats, and booleans values
are automatically json encoded/decoded.
+
+ Note: to facilitate unit testing, ':memory:' can be passed as the
+ path parameter which causes sqlite3 to only build the db in memory.
+ This should only be used for testing purposes.
"""
def __init__(self, path=None):
self.db_path = path
@@ -175,6 +179,9 @@ class Storage(object):
else:
self.db_path = os.path.join(
os.environ.get('CHARM_DIR', ''), '.unit-state.db')
+ if self.db_path != ':memory:':
+ with open(self.db_path, 'a') as f:
+ os.fchmod(f.fileno(), 0o600)
self.conn = sqlite3.connect('%s' % self.db_path)
self.cursor = self.conn.cursor()
self.revision = None
@@ -358,7 +365,7 @@ class Storage(object):
try:
yield self.revision
self.revision = None
- except:
+ except Exception:
self.flush(False)
self.revision = None
raise
diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py
index 480a627..8572d34 100644
--- a/hooks/charmhelpers/fetch/__init__.py
+++ b/hooks/charmhelpers/fetch/__init__.py
@@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__
fetch = importlib.import_module(module)
filter_installed_packages = fetch.filter_installed_packages
+filter_missing_packages = fetch.filter_missing_packages
install = fetch.apt_install
upgrade = fetch.apt_upgrade
update = _fetch_update = fetch.apt_update
@@ -96,6 +97,7 @@ if __platform__ == "ubuntu":
apt_update = fetch.apt_update
apt_upgrade = fetch.apt_upgrade
apt_purge = fetch.apt_purge
+ apt_autoremove = fetch.apt_autoremove
apt_mark = fetch.apt_mark
apt_hold = fetch.apt_hold
apt_unhold = fetch.apt_unhold
diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py
index dd24f9e..d25587a 100644
--- a/hooks/charmhelpers/fetch/archiveurl.py
+++ b/hooks/charmhelpers/fetch/archiveurl.py
@@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
:param str source: URL pointing to an archive file.
:param str dest: Local path location to download archive file to.
"""
- # propogate all exceptions
+ # propagate all exceptions
# URLError, OSError, etc
proto, netloc, path, params, query, fragment = urlparse(source)
if proto in ('http', 'https'):
diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py
index 07cd029..c4ab3ff 100644
--- a/hooks/charmhelpers/fetch/bzrurl.py
+++ b/hooks/charmhelpers/fetch/bzrurl.py
@@ -13,7 +13,7 @@
# limitations under the License.
import os
-from subprocess import check_call
+from subprocess import STDOUT, check_output
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource,
@@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
cmd = ['bzr', 'branch']
cmd += cmd_opts
cmd += [source, dest]
- check_call(cmd)
+ check_output(cmd, stderr=STDOUT)
def install(self, source, dest=None, revno=None):
url_parts = self.parse_url(source)
diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py
index 4cf21bc..070ca9b 100644
--- a/hooks/charmhelpers/fetch/giturl.py
+++ b/hooks/charmhelpers/fetch/giturl.py
@@ -13,7 +13,7 @@
# limitations under the License.
import os
-from subprocess import check_call, CalledProcessError
+from subprocess import check_output, CalledProcessError, STDOUT
from charmhelpers.fetch import (
BaseFetchHandler,
UnhandledSource,
@@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
cmd = ['git', 'clone', source, dest, '--branch', branch]
if depth:
cmd.extend(['--depth', depth])
- check_call(cmd)
+ check_output(cmd, stderr=STDOUT)
def install(self, source, branch="master", dest=None, depth=None):
url_parts = self.parse_url(source)
diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py
new file mode 100644
index 0000000..bff99dc
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2014-2019 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py
new file mode 100644
index 0000000..757135e
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/debug.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# Copyright 2014-2015 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import atexit
+import sys
+
+from charmhelpers.fetch.python.rpdb import Rpdb
+from charmhelpers.core.hookenv import (
+ open_port,
+ close_port,
+ ERROR,
+ log
+)
+
+__author__ = "Jorge Niedbalski <jorge.niedbalski@xxxxxxxxxxxxx>"
+
+DEFAULT_ADDR = "0.0.0.0"
+DEFAULT_PORT = 4444
+
+
+def _error(message):
+ log(message, level=ERROR)
+
+
+def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
+ """
+ Set a trace point using the remote debugger
+ """
+ atexit.register(close_port, port)
+ try:
+ log("Starting a remote python debugger session on %s:%s" % (addr,
+ port))
+ open_port(port)
+ debugger = Rpdb(addr=addr, port=port)
+ debugger.set_trace(sys._getframe().f_back)
+ except Exception:
+ _error("Cannot start a remote debug session on %s:%s" % (addr,
+ port))
diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py
new file mode 100644
index 0000000..6e95028
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/packages.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# Copyright 2014-2015 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import six
+import subprocess
+import sys
+
+from charmhelpers.fetch import apt_install, apt_update
+from charmhelpers.core.hookenv import charm_dir, log
+
+__author__ = "Jorge Niedbalski <jorge.niedbalski@xxxxxxxxxxxxx>"
+
+
+def pip_execute(*args, **kwargs):
+ """Overriden pip_execute() to stop sys.path being changed.
+
+    The act of importing main from the pip module seems to add wheels
+    from /usr/share/python-wheels which are installed by various tools.
+ This function ensures that sys.path remains the same after the call is
+ executed.
+ """
+ try:
+ _path = sys.path
+ try:
+ from pip import main as _pip_execute
+ except ImportError:
+ apt_update()
+ if six.PY2:
+ apt_install('python-pip')
+ else:
+ apt_install('python3-pip')
+ from pip import main as _pip_execute
+ _pip_execute(*args, **kwargs)
+ finally:
+ sys.path = _path
+
+
+def parse_options(given, available):
+ """Given a set of options, check if available"""
+ for key, value in sorted(given.items()):
+ if not value:
+ continue
+ if key in available:
+ yield "--{0}={1}".format(key, value)
+
+
+def pip_install_requirements(requirements, constraints=None, **options):
+ """Install a requirements file.
+
+ :param constraints: Path to pip constraints file.
+ http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
+ """
+ command = ["install"]
+
+ available_options = ('proxy', 'src', 'log', )
+ for option in parse_options(options, available_options):
+ command.append(option)
+
+ command.append("-r {0}".format(requirements))
+ if constraints:
+ command.append("-c {0}".format(constraints))
+ log("Installing from file: {} with constraints {} "
+ "and options: {}".format(requirements, constraints, command))
+ else:
+ log("Installing from file: {} with options: {}".format(requirements,
+ command))
+ pip_execute(command)
+
+
+def pip_install(package, fatal=False, upgrade=False, venv=None,
+ constraints=None, **options):
+ """Install a python package"""
+ if venv:
+ venv_python = os.path.join(venv, 'bin/pip')
+ command = [venv_python, "install"]
+ else:
+ command = ["install"]
+
+ available_options = ('proxy', 'src', 'log', 'index-url', )
+ for option in parse_options(options, available_options):
+ command.append(option)
+
+ if upgrade:
+ command.append('--upgrade')
+
+ if constraints:
+ command.extend(['-c', constraints])
+
+ if isinstance(package, list):
+ command.extend(package)
+ else:
+ command.append(package)
+
+ log("Installing {} package with options: {}".format(package,
+ command))
+ if venv:
+ subprocess.check_call(command)
+ else:
+ pip_execute(command)
+
+
+def pip_uninstall(package, **options):
+ """Uninstall a python package"""
+ command = ["uninstall", "-q", "-y"]
+
+ available_options = ('proxy', 'log', )
+ for option in parse_options(options, available_options):
+ command.append(option)
+
+ if isinstance(package, list):
+ command.extend(package)
+ else:
+ command.append(package)
+
+ log("Uninstalling {} package with options: {}".format(package,
+ command))
+ pip_execute(command)
+
+
+def pip_list():
+ """Returns the list of current python installed packages
+ """
+ return pip_execute(["list"])
+
+
+def pip_create_virtualenv(path=None):
+ """Create an isolated Python environment."""
+ if six.PY2:
+ apt_install('python-virtualenv')
+ else:
+ apt_install('python3-virtualenv')
+
+ if path:
+ venv_path = path
+ else:
+ venv_path = os.path.join(charm_dir(), 'venv')
+
+ if not os.path.exists(venv_path):
+ subprocess.check_call(['virtualenv', venv_path])
diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py
new file mode 100644
index 0000000..9b31610
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/rpdb.py
@@ -0,0 +1,56 @@
+# Copyright 2014-2015 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Remote Python Debugger (pdb wrapper)."""
+
+import pdb
+import socket
+import sys
+
+__author__ = "Bertrand Janin <b@xxxxxxxxx>"
+__version__ = "0.1.3"
+
+
+class Rpdb(pdb.Pdb):
+
+ def __init__(self, addr="127.0.0.1", port=4444):
+ """Initialize the socket and initialize pdb."""
+
+ # Backup stdin and stdout before replacing them by the socket handle
+ self.old_stdout = sys.stdout
+ self.old_stdin = sys.stdin
+
+ # Open a 'reusable' socket to let the webapp reload on the same port
+ self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
+ self.skt.bind((addr, port))
+ self.skt.listen(1)
+ (clientsocket, address) = self.skt.accept()
+ handle = clientsocket.makefile('rw')
+ pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
+ sys.stdout = sys.stdin = handle
+
+ def shutdown(self):
+ """Revert stdin and stdout, close the socket."""
+ sys.stdout = self.old_stdout
+ sys.stdin = self.old_stdin
+ self.skt.close()
+ self.set_continue()
+
+ def do_continue(self, arg):
+ """Stop all operation on ``continue``."""
+ self.shutdown()
+ return 1
+
+ do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py
new file mode 100644
index 0000000..3eb4210
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/version.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# Copyright 2014-2015 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+__author__ = "Jorge Niedbalski <jorge.niedbalski@xxxxxxxxxxxxx>"
+
+
+def current_version():
+ """Current system python version"""
+ return sys.version_info
+
+
+def current_version_string():
+ """Current system python version as string major.minor.micro"""
+ return "{0}.{1}.{2}".format(sys.version_info.major,
+ sys.version_info.minor,
+ sys.version_info.micro)
diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py
index 23c707b..395836c 100644
--- a/hooks/charmhelpers/fetch/snap.py
+++ b/hooks/charmhelpers/fetch/snap.py
@@ -18,21 +18,33 @@ If writing reactive charms, use the snap layer:
https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html
"""
import subprocess
-from os import environ
+import os
from time import sleep
from charmhelpers.core.hookenv import log
__author__ = 'Joseph Borg <joseph.borg@xxxxxxxxxxxxx>'
-SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved).
+# The return code for "couldn't acquire lock" in Snap
+# (hopefully this will be improved).
+SNAP_NO_LOCK = 1
SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.
SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
+SNAP_CHANNELS = [
+ 'edge',
+ 'beta',
+ 'candidate',
+ 'stable',
+]
class CouldNotAcquireLockException(Exception):
pass
+class InvalidSnapChannel(Exception):
+ pass
+
+
def _snap_exec(commands):
"""
Execute snap commands.
@@ -47,13 +59,17 @@ def _snap_exec(commands):
while return_code is None or return_code == SNAP_NO_LOCK:
try:
- return_code = subprocess.check_call(['snap'] + commands, env=environ)
+ return_code = subprocess.check_call(['snap'] + commands,
+ env=os.environ)
except subprocess.CalledProcessError as e:
retry_count += + 1
if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
- raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT)
+ raise CouldNotAcquireLockException(
+ 'Could not aquire lock after {} attempts'
+ .format(SNAP_NO_LOCK_RETRY_COUNT))
return_code = e.returncode
- log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN')
+ log('Snap failed to acquire lock, trying again in {} seconds.'
+ .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN'))
sleep(SNAP_NO_LOCK_RETRY_DELAY)
return return_code
@@ -120,3 +136,15 @@ def snap_refresh(packages, *flags):
log(message, level='INFO')
return _snap_exec(['refresh'] + flags + packages)
+
+
+def valid_snap_channel(channel):
+ """ Validate snap channel exists
+
+ :raises InvalidSnapChannel: When channel does not exist
+ :return: Boolean
+ """
+ if channel.lower() in SNAP_CHANNELS:
+ return True
+ else:
+ raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py
index 57b5fb6..24c76e3 100644
--- a/hooks/charmhelpers/fetch/ubuntu.py
+++ b/hooks/charmhelpers/fetch/ubuntu.py
@@ -19,14 +19,14 @@ import re
import six
import time
import subprocess
-from tempfile import NamedTemporaryFile
-from charmhelpers.core.host import (
- lsb_release
-)
+from charmhelpers.core.host import get_distrib_codename
+
from charmhelpers.core.hookenv import (
log,
DEBUG,
+ WARNING,
+ env_proxy_settings,
)
from charmhelpers.fetch import SourceConfigError, GPGKeyError
@@ -43,6 +43,7 @@ ARCH_TO_PROPOSED_POCKET = {
'x86_64': PROPOSED_POCKET,
'ppc64le': PROPOSED_PORTS_POCKET,
'aarch64': PROPOSED_PORTS_POCKET,
+ 's390x': PROPOSED_PORTS_POCKET,
}
CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
@@ -139,7 +140,7 @@ CLOUD_ARCHIVE_POCKETS = {
'xenial-updates/ocata': 'xenial-updates/ocata',
'ocata/proposed': 'xenial-proposed/ocata',
'xenial-ocata/proposed': 'xenial-proposed/ocata',
- 'xenial-ocata/newton': 'xenial-proposed/ocata',
+ 'xenial-proposed/ocata': 'xenial-proposed/ocata',
# Pike
'pike': 'xenial-updates/pike',
'xenial-pike': 'xenial-updates/pike',
@@ -147,7 +148,7 @@ CLOUD_ARCHIVE_POCKETS = {
'xenial-updates/pike': 'xenial-updates/pike',
'pike/proposed': 'xenial-proposed/pike',
'xenial-pike/proposed': 'xenial-proposed/pike',
- 'xenial-pike/newton': 'xenial-proposed/pike',
+ 'xenial-proposed/pike': 'xenial-proposed/pike',
# Queens
'queens': 'xenial-updates/queens',
'xenial-queens': 'xenial-updates/queens',
@@ -155,13 +156,37 @@ CLOUD_ARCHIVE_POCKETS = {
'xenial-updates/queens': 'xenial-updates/queens',
'queens/proposed': 'xenial-proposed/queens',
'xenial-queens/proposed': 'xenial-proposed/queens',
- 'xenial-queens/newton': 'xenial-proposed/queens',
+ 'xenial-proposed/queens': 'xenial-proposed/queens',
+ # Rocky
+ 'rocky': 'bionic-updates/rocky',
+ 'bionic-rocky': 'bionic-updates/rocky',
+ 'bionic-rocky/updates': 'bionic-updates/rocky',
+ 'bionic-updates/rocky': 'bionic-updates/rocky',
+ 'rocky/proposed': 'bionic-proposed/rocky',
+ 'bionic-rocky/proposed': 'bionic-proposed/rocky',
+ 'bionic-proposed/rocky': 'bionic-proposed/rocky',
+ # Stein
+ 'stein': 'bionic-updates/stein',
+ 'bionic-stein': 'bionic-updates/stein',
+ 'bionic-stein/updates': 'bionic-updates/stein',
+ 'bionic-updates/stein': 'bionic-updates/stein',
+ 'stein/proposed': 'bionic-proposed/stein',
+ 'bionic-stein/proposed': 'bionic-proposed/stein',
+ 'bionic-proposed/stein': 'bionic-proposed/stein',
+ # Train
+ 'train': 'bionic-updates/train',
+ 'bionic-train': 'bionic-updates/train',
+ 'bionic-train/updates': 'bionic-updates/train',
+ 'bionic-updates/train': 'bionic-updates/train',
+ 'train/proposed': 'bionic-proposed/train',
+ 'bionic-train/proposed': 'bionic-proposed/train',
+ 'bionic-proposed/train': 'bionic-proposed/train',
}
APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.
-CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times.
+CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times.
def filter_installed_packages(packages):
@@ -179,6 +204,18 @@ def filter_installed_packages(packages):
return _pkgs
+def filter_missing_packages(packages):
+ """Return a list of packages that are installed.
+
+ :param packages: list of packages to evaluate.
+ :returns list: Packages that are installed.
+ """
+ return list(
+ set(packages) -
+ set(filter_installed_packages(packages))
+ )
+
+
def apt_cache(in_memory=True, progress=None):
"""Build and return an apt cache."""
from apt import apt_pkg
@@ -238,6 +275,14 @@ def apt_purge(packages, fatal=False):
_run_apt_command(cmd, fatal)
+def apt_autoremove(purge=True, fatal=False):
+ """Purge one or more packages."""
+ cmd = ['apt-get', '--assume-yes', 'autoremove']
+ if purge:
+ cmd.append('--purge')
+ _run_apt_command(cmd, fatal)
+
+
def apt_mark(packages, mark, fatal=False):
"""Flag one or more packages using apt-mark."""
log("Marking {} as {}".format(packages, mark))
@@ -261,42 +306,156 @@ def apt_unhold(packages, fatal=False):
return apt_mark(packages, 'unhold', fatal=fatal)
-def import_key(keyid):
- """Import a key in either ASCII Armor or Radix64 format.
+def import_key(key):
+ """Import an ASCII Armor key.
- `keyid` is either the keyid to fetch from a PGP server, or
- the key in ASCII armor foramt.
+ A Radix64 format keyid is also supported for backwards
+ compatibility. In this case Ubuntu keyserver will be
+ queried for a key via HTTPS by its keyid. This method
+    is less preferable because https proxy servers may
+ require traffic decryption which is equivalent to a
+ man-in-the-middle attack (a proxy server impersonates
+ keyserver TLS certificates and has to be explicitly
+ trusted by the system).
- :param keyid: String of key (or key id).
+ :param key: A GPG key in ASCII armor format,
+ including BEGIN and END markers or a keyid.
+ :type key: (bytes, str)
:raises: GPGKeyError if the key could not be imported
"""
- key = keyid.strip()
- if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and
- key.endswith('-----END PGP PUBLIC KEY BLOCK-----')):
+ key = key.strip()
+ if '-' in key or '\n' in key:
+ # Send everything not obviously a keyid to GPG to import, as
+ # we trust its validation better than our own. eg. handling
+ # comments before the key.
log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
- log("Importing ASCII Armor PGP key", level=DEBUG)
- with NamedTemporaryFile() as keyfile:
- with open(keyfile.name, 'w') as fd:
- fd.write(key)
- fd.write("\n")
- cmd = ['apt-key', 'add', keyfile.name]
- try:
- subprocess.check_call(cmd)
- except subprocess.CalledProcessError:
- error = "Error importing PGP key '{}'".format(key)
- log(error)
- raise GPGKeyError(error)
+ if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
+ '-----END PGP PUBLIC KEY BLOCK-----' in key):
+ log("Writing provided PGP key in the binary format", level=DEBUG)
+ if six.PY3:
+ key_bytes = key.encode('utf-8')
+ else:
+ key_bytes = key
+ key_name = _get_keyid_by_gpg_key(key_bytes)
+ key_gpg = _dearmor_gpg_key(key_bytes)
+ _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
+ else:
+ raise GPGKeyError("ASCII armor markers missing from GPG key")
else:
- log("PGP key found (looks like Radix64 format)", level=DEBUG)
- log("Importing PGP key from keyserver", level=DEBUG)
- cmd = ['apt-key', 'adv', '--keyserver',
- 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key]
- try:
- subprocess.check_call(cmd)
- except subprocess.CalledProcessError:
- error = "Error importing PGP key '{}'".format(key)
- log(error)
- raise GPGKeyError(error)
+ log("PGP key found (looks like Radix64 format)", level=WARNING)
+ log("SECURELY importing PGP key from keyserver; "
+ "full key not provided.", level=WARNING)
+ # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
+ # to retrieve GPG keys. `apt-key adv` command is deprecated as is
+ # apt-key in general as noted in its manpage. See lp:1433761 for more
+ # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
+ # gpg
+ key_asc = _get_key_by_keyid(key)
+ # write the key in GPG format so that apt-key list shows it
+ key_gpg = _dearmor_gpg_key(key_asc)
+ _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
+
+
+def _get_keyid_by_gpg_key(key_material):
+ """Get a GPG key fingerprint by GPG key material.
+ Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
+ or binary GPG key material. Can be used, for example, to generate file
+ names for keys passed via charm options.
+
+ :param key_material: ASCII armor-encoded or binary GPG key material
+ :type key_material: bytes
+ :raises: GPGKeyError if invalid key material has been provided
+ :returns: A GPG key fingerprint
+ :rtype: str
+ """
+ # Use the same gpg command for both Xenial and Bionic
+ cmd = 'gpg --with-colons --with-fingerprint'
+ ps = subprocess.Popen(cmd.split(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE)
+ out, err = ps.communicate(input=key_material)
+ if six.PY3:
+ out = out.decode('utf-8')
+ err = err.decode('utf-8')
+ if 'gpg: no valid OpenPGP data found.' in err:
+ raise GPGKeyError('Invalid GPG key material provided')
+ # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
+ return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
+
+
+def _get_key_by_keyid(keyid):
+ """Get a key via HTTPS from the Ubuntu keyserver.
+ Different key ID formats are supported by SKS keyservers (the longer ones
+ are more secure, see "dead beef attack" and https://evil32.com/). Since
+ HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
+ impersonate keyserver.ubuntu.com and generate a certificate with
+ keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
+ certificate. If such proxy behavior is expected it is necessary to add the
+ CA certificate chain containing the intermediate CA of the SSLBump proxy to
+ every machine that this code runs on via ca-certs cloud-init directive (via
+ cloudinit-userdata model-config) or via other means (such as through a
+ custom charm option). Also note that DNS resolution for the hostname in a
+ URL is done at a proxy server - not at the client side.
+
+ 8-digit (32 bit) key ID
+ https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
+ 16-digit (64 bit) key ID
+ https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
+ 40-digit key ID:
+ https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
+
+ :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
+ :type keyid: (bytes, str)
+ :returns: A key material for the specified GPG key id
+ :rtype: (str, bytes)
+ :raises: subprocess.CalledProcessError
+ """
+ # options=mr - machine-readable output (disables html wrappers)
+ keyserver_url = ('https://keyserver.ubuntu.com'
+ '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
+ curl_cmd = ['curl', keyserver_url.format(keyid)]
+ # use proxy server settings in order to retrieve the key
+ return subprocess.check_output(curl_cmd,
+ env=env_proxy_settings(['https']))
+
+
+def _dearmor_gpg_key(key_asc):
+ """Converts a GPG key in the ASCII armor format to the binary format.
+
+ :param key_asc: A GPG key in ASCII armor format.
+ :type key_asc: (str, bytes)
+ :returns: A GPG key in binary format
+ :rtype: (str, bytes)
+ :raises: GPGKeyError
+ """
+ ps = subprocess.Popen(['gpg', '--dearmor'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE)
+ out, err = ps.communicate(input=key_asc)
+ # no need to decode output as it is binary (invalid utf-8), only error
+ if six.PY3:
+ err = err.decode('utf-8')
+ if 'gpg: no valid OpenPGP data found.' in err:
+ raise GPGKeyError('Invalid GPG key material. Check your network setup'
+ ' (MTU, routing, DNS) and/or proxy server settings'
+ ' as well as destination keyserver status.')
+ else:
+ return out
+
+
+def _write_apt_gpg_keyfile(key_name, key_material):
+ """Writes GPG key material into a file at a provided path.
+
+ :param key_name: A key name to use for a key file (could be a fingerprint)
+ :type key_name: str
+ :param key_material: A GPG key material (binary)
+ :type key_material: (str, bytes)
+ """
+ with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
+ 'wb') as keyf:
+ keyf.write(key_material)
def add_source(source, key=None, fail_invalid=False):
@@ -364,20 +523,23 @@ def add_source(source, key=None, fail_invalid=False):
(r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),
(r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),
(r"^cloud:(.*)$", _add_cloud_pocket),
+ (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check),
])
if source is None:
source = ''
for r, fn in six.iteritems(_mapping):
m = re.match(r, source)
if m:
- # call the assoicated function with the captured groups
- # raises SourceConfigError on error.
- fn(*m.groups())
if key:
+ # Import key before adding the source which depends on it,
+ # as refreshing packages could fail otherwise.
try:
import_key(key)
except GPGKeyError as e:
raise SourceConfigError(str(e))
+ # call the associated function with the captured groups
+ # raises SourceConfigError on error.
+ fn(*m.groups())
break
else:
# nothing matched. log an error and maybe sys.exit
@@ -390,13 +552,13 @@ def add_source(source, key=None, fail_invalid=False):
def _add_proposed():
"""Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list
- Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for
+ Uses get_distrib_codename to determine the correct stanza for
the deb line.
 For Intel architectures PROPOSED_POCKET is used for the release, but for
other architectures PROPOSED_PORTS_POCKET is used for the release.
"""
- release = lsb_release()['DISTRIB_CODENAME']
+ release = get_distrib_codename()
arch = platform.machine()
if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):
raise SourceConfigError("Arch {} not supported for (distro-)proposed"
@@ -409,8 +571,16 @@ def _add_apt_repository(spec):
"""Add the spec using add_apt_repository
:param spec: the parameter to pass to add_apt_repository
+ :type spec: str
"""
- _run_with_retries(['add-apt-repository', '--yes', spec])
+ if '{series}' in spec:
+ series = get_distrib_codename()
+ spec = spec.replace('{series}', series)
+ # software-properties package for bionic properly reacts to proxy settings
+ # passed as environment variables (See lp:1433761). This is not the case
+    # for LTS and non-LTS releases below bionic.
+ _run_with_retries(['add-apt-repository', '--yes', spec],
+ cmd_env=env_proxy_settings(['https']))
def _add_cloud_pocket(pocket):
@@ -479,7 +649,7 @@ def _verify_is_ubuntu_rel(release, os_release):
:raises: SourceConfigError if the release is not the same as the ubuntu
release.
"""
- ubuntu_rel = lsb_release()['DISTRIB_CODENAME']
+ ubuntu_rel = get_distrib_codename()
if release != ubuntu_rel:
raise SourceConfigError(
'Invalid Cloud Archive release specified: {}-{} on this Ubuntu'
@@ -557,7 +727,7 @@ def get_upstream_version(package):
cache = apt_cache()
try:
pkg = cache[package]
- except:
+ except Exception:
# the package is unknown to the current apt cache.
return None
Follow ups