authorDavid Ames <david.ames@canonical.com>2016-09-30 08:56:44 -0700
committerDavid Ames <david.ames@canonical.com>2016-09-30 10:36:26 -0700
commit49b75b04bc0c58d0ce03a7f78f0e3660316a3452 (patch)
tree94875db73073333a2d1496a2b4971faff84362e2
parent5840e68a7414c2cb3c2ddf01ba663c54ada65c03 (diff)
Pre-release charm-helpers sync 16.10
Get each charm up to date with lp:charm-helpers for release testing.

Change-Id: I220409cf255378b57016dd6856ef02a87a21f79f
Notes
Notes (review):
    Verified+1: Canonical CI <uosci-testing-bot@ubuntu.com>
    Code-Review+2: Ryan Beisner <ryan.beisner@canonical.com>
    Workflow+1: Ryan Beisner <ryan.beisner@canonical.com>
    Verified+2: Jenkins
    Submitted-by: Jenkins
    Submitted-at: Sat, 01 Oct 2016 01:44:15 +0000
    Reviewed-on: https://review.openstack.org/380413
    Project: openstack/charm-cinder-backup
    Branch: refs/heads/master
-rw-r--r--  charm-helpers-hooks.yaml                                    |   1
-rw-r--r--  hooks/charmhelpers/contrib/network/ip.py                    |   2
-rw-r--r--  hooks/charmhelpers/contrib/openstack/amulet/deployment.py   |  72
-rw-r--r--  hooks/charmhelpers/contrib/openstack/amulet/utils.py        | 119
-rw-r--r--  hooks/charmhelpers/contrib/openstack/context.py             |   6
-rw-r--r--  hooks/charmhelpers/contrib/openstack/ip.py                  |   9
-rw-r--r--  hooks/charmhelpers/contrib/openstack/neutron.py             |   6
-rw-r--r--  hooks/charmhelpers/contrib/openstack/utils.py               |  23
-rw-r--r--  hooks/charmhelpers/contrib/storage/linux/ceph.py            |   6
-rw-r--r--  hooks/charmhelpers/core/hookenv.py                          |  14
-rw-r--r--  hooks/charmhelpers/core/host.py                             |  87
-rw-r--r--  hooks/charmhelpers/core/host_factory/__init__.py            |   0
-rw-r--r--  hooks/charmhelpers/core/host_factory/centos.py              |  56
-rw-r--r--  hooks/charmhelpers/core/host_factory/ubuntu.py              |  56
-rw-r--r--  hooks/charmhelpers/core/kernel.py                           |  36
-rw-r--r--  hooks/charmhelpers/core/kernel_factory/__init__.py          |   0
-rw-r--r--  hooks/charmhelpers/core/kernel_factory/centos.py            |  17
-rw-r--r--  hooks/charmhelpers/core/kernel_factory/ubuntu.py            |  13
-rw-r--r--  hooks/charmhelpers/fetch/__init__.py                        | 324
-rw-r--r--  hooks/charmhelpers/fetch/bzrurl.py                          |   7
-rw-r--r--  hooks/charmhelpers/fetch/centos.py                          | 171
-rw-r--r--  hooks/charmhelpers/fetch/giturl.py                          |   7
-rw-r--r--  hooks/charmhelpers/fetch/ubuntu.py                          | 336
-rw-r--r--  hooks/charmhelpers/osplatform.py                            |  19
-rw-r--r--  hooks/charmhelpers/payload/execd.py                         |   5
-rw-r--r--  tests/charmhelpers/contrib/openstack/amulet/deployment.py   |  72
-rw-r--r--  tests/charmhelpers/contrib/openstack/amulet/utils.py        | 119
27 files changed, 1173 insertions, 410 deletions
diff --git a/charm-helpers-hooks.yaml b/charm-helpers-hooks.yaml
index bf9ff3b..d3d0b0d 100644
--- a/charm-helpers-hooks.yaml
+++ b/charm-helpers-hooks.yaml
@@ -3,6 +3,7 @@ destination: hooks/charmhelpers
3include: 3include:
4 - core 4 - core
5 - fetch 5 - fetch
6 - osplatform
6 - contrib.openstack|inc=* 7 - contrib.openstack|inc=*
7 - contrib.storage 8 - contrib.storage
8 - contrib.hahelpers 9 - contrib.hahelpers
diff --git a/hooks/charmhelpers/contrib/network/ip.py b/hooks/charmhelpers/contrib/network/ip.py
index d6dee17..2d2026e 100644
--- a/hooks/charmhelpers/contrib/network/ip.py
+++ b/hooks/charmhelpers/contrib/network/ip.py
@@ -406,7 +406,7 @@ def is_ip(address):
406 # Test to see if already an IPv4/IPv6 address 406 # Test to see if already an IPv4/IPv6 address
407 address = netaddr.IPAddress(address) 407 address = netaddr.IPAddress(address)
408 return True 408 return True
409 except netaddr.AddrFormatError: 409 except (netaddr.AddrFormatError, ValueError):
410 return False 410 return False
411 411
412 412
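The broader except clause above matters because netaddr.IPAddress() raises ValueError rather than AddrFormatError for some malformed inputs, notably CIDR-style strings. A minimal sketch of the resulting behaviour; the sample addresses are illustrative:

    from charmhelpers.contrib.network.ip import is_ip

    assert is_ip('10.5.0.1')           # plain IPv4/IPv6 strings still pass
    assert not is_ip('10.5.0.0/24')    # CIDR-style input is now reported as
                                       # "not an IP" instead of raising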
diff --git a/hooks/charmhelpers/contrib/openstack/amulet/deployment.py b/hooks/charmhelpers/contrib/openstack/amulet/deployment.py
index 6ce91db..6fe8cf8 100644
--- a/hooks/charmhelpers/contrib/openstack/amulet/deployment.py
+++ b/hooks/charmhelpers/contrib/openstack/amulet/deployment.py
@@ -98,8 +98,47 @@ class OpenStackAmuletDeployment(AmuletDeployment):
98 98
99 return other_services 99 return other_services
100 100
101 def _add_services(self, this_service, other_services): 101 def _add_services(self, this_service, other_services, use_source=None,
102 """Add services to the deployment and set openstack-origin/source.""" 102 no_origin=None):
103 """Add services to the deployment and optionally set
104 openstack-origin/source.
105
106 :param this_service dict: Service dictionary describing the service
107 whose amulet tests are being run
108 :param other_services dict: List of service dictionaries describing
109 the services needed to support the target
110 service
111 :param use_source list: List of services which use the 'source' config
112 option rather than 'openstack-origin'
113 :param no_origin list: List of services which do not support setting
114 the Cloud Archive.
115 Service Dict:
116 {
117 'name': str charm-name,
118 'units': int number of units,
119 'constraints': dict of juju constraints,
120 'location': str location of charm,
121 }
122 eg
123 this_service = {
124 'name': 'openvswitch-odl',
125 'constraints': {'mem': '8G'},
126 }
127 other_services = [
128 {
129 'name': 'nova-compute',
130 'units': 2,
131 'constraints': {'mem': '4G'},
132 'location': cs:~bob/xenial/nova-compute
133 },
134 {
135 'name': 'mysql',
136 'constraints': {'mem': '2G'},
137 },
138 {'neutron-api-odl'}]
139 use_source = ['mysql']
140 no_origin = ['neutron-api-odl']
141 """
103 self.log.info('OpenStackAmuletDeployment: adding services') 142 self.log.info('OpenStackAmuletDeployment: adding services')
104 143
105 other_services = self._determine_branch_locations(other_services) 144 other_services = self._determine_branch_locations(other_services)
@@ -110,16 +149,22 @@ class OpenStackAmuletDeployment(AmuletDeployment):
110 services = other_services 149 services = other_services
111 services.append(this_service) 150 services.append(this_service)
112 151
152 use_source = use_source or []
153 no_origin = no_origin or []
154
113 # Charms which should use the source config option 155 # Charms which should use the source config option
114 use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', 156 use_source = list(set(
115 'ceph-osd', 'ceph-radosgw', 'ceph-mon', 'ceph-proxy'] 157 use_source + ['mysql', 'mongodb', 'rabbitmq-server', 'ceph',
158 'ceph-osd', 'ceph-radosgw', 'ceph-mon',
159 'ceph-proxy']))
116 160
117 # Charms which can not use openstack-origin, ie. many subordinates 161 # Charms which can not use openstack-origin, ie. many subordinates
118 no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', 162 no_origin = list(set(
119 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', 163 no_origin + ['cinder-ceph', 'hacluster', 'neutron-openvswitch',
120 'cinder-backup', 'nexentaedge-data', 164 'nrpe', 'openvswitch-odl', 'neutron-api-odl',
121 'nexentaedge-iscsi-gw', 'nexentaedge-swift-gw', 165 'odl-controller', 'cinder-backup', 'nexentaedge-data',
122 'cinder-nexentaedge', 'nexentaedge-mgmt'] 166 'nexentaedge-iscsi-gw', 'nexentaedge-swift-gw',
167 'cinder-nexentaedge', 'nexentaedge-mgmt']))
123 168
124 if self.openstack: 169 if self.openstack:
125 for svc in services: 170 for svc in services:
@@ -220,7 +265,8 @@ class OpenStackAmuletDeployment(AmuletDeployment):
220 self.trusty_icehouse, self.trusty_juno, self.utopic_juno, 265 self.trusty_icehouse, self.trusty_juno, self.utopic_juno,
221 self.trusty_kilo, self.vivid_kilo, self.trusty_liberty, 266 self.trusty_kilo, self.vivid_kilo, self.trusty_liberty,
222 self.wily_liberty, self.trusty_mitaka, 267 self.wily_liberty, self.trusty_mitaka,
223 self.xenial_mitaka) = range(14) 268 self.xenial_mitaka, self.xenial_newton,
269 self.yakkety_newton) = range(16)
224 270
225 releases = { 271 releases = {
226 ('precise', None): self.precise_essex, 272 ('precise', None): self.precise_essex,
@@ -236,7 +282,10 @@ class OpenStackAmuletDeployment(AmuletDeployment):
236 ('utopic', None): self.utopic_juno, 282 ('utopic', None): self.utopic_juno,
237 ('vivid', None): self.vivid_kilo, 283 ('vivid', None): self.vivid_kilo,
238 ('wily', None): self.wily_liberty, 284 ('wily', None): self.wily_liberty,
239 ('xenial', None): self.xenial_mitaka} 285 ('xenial', None): self.xenial_mitaka,
286 ('xenial', 'cloud:xenial-newton'): self.xenial_newton,
287 ('yakkety', None): self.yakkety_newton,
288 }
240 return releases[(self.series, self.openstack)] 289 return releases[(self.series, self.openstack)]
241 290
242 def _get_openstack_release_string(self): 291 def _get_openstack_release_string(self):
@@ -254,6 +303,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
254 ('vivid', 'kilo'), 303 ('vivid', 'kilo'),
255 ('wily', 'liberty'), 304 ('wily', 'liberty'),
256 ('xenial', 'mitaka'), 305 ('xenial', 'mitaka'),
306 ('yakkety', 'newton'),
257 ]) 307 ])
258 if self.openstack: 308 if self.openstack:
259 os_origin = self.openstack.split(':')[1] 309 os_origin = self.openstack.split(':')[1]
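A sketch of how a charm's amulet tests might drive the new keyword arguments when overriding _add_services; the service names below mirror the docstring example above rather than this charm's own test suite:

    from charmhelpers.contrib.openstack.amulet.deployment import (
        OpenStackAmuletDeployment,
    )

    class BasicDeployment(OpenStackAmuletDeployment):
        def _add_services(self):
            this_service = {'name': 'openvswitch-odl',
                            'constraints': {'mem': '8G'}}
            other_services = [
                {'name': 'mysql', 'constraints': {'mem': '2G'}},
                {'name': 'neutron-api-odl'},
            ]
            super(BasicDeployment, self)._add_services(
                this_service, other_services,
                use_source=['mysql'],           # uses 'source', not 'openstack-origin'
                no_origin=['neutron-api-odl'])  # subordinate: no origin option at all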
diff --git a/hooks/charmhelpers/contrib/openstack/amulet/utils.py b/hooks/charmhelpers/contrib/openstack/amulet/utils.py
index 8040b57..24b353e 100644
--- a/hooks/charmhelpers/contrib/openstack/amulet/utils.py
+++ b/hooks/charmhelpers/contrib/openstack/amulet/utils.py
@@ -83,6 +83,56 @@ class OpenStackAmuletUtils(AmuletUtils):
83 if not found: 83 if not found:
84 return 'endpoint not found' 84 return 'endpoint not found'
85 85
86 def validate_v3_endpoint_data(self, endpoints, admin_port, internal_port,
87 public_port, expected):
88 """Validate keystone v3 endpoint data.
89
90 Validate the v3 endpoint data which has changed from v2. The
91 ports are used to find the matching endpoint.
92
93 The new v3 endpoint data looks like:
94
95 [<Endpoint enabled=True,
96 id=0432655fc2f74d1e9fa17bdaa6f6e60b,
97 interface=admin,
98 links={u'self': u'<RESTful URL of this endpoint>'},
99 region=RegionOne,
100 region_id=RegionOne,
101 service_id=17f842a0dc084b928e476fafe67e4095,
102 url=http://10.5.6.5:9312>,
103 <Endpoint enabled=True,
104 id=6536cb6cb92f4f41bf22b079935c7707,
105 interface=admin,
106 links={u'self': u'<RESTful url of this endpoint>'},
107 region=RegionOne,
108 region_id=RegionOne,
109 service_id=72fc8736fb41435e8b3584205bb2cfa3,
110 url=http://10.5.6.6:35357/v3>,
111 ... ]
112 """
113 self.log.debug('Validating v3 endpoint data...')
114 self.log.debug('actual: {}'.format(repr(endpoints)))
115 found = []
116 for ep in endpoints:
117 self.log.debug('endpoint: {}'.format(repr(ep)))
118 if ((admin_port in ep.url and ep.interface == 'admin') or
119 (internal_port in ep.url and ep.interface == 'internal') or
120 (public_port in ep.url and ep.interface == 'public')):
121 found.append(ep.interface)
122 # note we ignore the links member.
123 actual = {'id': ep.id,
124 'region': ep.region,
125 'region_id': ep.region_id,
126 'interface': self.not_null,
127 'url': ep.url,
128 'service_id': ep.service_id, }
129 ret = self._validate_dict_data(expected, actual)
130 if ret:
131 return 'unexpected endpoint data - {}'.format(ret)
132
133 if len(found) != 3:
134 return 'Unexpected number of endpoints found'
135
86 def validate_svc_catalog_endpoint_data(self, expected, actual): 136 def validate_svc_catalog_endpoint_data(self, expected, actual):
87 """Validate service catalog endpoint data. 137 """Validate service catalog endpoint data.
88 138
@@ -100,6 +150,72 @@ class OpenStackAmuletUtils(AmuletUtils):
100 return "endpoint {} does not exist".format(k) 150 return "endpoint {} does not exist".format(k)
101 return ret 151 return ret
102 152
153 def validate_v3_svc_catalog_endpoint_data(self, expected, actual):
154 """Validate the keystone v3 catalog endpoint data.
155
 156 Validate a list of dictionaries that make up the keystone v3 service
157 catalogue.
158
159 It is in the form of:
160
161
162 {u'identity': [{u'id': u'48346b01c6804b298cdd7349aadb732e',
163 u'interface': u'admin',
164 u'region': u'RegionOne',
165 u'region_id': u'RegionOne',
166 u'url': u'http://10.5.5.224:35357/v3'},
167 {u'id': u'8414f7352a4b47a69fddd9dbd2aef5cf',
168 u'interface': u'public',
169 u'region': u'RegionOne',
170 u'region_id': u'RegionOne',
171 u'url': u'http://10.5.5.224:5000/v3'},
172 {u'id': u'd5ca31440cc24ee1bf625e2996fb6a5b',
173 u'interface': u'internal',
174 u'region': u'RegionOne',
175 u'region_id': u'RegionOne',
176 u'url': u'http://10.5.5.224:5000/v3'}],
177 u'key-manager': [{u'id': u'68ebc17df0b045fcb8a8a433ebea9e62',
178 u'interface': u'public',
179 u'region': u'RegionOne',
180 u'region_id': u'RegionOne',
181 u'url': u'http://10.5.5.223:9311'},
182 {u'id': u'9cdfe2a893c34afd8f504eb218cd2f9d',
183 u'interface': u'internal',
184 u'region': u'RegionOne',
185 u'region_id': u'RegionOne',
186 u'url': u'http://10.5.5.223:9311'},
187 {u'id': u'f629388955bc407f8b11d8b7ca168086',
188 u'interface': u'admin',
189 u'region': u'RegionOne',
190 u'region_id': u'RegionOne',
191 u'url': u'http://10.5.5.223:9312'}]}
192
 193 Note that an added complication is that the ordering of admin, public
 194 and internal entries against 'interface' within each region can vary.
195
196 Thus, the function sorts the expected and actual lists using the
197 interface key as a sort key, prior to the comparison.
198 """
199 self.log.debug('Validating v3 service catalog endpoint data...')
200 self.log.debug('actual: {}'.format(repr(actual)))
201 for k, v in six.iteritems(expected):
202 if k in actual:
203 l_expected = sorted(v, key=lambda x: x['interface'])
204 l_actual = sorted(actual[k], key=lambda x: x['interface'])
205 if len(l_actual) != len(l_expected):
206 return ("endpoint {} has differing number of interfaces "
207 " - expected({}), actual({})"
208 .format(k, len(l_expected), len(l_actual)))
209 for i_expected, i_actual in zip(l_expected, l_actual):
210 self.log.debug("checking interface {}"
211 .format(i_expected['interface']))
212 ret = self._validate_dict_data(i_expected, i_actual)
213 if ret:
214 return self.endpoint_error(k, ret)
215 else:
216 return "endpoint {} does not exist".format(k)
217 return ret
218
103 def validate_tenant_data(self, expected, actual): 219 def validate_tenant_data(self, expected, actual):
104 """Validate tenant data. 220 """Validate tenant data.
105 221
@@ -928,7 +1044,8 @@ class OpenStackAmuletUtils(AmuletUtils):
928 retry_delay=5, 1044 retry_delay=5,
929 socket_timeout=1) 1045 socket_timeout=1)
930 connection = pika.BlockingConnection(parameters) 1046 connection = pika.BlockingConnection(parameters)
931 assert connection.server_properties['product'] == 'RabbitMQ' 1047 assert connection.is_open is True
1048 assert connection.is_closing is False
932 self.log.debug('Connect OK') 1049 self.log.debug('Connect OK')
933 return connection 1050 return connection
934 except Exception as e: 1051 except Exception as e:
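A sketch of how a test might call the new keystone v3 validators; the ports follow keystone defaults, and the keystone_v3 client handle and expected values are assumptions for illustration:

    import amulet
    from charmhelpers.contrib.openstack.amulet.utils import (
        OpenStackAmuletUtils,
        DEBUG,
    )

    u = OpenStackAmuletUtils(DEBUG)
    # keystone_v3 is assumed to be a keystoneclient v3 handle created
    # elsewhere in the test.
    expected = {
        'id': u.not_null,
        'region': 'RegionOne',
        'region_id': 'RegionOne',
        'interface': u.not_null,
        'url': u.not_null,
        'service_id': u.not_null,
    }
    endpoints = keystone_v3.endpoints.list()
    ret = u.validate_v3_endpoint_data(endpoints, '35357', '5000', '5000',
                                      expected)
    if ret:
        amulet.raise_status(amulet.FAIL, msg=ret)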
diff --git a/hooks/charmhelpers/contrib/openstack/context.py b/hooks/charmhelpers/contrib/openstack/context.py
index 76737f2..b601a22 100644
--- a/hooks/charmhelpers/contrib/openstack/context.py
+++ b/hooks/charmhelpers/contrib/openstack/context.py
@@ -1421,9 +1421,9 @@ class InternalEndpointContext(OSContextGenerator):
1421class AppArmorContext(OSContextGenerator): 1421class AppArmorContext(OSContextGenerator):
1422 """Base class for apparmor contexts.""" 1422 """Base class for apparmor contexts."""
1423 1423
1424 def __init__(self): 1424 def __init__(self, profile_name=None):
1425 self._ctxt = None 1425 self._ctxt = None
1426 self.aa_profile = None 1426 self.aa_profile = profile_name
1427 self.aa_utils_packages = ['apparmor-utils'] 1427 self.aa_utils_packages = ['apparmor-utils']
1428 1428
1429 @property 1429 @property
@@ -1442,6 +1442,8 @@ class AppArmorContext(OSContextGenerator):
1442 if config('aa-profile-mode') in ['disable', 'enforce', 'complain']: 1442 if config('aa-profile-mode') in ['disable', 'enforce', 'complain']:
1443 ctxt = {'aa_profile_mode': config('aa-profile-mode'), 1443 ctxt = {'aa_profile_mode': config('aa-profile-mode'),
1444 'ubuntu_release': lsb_release()['DISTRIB_RELEASE']} 1444 'ubuntu_release': lsb_release()['DISTRIB_RELEASE']}
1445 if self.aa_profile:
1446 ctxt['aa_profile'] = self.aa_profile
1445 else: 1447 else:
1446 ctxt = None 1448 ctxt = None
1447 return ctxt 1449 return ctxt
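With the constructor change, a charm can hand its profile name straight to the context instead of subclassing only to set it. A minimal sketch; the profile name is an assumption for illustration:

    from charmhelpers.contrib.openstack.context import AppArmorContext

    aa_ctxt = AppArmorContext(profile_name='usr.bin.cinder-backup')
    # When the 'aa-profile-mode' option is set to disable/enforce/complain,
    # the generated context now also carries
    # {'aa_profile': 'usr.bin.cinder-backup'} for templates to consume.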
diff --git a/hooks/charmhelpers/contrib/openstack/ip.py b/hooks/charmhelpers/contrib/openstack/ip.py
index 0fd3ac2..d1476b1 100644
--- a/hooks/charmhelpers/contrib/openstack/ip.py
+++ b/hooks/charmhelpers/contrib/openstack/ip.py
@@ -30,6 +30,7 @@ from charmhelpers.contrib.hahelpers.cluster import is_clustered
30PUBLIC = 'public' 30PUBLIC = 'public'
31INTERNAL = 'int' 31INTERNAL = 'int'
32ADMIN = 'admin' 32ADMIN = 'admin'
33ACCESS = 'access'
33 34
34ADDRESS_MAP = { 35ADDRESS_MAP = {
35 PUBLIC: { 36 PUBLIC: {
@@ -49,7 +50,13 @@ ADDRESS_MAP = {
49 'config': 'os-admin-network', 50 'config': 'os-admin-network',
50 'fallback': 'private-address', 51 'fallback': 'private-address',
51 'override': 'os-admin-hostname', 52 'override': 'os-admin-hostname',
52 } 53 },
54 ACCESS: {
55 'binding': 'access',
56 'config': 'access-network',
57 'fallback': 'private-address',
58 'override': 'os-access-hostname',
59 },
53} 60}
54 61
55 62
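The new ACCESS entry gives charms a fourth resolvable endpoint type alongside public, internal and admin. A sketch using the module's companion resolve_address() helper; whether a given charm actually declares an 'access' extra-binding or an 'access-network' option is charm-specific:

    from charmhelpers.contrib.openstack.ip import ACCESS, resolve_address

    # Resolution mirrors the other endpoint types: the 'access' network
    # binding, then the 'access-network' config option, falling back to the
    # unit's private-address; 'os-access-hostname' can override the result.
    access_addr = resolve_address(endpoint_type=ACCESS)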
diff --git a/hooks/charmhelpers/contrib/openstack/neutron.py b/hooks/charmhelpers/contrib/openstack/neutron.py
index 03427b4..08c86fa 100644
--- a/hooks/charmhelpers/contrib/openstack/neutron.py
+++ b/hooks/charmhelpers/contrib/openstack/neutron.py
@@ -245,6 +245,12 @@ def neutron_plugins():
245 'networking_plumgrid.neutron.plugins.plugin.NeutronPluginPLUMgridV2') 245 'networking_plumgrid.neutron.plugins.plugin.NeutronPluginPLUMgridV2')
246 plugins['plumgrid']['server_packages'].remove( 246 plugins['plumgrid']['server_packages'].remove(
247 'neutron-plugin-plumgrid') 247 'neutron-plugin-plumgrid')
248 if release >= 'mitaka':
249 plugins['nsx']['server_packages'].remove('neutron-plugin-vmware')
250 plugins['nsx']['server_packages'].append('python-vmware-nsx')
251 plugins['nsx']['config'] = '/etc/neutron/nsx.ini'
252 plugins['vsp']['driver'] = (
253 'nuage_neutron.plugins.nuage.plugin.NuagePlugin')
248 return plugins 254 return plugins
249 255
250 256
diff --git a/hooks/charmhelpers/contrib/openstack/utils.py b/hooks/charmhelpers/contrib/openstack/utils.py
index 9d3e3d8..9abd4c3 100644
--- a/hooks/charmhelpers/contrib/openstack/utils.py
+++ b/hooks/charmhelpers/contrib/openstack/utils.py
@@ -51,7 +51,8 @@ from charmhelpers.core.hookenv import (
51 relation_set, 51 relation_set,
52 service_name, 52 service_name,
53 status_set, 53 status_set,
54 hook_name 54 hook_name,
55 application_version_set,
55) 56)
56 57
57from charmhelpers.contrib.storage.linux.lvm import ( 58from charmhelpers.contrib.storage.linux.lvm import (
@@ -80,7 +81,12 @@ from charmhelpers.core.host import (
80 service_resume, 81 service_resume,
81 restart_on_change_helper, 82 restart_on_change_helper,
82) 83)
83from charmhelpers.fetch import apt_install, apt_cache, install_remote 84from charmhelpers.fetch import (
85 apt_install,
86 apt_cache,
87 install_remote,
88 get_upstream_version
89)
84from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk 90from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk
85from charmhelpers.contrib.storage.linux.loopback import ensure_loopback_device 91from charmhelpers.contrib.storage.linux.loopback import ensure_loopback_device
86from charmhelpers.contrib.openstack.exceptions import OSContextError 92from charmhelpers.contrib.openstack.exceptions import OSContextError
@@ -145,7 +151,7 @@ SWIFT_CODENAMES = OrderedDict([
145 ('mitaka', 151 ('mitaka',
146 ['2.5.0', '2.6.0', '2.7.0']), 152 ['2.5.0', '2.6.0', '2.7.0']),
147 ('newton', 153 ('newton',
148 ['2.8.0', '2.9.0']), 154 ['2.8.0', '2.9.0', '2.10.0']),
149]) 155])
150 156
151# >= Liberty version->codename mapping 157# >= Liberty version->codename mapping
@@ -1889,3 +1895,14 @@ def config_flags_parser(config_flags):
1889 flags[key.strip(post_strippers)] = value.rstrip(post_strippers) 1895 flags[key.strip(post_strippers)] = value.rstrip(post_strippers)
1890 1896
1891 return flags 1897 return flags
1898
1899
1900def os_application_version_set(package):
1901 '''Set version of application for Juju 2.0 and later'''
1902 application_version = get_upstream_version(package)
1903 # NOTE(jamespage) if not able to figure out package version, fallback to
1904 # openstack codename version detection.
1905 if not application_version:
1906 application_version_set(os_release(package))
1907 else:
1908 application_version_set(application_version)
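A sketch of the typical call site for the new helper, for example at the end of a status-assessment path; the package name is an assumption for this charm:

    from charmhelpers.contrib.openstack.utils import os_application_version_set

    def assess_status():
        # ... existing status handling ...
        # Publishes the installed package version as the Juju 2.0 workload
        # version; falls back to the OpenStack release codename when the
        # package version cannot be determined.
        os_application_version_set('cinder-backup')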
diff --git a/hooks/charmhelpers/contrib/storage/linux/ceph.py b/hooks/charmhelpers/contrib/storage/linux/ceph.py
index beff270..edb536c 100644
--- a/hooks/charmhelpers/contrib/storage/linux/ceph.py
+++ b/hooks/charmhelpers/contrib/storage/linux/ceph.py
@@ -87,6 +87,7 @@ clog to syslog = {use_syslog}
87DEFAULT_PGS_PER_OSD_TARGET = 100 87DEFAULT_PGS_PER_OSD_TARGET = 100
88DEFAULT_POOL_WEIGHT = 10.0 88DEFAULT_POOL_WEIGHT = 10.0
89LEGACY_PG_COUNT = 200 89LEGACY_PG_COUNT = 200
90DEFAULT_MINIMUM_PGS = 2
90 91
91 92
92def validator(value, valid_type, valid_range=None): 93def validator(value, valid_type, valid_range=None):
@@ -266,6 +267,11 @@ class Pool(object):
266 target_pgs_per_osd = config('pgs-per-osd') or DEFAULT_PGS_PER_OSD_TARGET 267 target_pgs_per_osd = config('pgs-per-osd') or DEFAULT_PGS_PER_OSD_TARGET
267 num_pg = (target_pgs_per_osd * osd_count * percent_data) // pool_size 268 num_pg = (target_pgs_per_osd * osd_count * percent_data) // pool_size
268 269
270 # NOTE: ensure a sane minimum number of PGS otherwise we don't get any
271 # reasonable data distribution in minimal OSD configurations
272 if num_pg < DEFAULT_MINIMUM_PGS:
273 num_pg = DEFAULT_MINIMUM_PGS
274
269 # The CRUSH algorithm has a slight optimization for placement groups 275 # The CRUSH algorithm has a slight optimization for placement groups
270 # with powers of 2 so find the nearest power of 2. If the nearest 276 # with powers of 2 so find the nearest power of 2. If the nearest
271 # power of 2 is more than 25% below the original value, the next 277 # power of 2 is more than 25% below the original value, the next
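A standalone sketch of the sizing rule above (not the real Pool method; the later power-of-two rounding is omitted). It assumes percent_data has already been normalised to a fraction at this point, and the example values are illustrative:

    DEFAULT_PGS_PER_OSD_TARGET = 100
    DEFAULT_MINIMUM_PGS = 2

    def suggested_pg_num(osd_count, percent_data, pool_size):
        num_pg = (DEFAULT_PGS_PER_OSD_TARGET * osd_count * percent_data) // pool_size
        # Small pools on minimal clusters would otherwise round down to 0 or 1
        # placement groups, giving essentially no data distribution.
        return max(num_pg, DEFAULT_MINIMUM_PGS)

    # 3 OSDs, a pool weighted at 1% of the data, 3 replicas:
    # (100 * 3 * 0.01) // 3 == 1.0, clamped up to 2.
    print(suggested_pg_num(3, 0.01, 3))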
diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py
index 48b2b9d..996e81c 100644
--- a/hooks/charmhelpers/core/hookenv.py
+++ b/hooks/charmhelpers/core/hookenv.py
@@ -843,6 +843,20 @@ def translate_exc(from_exc, to_exc):
843 return inner_translate_exc1 843 return inner_translate_exc1
844 844
845 845
846def application_version_set(version):
847 """Charm authors may trigger this command from any hook to output what
848 version of the application is running. This could be a package version,
849 for instance postgres version 9.5. It could also be a build number or
850 version control revision identifier, for instance git sha 6fb7ba68. """
851
852 cmd = ['application-version-set']
853 cmd.append(version)
854 try:
855 subprocess.check_call(cmd)
856 except OSError:
857 log("Application Version: {}".format(version))
858
859
846@translate_exc(from_exc=OSError, to_exc=NotImplementedError) 860@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
847def is_leader(): 861def is_leader():
848 """Does the current unit hold the juju leadership 862 """Does the current unit hold the juju leadership
diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py
index 5306859..0f1b2f3 100644
--- a/hooks/charmhelpers/core/host.py
+++ b/hooks/charmhelpers/core/host.py
@@ -30,13 +30,29 @@ import subprocess
30import hashlib 30import hashlib
31import functools 31import functools
32import itertools 32import itertools
33from contextlib import contextmanager
34from collections import OrderedDict
35
36import six 33import six
37 34
35from contextlib import contextmanager
36from collections import OrderedDict
38from .hookenv import log 37from .hookenv import log
39from .fstab import Fstab 38from .fstab import Fstab
39from charmhelpers.osplatform import get_platform
40
41__platform__ = get_platform()
42if __platform__ == "ubuntu":
43 from charmhelpers.core.host_factory.ubuntu import (
44 service_available,
45 add_new_group,
46 lsb_release,
47 cmp_pkgrevno,
48 ) # flake8: noqa -- ignore F401 for this import
49elif __platform__ == "centos":
50 from charmhelpers.core.host_factory.centos import (
51 service_available,
52 add_new_group,
53 lsb_release,
54 cmp_pkgrevno,
55 ) # flake8: noqa -- ignore F401 for this import
40 56
41 57
42def service_start(service_name): 58def service_start(service_name):
@@ -144,8 +160,11 @@ def service_running(service_name):
144 return False 160 return False
145 else: 161 else:
146 # This works for upstart scripts where the 'service' command 162 # This works for upstart scripts where the 'service' command
147 # returns a consistent string to represent running 'start/running' 163 # returns a consistent string to represent running
148 if "start/running" in output: 164 # 'start/running'
165 if ("start/running" in output or
166 "is running" in output or
167 "up and running" in output):
149 return True 168 return True
150 elif os.path.exists(_INIT_D_CONF.format(service_name)): 169 elif os.path.exists(_INIT_D_CONF.format(service_name)):
151 # Check System V scripts init script return codes 170 # Check System V scripts init script return codes
@@ -153,18 +172,6 @@ def service_running(service_name):
153 return False 172 return False
154 173
155 174
156def service_available(service_name):
157 """Determine whether a system service is available"""
158 try:
159 subprocess.check_output(
160 ['service', service_name, 'status'],
161 stderr=subprocess.STDOUT).decode('UTF-8')
162 except subprocess.CalledProcessError as e:
163 return b'unrecognized service' not in e.output
164 else:
165 return True
166
167
168SYSTEMD_SYSTEM = '/run/systemd/system' 175SYSTEMD_SYSTEM = '/run/systemd/system'
169 176
170 177
@@ -173,8 +180,9 @@ def init_is_systemd():
173 return os.path.isdir(SYSTEMD_SYSTEM) 180 return os.path.isdir(SYSTEMD_SYSTEM)
174 181
175 182
176def adduser(username, password=None, shell='/bin/bash', system_user=False, 183def adduser(username, password=None, shell='/bin/bash',
177 primary_group=None, secondary_groups=None, uid=None, home_dir=None): 184 system_user=False, primary_group=None,
185 secondary_groups=None, uid=None, home_dir=None):
178 """Add a user to the system. 186 """Add a user to the system.
179 187
180 Will log but otherwise succeed if the user already exists. 188 Will log but otherwise succeed if the user already exists.
@@ -286,17 +294,7 @@ def add_group(group_name, system_group=False, gid=None):
286 log('group with gid {0} already exists!'.format(gid)) 294 log('group with gid {0} already exists!'.format(gid))
287 except KeyError: 295 except KeyError:
288 log('creating group {0}'.format(group_name)) 296 log('creating group {0}'.format(group_name))
289 cmd = ['addgroup'] 297 add_new_group(group_name, system_group, gid)
290 if gid:
291 cmd.extend(['--gid', str(gid)])
292 if system_group:
293 cmd.append('--system')
294 else:
295 cmd.extend([
296 '--group',
297 ])
298 cmd.append(group_name)
299 subprocess.check_call(cmd)
300 group_info = grp.getgrnam(group_name) 298 group_info = grp.getgrnam(group_name)
301 return group_info 299 return group_info
302 300
@@ -541,16 +539,6 @@ def restart_on_change_helper(lambda_f, restart_map, stopstart=False,
541 return r 539 return r
542 540
543 541
544def lsb_release():
545 """Return /etc/lsb-release in a dict"""
546 d = {}
547 with open('/etc/lsb-release', 'r') as lsb:
548 for l in lsb:
549 k, v = l.split('=')
550 d[k.strip()] = v.strip()
551 return d
552
553
554def pwgen(length=None): 542def pwgen(length=None):
555 """Generate a random pasword.""" 543 """Generate a random pasword."""
556 if length is None: 544 if length is None:
@@ -674,25 +662,6 @@ def get_nic_hwaddr(nic):
674 return hwaddr 662 return hwaddr
675 663
676 664
677def cmp_pkgrevno(package, revno, pkgcache=None):
678 """Compare supplied revno with the revno of the installed package
679
680 * 1 => Installed revno is greater than supplied arg
681 * 0 => Installed revno is the same as supplied arg
682 * -1 => Installed revno is less than supplied arg
683
684 This function imports apt_cache function from charmhelpers.fetch if
685 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
686 you call this function, or pass an apt_pkg.Cache() instance.
687 """
688 import apt_pkg
689 if not pkgcache:
690 from charmhelpers.fetch import apt_cache
691 pkgcache = apt_cache()
692 pkg = pkgcache[package]
693 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
694
695
696@contextmanager 665@contextmanager
697def chdir(directory): 666def chdir(directory):
698 """Change the current working directory to a different directory for a code 667 """Change the current working directory to a different directory for a code
diff --git a/hooks/charmhelpers/core/host_factory/__init__.py b/hooks/charmhelpers/core/host_factory/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hooks/charmhelpers/core/host_factory/__init__.py
diff --git a/hooks/charmhelpers/core/host_factory/centos.py b/hooks/charmhelpers/core/host_factory/centos.py
new file mode 100644
index 0000000..902d469
--- /dev/null
+++ b/hooks/charmhelpers/core/host_factory/centos.py
@@ -0,0 +1,56 @@
1import subprocess
2import yum
3import os
4
5
6def service_available(service_name):
7 # """Determine whether a system service is available."""
8 if os.path.isdir('/run/systemd/system'):
9 cmd = ['systemctl', 'is-enabled', service_name]
10 else:
11 cmd = ['service', service_name, 'is-enabled']
12 return subprocess.call(cmd) == 0
13
14
15def add_new_group(group_name, system_group=False, gid=None):
16 cmd = ['groupadd']
17 if gid:
18 cmd.extend(['--gid', str(gid)])
19 if system_group:
20 cmd.append('-r')
21 cmd.append(group_name)
22 subprocess.check_call(cmd)
23
24
25def lsb_release():
26 """Return /etc/os-release in a dict."""
27 d = {}
28 with open('/etc/os-release', 'r') as lsb:
29 for l in lsb:
30 s = l.split('=')
31 if len(s) != 2:
32 continue
33 d[s[0].strip()] = s[1].strip()
34 return d
35
36
37def cmp_pkgrevno(package, revno, pkgcache=None):
38 """Compare supplied revno with the revno of the installed package.
39
40 * 1 => Installed revno is greater than supplied arg
41 * 0 => Installed revno is the same as supplied arg
42 * -1 => Installed revno is less than supplied arg
43
44 This function imports YumBase function if the pkgcache argument
45 is None.
46 """
47 if not pkgcache:
48 y = yum.YumBase()
49 packages = y.doPackageLists()
50 pkgcache = {i.Name: i.version for i in packages['installed']}
51 pkg = pkgcache[package]
52 if pkg > revno:
53 return 1
54 if pkg < revno:
55 return -1
56 return 0
diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py
new file mode 100644
index 0000000..8c66af5
--- /dev/null
+++ b/hooks/charmhelpers/core/host_factory/ubuntu.py
@@ -0,0 +1,56 @@
1import subprocess
2
3
4def service_available(service_name):
5 """Determine whether a system service is available"""
6 try:
7 subprocess.check_output(
8 ['service', service_name, 'status'],
9 stderr=subprocess.STDOUT).decode('UTF-8')
10 except subprocess.CalledProcessError as e:
11 return b'unrecognized service' not in e.output
12 else:
13 return True
14
15
16def add_new_group(group_name, system_group=False, gid=None):
17 cmd = ['addgroup']
18 if gid:
19 cmd.extend(['--gid', str(gid)])
20 if system_group:
21 cmd.append('--system')
22 else:
23 cmd.extend([
24 '--group',
25 ])
26 cmd.append(group_name)
27 subprocess.check_call(cmd)
28
29
30def lsb_release():
31 """Return /etc/lsb-release in a dict"""
32 d = {}
33 with open('/etc/lsb-release', 'r') as lsb:
34 for l in lsb:
35 k, v = l.split('=')
36 d[k.strip()] = v.strip()
37 return d
38
39
40def cmp_pkgrevno(package, revno, pkgcache=None):
41 """Compare supplied revno with the revno of the installed package.
42
43 * 1 => Installed revno is greater than supplied arg
44 * 0 => Installed revno is the same as supplied arg
45 * -1 => Installed revno is less than supplied arg
46
47 This function imports apt_cache function from charmhelpers.fetch if
48 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
49 you call this function, or pass an apt_pkg.Cache() instance.
50 """
51 import apt_pkg
52 if not pkgcache:
53 from charmhelpers.fetch import apt_cache
54 pkgcache = apt_cache()
55 pkg = pkgcache[package]
56 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
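Callers keep importing these helpers from charmhelpers.core.host; the platform factory module is selected behind the scenes. A usage sketch with an illustrative package and version:

    from charmhelpers.core.host import cmp_pkgrevno

    # True when the installed ceph-common on this unit is at least 10.2.0.
    jewel_or_later = cmp_pkgrevno('ceph-common', '10.2.0') >= 0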
diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py
index b166efe..2d40452 100644
--- a/hooks/charmhelpers/core/kernel.py
+++ b/hooks/charmhelpers/core/kernel.py
@@ -15,15 +15,28 @@
15# See the License for the specific language governing permissions and 15# See the License for the specific language governing permissions and
16# limitations under the License. 16# limitations under the License.
17 17
18__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" 18import re
19import subprocess
19 20
21from charmhelpers.osplatform import get_platform
20from charmhelpers.core.hookenv import ( 22from charmhelpers.core.hookenv import (
21 log, 23 log,
22 INFO 24 INFO
23) 25)
24 26
25from subprocess import check_call, check_output 27__platform__ = get_platform()
26import re 28if __platform__ == "ubuntu":
29 from charmhelpers.core.kernel_factory.ubuntu import (
30 persistent_modprobe,
31 update_initramfs,
32 ) # flake8: noqa -- ignore F401 for this import
33elif __platform__ == "centos":
34 from charmhelpers.core.kernel_factory.centos import (
35 persistent_modprobe,
36 update_initramfs,
37 ) # flake8: noqa -- ignore F401 for this import
38
39__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
27 40
28 41
29def modprobe(module, persist=True): 42def modprobe(module, persist=True):
@@ -32,11 +45,9 @@ def modprobe(module, persist=True):
32 45
33 log('Loading kernel module %s' % module, level=INFO) 46 log('Loading kernel module %s' % module, level=INFO)
34 47
35 check_call(cmd) 48 subprocess.check_call(cmd)
36 if persist: 49 if persist:
37 with open('/etc/modules', 'r+') as modules: 50 persistent_modprobe(module)
38 if module not in modules.read():
39 modules.write(module)
40 51
41 52
42def rmmod(module, force=False): 53def rmmod(module, force=False):
@@ -46,21 +57,16 @@ def rmmod(module, force=False):
46 cmd.append('-f') 57 cmd.append('-f')
47 cmd.append(module) 58 cmd.append(module)
48 log('Removing kernel module %s' % module, level=INFO) 59 log('Removing kernel module %s' % module, level=INFO)
49 return check_call(cmd) 60 return subprocess.check_call(cmd)
50 61
51 62
52def lsmod(): 63def lsmod():
53 """Shows what kernel modules are currently loaded""" 64 """Shows what kernel modules are currently loaded"""
54 return check_output(['lsmod'], 65 return subprocess.check_output(['lsmod'],
55 universal_newlines=True) 66 universal_newlines=True)
56 67
57 68
58def is_module_loaded(module): 69def is_module_loaded(module):
59 """Checks if a kernel module is already loaded""" 70 """Checks if a kernel module is already loaded"""
60 matches = re.findall('^%s[ ]+' % module, lsmod(), re.M) 71 matches = re.findall('^%s[ ]+' % module, lsmod(), re.M)
61 return len(matches) > 0 72 return len(matches) > 0
62
63
64def update_initramfs(version='all'):
65 """Updates an initramfs image"""
66 return check_call(["update-initramfs", "-k", version, "-u"])
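Charm code continues to call charmhelpers.core.kernel directly; persistence is now delegated to the per-platform kernel_factory module (/etc/modules on Ubuntu, /etc/rc.modules on CentOS). A usage sketch with an illustrative module name:

    from charmhelpers.core.kernel import is_module_loaded, modprobe

    if not is_module_loaded('dm_thin_pool'):
        # Loads the module now and records it for loading at boot.
        modprobe('dm_thin_pool', persist=True)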
diff --git a/hooks/charmhelpers/core/kernel_factory/__init__.py b/hooks/charmhelpers/core/kernel_factory/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hooks/charmhelpers/core/kernel_factory/__init__.py
diff --git a/hooks/charmhelpers/core/kernel_factory/centos.py b/hooks/charmhelpers/core/kernel_factory/centos.py
new file mode 100644
index 0000000..1c402c1
--- /dev/null
+++ b/hooks/charmhelpers/core/kernel_factory/centos.py
@@ -0,0 +1,17 @@
1import subprocess
2import os
3
4
5def persistent_modprobe(module):
6 """Load a kernel module and configure for auto-load on reboot."""
7 if not os.path.exists('/etc/rc.modules'):
8 open('/etc/rc.modules', 'a')
9 os.chmod('/etc/rc.modules', 111)
10 with open('/etc/rc.modules', 'r+') as modules:
11 if module not in modules.read():
12 modules.write('modprobe %s\n' % module)
13
14
15def update_initramfs(version='all'):
16 """Updates an initramfs image."""
17 return subprocess.check_call(["dracut", "-f", version])
diff --git a/hooks/charmhelpers/core/kernel_factory/ubuntu.py b/hooks/charmhelpers/core/kernel_factory/ubuntu.py
new file mode 100644
index 0000000..2155964
--- /dev/null
+++ b/hooks/charmhelpers/core/kernel_factory/ubuntu.py
@@ -0,0 +1,13 @@
1import subprocess
2
3
4def persistent_modprobe(module):
5 """Load a kernel module and configure for auto-load on reboot."""
6 with open('/etc/modules', 'r+') as modules:
7 if module not in modules.read():
8 modules.write(module)
9
10
11def update_initramfs(version='all'):
12 """Updates an initramfs image."""
13 return subprocess.check_call(["update-initramfs", "-k", version, "-u"])
diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py
index 52eaf82..ec5e0fe 100644
--- a/hooks/charmhelpers/fetch/__init__.py
+++ b/hooks/charmhelpers/fetch/__init__.py
@@ -13,18 +13,12 @@
13# limitations under the License. 13# limitations under the License.
14 14
15import importlib 15import importlib
16from tempfile import NamedTemporaryFile 16from charmhelpers.osplatform import get_platform
17import time
18from yaml import safe_load 17from yaml import safe_load
19from charmhelpers.core.host import (
20 lsb_release
21)
22import subprocess
23from charmhelpers.core.hookenv import ( 18from charmhelpers.core.hookenv import (
24 config, 19 config,
25 log, 20 log,
26) 21)
27import os
28 22
29import six 23import six
30if six.PY3: 24if six.PY3:
@@ -33,87 +27,6 @@ else:
33 from urlparse import urlparse, urlunparse 27 from urlparse import urlparse, urlunparse
34 28
35 29
36CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
37deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
38"""
39PROPOSED_POCKET = """# Proposed
40deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
41"""
42CLOUD_ARCHIVE_POCKETS = {
43 # Folsom
44 'folsom': 'precise-updates/folsom',
45 'precise-folsom': 'precise-updates/folsom',
46 'precise-folsom/updates': 'precise-updates/folsom',
47 'precise-updates/folsom': 'precise-updates/folsom',
48 'folsom/proposed': 'precise-proposed/folsom',
49 'precise-folsom/proposed': 'precise-proposed/folsom',
50 'precise-proposed/folsom': 'precise-proposed/folsom',
51 # Grizzly
52 'grizzly': 'precise-updates/grizzly',
53 'precise-grizzly': 'precise-updates/grizzly',
54 'precise-grizzly/updates': 'precise-updates/grizzly',
55 'precise-updates/grizzly': 'precise-updates/grizzly',
56 'grizzly/proposed': 'precise-proposed/grizzly',
57 'precise-grizzly/proposed': 'precise-proposed/grizzly',
58 'precise-proposed/grizzly': 'precise-proposed/grizzly',
59 # Havana
60 'havana': 'precise-updates/havana',
61 'precise-havana': 'precise-updates/havana',
62 'precise-havana/updates': 'precise-updates/havana',
63 'precise-updates/havana': 'precise-updates/havana',
64 'havana/proposed': 'precise-proposed/havana',
65 'precise-havana/proposed': 'precise-proposed/havana',
66 'precise-proposed/havana': 'precise-proposed/havana',
67 # Icehouse
68 'icehouse': 'precise-updates/icehouse',
69 'precise-icehouse': 'precise-updates/icehouse',
70 'precise-icehouse/updates': 'precise-updates/icehouse',
71 'precise-updates/icehouse': 'precise-updates/icehouse',
72 'icehouse/proposed': 'precise-proposed/icehouse',
73 'precise-icehouse/proposed': 'precise-proposed/icehouse',
74 'precise-proposed/icehouse': 'precise-proposed/icehouse',
75 # Juno
76 'juno': 'trusty-updates/juno',
77 'trusty-juno': 'trusty-updates/juno',
78 'trusty-juno/updates': 'trusty-updates/juno',
79 'trusty-updates/juno': 'trusty-updates/juno',
80 'juno/proposed': 'trusty-proposed/juno',
81 'trusty-juno/proposed': 'trusty-proposed/juno',
82 'trusty-proposed/juno': 'trusty-proposed/juno',
83 # Kilo
84 'kilo': 'trusty-updates/kilo',
85 'trusty-kilo': 'trusty-updates/kilo',
86 'trusty-kilo/updates': 'trusty-updates/kilo',
87 'trusty-updates/kilo': 'trusty-updates/kilo',
88 'kilo/proposed': 'trusty-proposed/kilo',
89 'trusty-kilo/proposed': 'trusty-proposed/kilo',
90 'trusty-proposed/kilo': 'trusty-proposed/kilo',
91 # Liberty
92 'liberty': 'trusty-updates/liberty',
93 'trusty-liberty': 'trusty-updates/liberty',
94 'trusty-liberty/updates': 'trusty-updates/liberty',
95 'trusty-updates/liberty': 'trusty-updates/liberty',
96 'liberty/proposed': 'trusty-proposed/liberty',
97 'trusty-liberty/proposed': 'trusty-proposed/liberty',
98 'trusty-proposed/liberty': 'trusty-proposed/liberty',
99 # Mitaka
100 'mitaka': 'trusty-updates/mitaka',
101 'trusty-mitaka': 'trusty-updates/mitaka',
102 'trusty-mitaka/updates': 'trusty-updates/mitaka',
103 'trusty-updates/mitaka': 'trusty-updates/mitaka',
104 'mitaka/proposed': 'trusty-proposed/mitaka',
105 'trusty-mitaka/proposed': 'trusty-proposed/mitaka',
106 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',
107 # Newton
108 'newton': 'xenial-updates/newton',
109 'xenial-newton': 'xenial-updates/newton',
110 'xenial-newton/updates': 'xenial-updates/newton',
111 'xenial-updates/newton': 'xenial-updates/newton',
112 'newton/proposed': 'xenial-proposed/newton',
113 'xenial-newton/proposed': 'xenial-proposed/newton',
114 'xenial-proposed/newton': 'xenial-proposed/newton',
115}
116
117# The order of this list is very important. Handlers should be listed in from 30# The order of this list is very important. Handlers should be listed in from
118# least- to most-specific URL matching. 31# least- to most-specific URL matching.
119FETCH_HANDLERS = ( 32FETCH_HANDLERS = (
@@ -122,10 +35,6 @@ FETCH_HANDLERS = (
122 'charmhelpers.fetch.giturl.GitUrlFetchHandler', 35 'charmhelpers.fetch.giturl.GitUrlFetchHandler',
123) 36)
124 37
125APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
126APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
127APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
128
129 38
130class SourceConfigError(Exception): 39class SourceConfigError(Exception):
131 pass 40 pass
@@ -163,180 +72,38 @@ class BaseFetchHandler(object):
163 return urlunparse(parts) 72 return urlunparse(parts)
164 73
165 74
166def filter_installed_packages(packages): 75__platform__ = get_platform()
167 """Returns a list of packages that require installation""" 76module = "charmhelpers.fetch.%s" % __platform__
168 cache = apt_cache() 77fetch = importlib.import_module(module)
169 _pkgs = []
170 for package in packages:
171 try:
172 p = cache[package]
173 p.current_ver or _pkgs.append(package)
174 except KeyError:
175 log('Package {} has no installation candidate.'.format(package),
176 level='WARNING')
177 _pkgs.append(package)
178 return _pkgs
179
180
181def apt_cache(in_memory=True, progress=None):
182 """Build and return an apt cache"""
183 from apt import apt_pkg
184 apt_pkg.init()
185 if in_memory:
186 apt_pkg.config.set("Dir::Cache::pkgcache", "")
187 apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
188 return apt_pkg.Cache(progress)
189
190
191def apt_install(packages, options=None, fatal=False):
192 """Install one or more packages"""
193 if options is None:
194 options = ['--option=Dpkg::Options::=--force-confold']
195
196 cmd = ['apt-get', '--assume-yes']
197 cmd.extend(options)
198 cmd.append('install')
199 if isinstance(packages, six.string_types):
200 cmd.append(packages)
201 else:
202 cmd.extend(packages)
203 log("Installing {} with options: {}".format(packages,
204 options))
205 _run_apt_command(cmd, fatal)
206
207
208def apt_upgrade(options=None, fatal=False, dist=False):
209 """Upgrade all packages"""
210 if options is None:
211 options = ['--option=Dpkg::Options::=--force-confold']
212
213 cmd = ['apt-get', '--assume-yes']
214 cmd.extend(options)
215 if dist:
216 cmd.append('dist-upgrade')
217 else:
218 cmd.append('upgrade')
219 log("Upgrading with options: {}".format(options))
220 _run_apt_command(cmd, fatal)
221
222
223def apt_update(fatal=False):
224 """Update local apt cache"""
225 cmd = ['apt-get', 'update']
226 _run_apt_command(cmd, fatal)
227
228
229def apt_purge(packages, fatal=False):
230 """Purge one or more packages"""
231 cmd = ['apt-get', '--assume-yes', 'purge']
232 if isinstance(packages, six.string_types):
233 cmd.append(packages)
234 else:
235 cmd.extend(packages)
236 log("Purging {}".format(packages))
237 _run_apt_command(cmd, fatal)
238
239
240def apt_mark(packages, mark, fatal=False):
241 """Flag one or more packages using apt-mark"""
242 log("Marking {} as {}".format(packages, mark))
243 cmd = ['apt-mark', mark]
244 if isinstance(packages, six.string_types):
245 cmd.append(packages)
246 else:
247 cmd.extend(packages)
248
249 if fatal:
250 subprocess.check_call(cmd, universal_newlines=True)
251 else:
252 subprocess.call(cmd, universal_newlines=True)
253
254
255def apt_hold(packages, fatal=False):
256 return apt_mark(packages, 'hold', fatal=fatal)
257
258
259def apt_unhold(packages, fatal=False):
260 return apt_mark(packages, 'unhold', fatal=fatal)
261
262 78
263def add_source(source, key=None): 79filter_installed_packages = fetch.filter_installed_packages
264 """Add a package source to this system. 80install = fetch.install
81upgrade = fetch.upgrade
82update = fetch.update
83purge = fetch.purge
84add_source = fetch.add_source
265 85
266 @param source: a URL or sources.list entry, as supported by 86if __platform__ == "ubuntu":
267 add-apt-repository(1). Examples:: 87 apt_cache = fetch.apt_cache
268 88 apt_install = fetch.install
269 ppa:charmers/example 89 apt_update = fetch.update
270 deb https://stub:key@private.example.com/ubuntu trusty main 90 apt_upgrade = fetch.upgrade
271 91 apt_purge = fetch.purge
272 In addition: 92 apt_mark = fetch.apt_mark
273 'proposed:' may be used to enable the standard 'proposed' 93 apt_hold = fetch.apt_hold
274 pocket for the release. 94 apt_unhold = fetch.apt_unhold
275 'cloud:' may be used to activate official cloud archive pockets, 95 get_upstream_version = fetch.get_upstream_version
276 such as 'cloud:icehouse' 96elif __platform__ == "centos":
277 'distro' may be used as a noop 97 yum_search = fetch.yum_search
278
279 @param key: A key to be added to the system's APT keyring and used
280 to verify the signatures on packages. Ideally, this should be an
281 ASCII format GPG public key including the block headers. A GPG key
282 id may also be used, but be aware that only insecure protocols are
283 available to retrieve the actual public key from a public keyserver
284 placing your Juju environment at risk. ppa and cloud archive keys
285 are securely added automtically, so sould not be provided.
286 """
287 if source is None:
288 log('Source is not present. Skipping')
289 return
290
291 if (source.startswith('ppa:') or
292 source.startswith('http') or
293 source.startswith('deb ') or
294 source.startswith('cloud-archive:')):
295 subprocess.check_call(['add-apt-repository', '--yes', source])
296 elif source.startswith('cloud:'):
297 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
298 fatal=True)
299 pocket = source.split(':')[-1]
300 if pocket not in CLOUD_ARCHIVE_POCKETS:
301 raise SourceConfigError(
302 'Unsupported cloud: source option %s' %
303 pocket)
304 actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
305 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
306 apt.write(CLOUD_ARCHIVE.format(actual_pocket))
307 elif source == 'proposed':
308 release = lsb_release()['DISTRIB_CODENAME']
309 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
310 apt.write(PROPOSED_POCKET.format(release))
311 elif source == 'distro':
312 pass
313 else:
314 log("Unknown source: {!r}".format(source))
315
316 if key:
317 if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
318 with NamedTemporaryFile('w+') as key_file:
319 key_file.write(key)
320 key_file.flush()
321 key_file.seek(0)
322 subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
323 else:
324 # Note that hkp: is in no way a secure protocol. Using a
325 # GPG key id is pointless from a security POV unless you
326 # absolutely trust your network and DNS.
327 subprocess.check_call(['apt-key', 'adv', '--keyserver',
328 'hkp://keyserver.ubuntu.com:80', '--recv',
329 key])
330 98
331 99
332def configure_sources(update=False, 100def configure_sources(update=False,
333 sources_var='install_sources', 101 sources_var='install_sources',
334 keys_var='install_keys'): 102 keys_var='install_keys'):
335 """ 103 """Configure multiple sources from charm configuration.
336 Configure multiple sources from charm configuration.
337 104
338 The lists are encoded as yaml fragments in the configuration. 105 The lists are encoded as yaml fragments in the configuration.
339 The frament needs to be included as a string. Sources and their 106 The fragment needs to be included as a string. Sources and their
340 corresponding keys are of the types supported by add_source(). 107 corresponding keys are of the types supported by add_source().
341 108
342 Example config: 109 Example config:
@@ -368,12 +135,11 @@ def configure_sources(update=False,
368 for source, key in zip(sources, keys): 135 for source, key in zip(sources, keys):
369 add_source(source, key) 136 add_source(source, key)
370 if update: 137 if update:
371 apt_update(fatal=True) 138 fetch.update(fatal=True)
372 139
373 140
374def install_remote(source, *args, **kwargs): 141def install_remote(source, *args, **kwargs):
375 """ 142 """Install a file tree from a remote source.
376 Install a file tree from a remote source
377 143
378 The specified source should be a url of the form: 144 The specified source should be a url of the form:
379 scheme://[host]/path[#[option=value][&...]] 145 scheme://[host]/path[#[option=value][&...]]
@@ -406,6 +172,7 @@ def install_remote(source, *args, **kwargs):
406 172
407 173
408def install_from_config(config_var_name): 174def install_from_config(config_var_name):
175 """Install a file from config."""
409 charm_config = config() 176 charm_config = config()
410 source = charm_config[config_var_name] 177 source = charm_config[config_var_name]
411 return install_remote(source) 178 return install_remote(source)
@@ -428,40 +195,3 @@ def plugins(fetch_handlers=None):
428 log("FetchHandler {} not found, skipping plugin".format( 195 log("FetchHandler {} not found, skipping plugin".format(
429 handler_name)) 196 handler_name))
430 return plugin_list 197 return plugin_list
431
432
433def _run_apt_command(cmd, fatal=False):
434 """
435 Run an APT command, checking output and retrying if the fatal flag is set
436 to True.
437
438 :param: cmd: str: The apt command to run.
439 :param: fatal: bool: Whether the command's output should be checked and
440 retried.
441 """
442 env = os.environ.copy()
443
444 if 'DEBIAN_FRONTEND' not in env:
445 env['DEBIAN_FRONTEND'] = 'noninteractive'
446
447 if fatal:
448 retry_count = 0
449 result = None
450
451 # If the command is considered "fatal", we need to retry if the apt
452 # lock was not acquired.
453
454 while result is None or result == APT_NO_LOCK:
455 try:
456 result = subprocess.check_call(cmd, env=env)
457 except subprocess.CalledProcessError as e:
458 retry_count = retry_count + 1
459 if retry_count > APT_NO_LOCK_RETRY_COUNT:
460 raise
461 result = e.returncode
462 log("Couldn't acquire DPKG lock. Will retry in {} seconds."
463 "".format(APT_NO_LOCK_RETRY_DELAY))
464 time.sleep(APT_NO_LOCK_RETRY_DELAY)
465
466 else:
467 subprocess.call(cmd, env=env)
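With the apt-specific code moved into fetch/ubuntu.py, charm code can stay platform-neutral by importing the generic names; the apt_* aliases above remain bound on Ubuntu for compatibility. The source and package names below are illustrative:

    from charmhelpers.fetch import (
        add_source,
        filter_installed_packages,
        install,
        update,
    )

    # Interpreted per platform: a cloud-archive pocket on Ubuntu, logged as
    # an unknown source on CentOS.
    add_source('cloud:xenial-newton')
    update(fatal=True)
    install(filter_installed_packages(['ceph-common']), fatal=True)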
diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py
index b3404d8..07cd029 100644
--- a/hooks/charmhelpers/fetch/bzrurl.py
+++ b/hooks/charmhelpers/fetch/bzrurl.py
@@ -18,19 +18,20 @@ from charmhelpers.fetch import (
18 BaseFetchHandler, 18 BaseFetchHandler,
19 UnhandledSource, 19 UnhandledSource,
20 filter_installed_packages, 20 filter_installed_packages,
21 apt_install, 21 install,
22) 22)
23from charmhelpers.core.host import mkdir 23from charmhelpers.core.host import mkdir
24 24
25 25
26if filter_installed_packages(['bzr']) != []: 26if filter_installed_packages(['bzr']) != []:
27 apt_install(['bzr']) 27 install(['bzr'])
28 if filter_installed_packages(['bzr']) != []: 28 if filter_installed_packages(['bzr']) != []:
29 raise NotImplementedError('Unable to install bzr') 29 raise NotImplementedError('Unable to install bzr')
30 30
31 31
32class BzrUrlFetchHandler(BaseFetchHandler): 32class BzrUrlFetchHandler(BaseFetchHandler):
33 """Handler for bazaar branches via generic and lp URLs""" 33 """Handler for bazaar branches via generic and lp URLs."""
34
34 def can_handle(self, source): 35 def can_handle(self, source):
35 url_parts = self.parse_url(source) 36 url_parts = self.parse_url(source)
36 if url_parts.scheme not in ('bzr+ssh', 'lp', ''): 37 if url_parts.scheme not in ('bzr+ssh', 'lp', ''):
diff --git a/hooks/charmhelpers/fetch/centos.py b/hooks/charmhelpers/fetch/centos.py
new file mode 100644
index 0000000..604bbfb
--- /dev/null
+++ b/hooks/charmhelpers/fetch/centos.py
@@ -0,0 +1,171 @@
1# Copyright 2014-2015 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import subprocess
16import os
17import time
18import six
19import yum
20
21from tempfile import NamedTemporaryFile
22from charmhelpers.core.hookenv import log
23
24YUM_NO_LOCK = 1 # The return code for "couldn't acquire lock" in YUM.
 25YUM_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between yum lock checks.
26YUM_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
27
28
29def filter_installed_packages(packages):
30 """Return a list of packages that require installation."""
31 yb = yum.YumBase()
32 package_list = yb.doPackageLists()
33 temp_cache = {p.base_package_name: 1 for p in package_list['installed']}
34
35 _pkgs = [p for p in packages if not temp_cache.get(p, False)]
36 return _pkgs
37
38
39def install(packages, options=None, fatal=False):
40 """Install one or more packages."""
41 cmd = ['yum', '--assumeyes']
42 if options is not None:
43 cmd.extend(options)
44 cmd.append('install')
45 if isinstance(packages, six.string_types):
46 cmd.append(packages)
47 else:
48 cmd.extend(packages)
49 log("Installing {} with options: {}".format(packages,
50 options))
51 _run_yum_command(cmd, fatal)
52
53
54def upgrade(options=None, fatal=False, dist=False):
55 """Upgrade all packages."""
56 cmd = ['yum', '--assumeyes']
57 if options is not None:
58 cmd.extend(options)
59 cmd.append('upgrade')
60 log("Upgrading with options: {}".format(options))
61 _run_yum_command(cmd, fatal)
62
63
64def update(fatal=False):
65 """Update local yum cache."""
66 cmd = ['yum', '--assumeyes', 'update']
67 log("Update with fatal: {}".format(fatal))
68 _run_yum_command(cmd, fatal)
69
70
71def purge(packages, fatal=False):
72 """Purge one or more packages."""
73 cmd = ['yum', '--assumeyes', 'remove']
74 if isinstance(packages, six.string_types):
75 cmd.append(packages)
76 else:
77 cmd.extend(packages)
78 log("Purging {}".format(packages))
79 _run_yum_command(cmd, fatal)
80
81
82def yum_search(packages):
83 """Search for a package."""
84 output = {}
85 cmd = ['yum', 'search']
86 if isinstance(packages, six.string_types):
87 cmd.append(packages)
88 else:
89 cmd.extend(packages)
90 log("Searching for {}".format(packages))
91 result = subprocess.check_output(cmd)
92 for package in list(packages):
93 output[package] = package in result
94 return output
95
96
97def add_source(source, key=None):
98 """Add a package source to this system.
99
100 @param source: a URL with a rpm package
101
102 @param key: A key to be added to the system's keyring and used
103 to verify the signatures on packages. Ideally, this should be an
104 ASCII format GPG public key including the block headers. A GPG key
105 id may also be used, but be aware that only insecure protocols are
106 available to retrieve the actual public key from a public keyserver
107 placing your Juju environment at risk.
108 """
109 if source is None:
110 log('Source is not present. Skipping')
111 return
112
113 if source.startswith('http'):
114 directory = '/etc/yum.repos.d/'
115 for filename in os.listdir(directory):
116 with open(directory + filename, 'r') as rpm_file:
117 if source in rpm_file.read():
118 break
119 else:
120 log("Add source: {!r}".format(source))
121 # write in the charms.repo
122 with open(directory + 'Charms.repo', 'a') as rpm_file:
123 rpm_file.write('[%s]\n' % source[7:].replace('/', '_'))
124 rpm_file.write('name=%s\n' % source[7:])
125 rpm_file.write('baseurl=%s\n\n' % source)
126 else:
127 log("Unknown source: {!r}".format(source))
128
129 if key:
130 if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
131 with NamedTemporaryFile('w+') as key_file:
132 key_file.write(key)
133 key_file.flush()
134 key_file.seek(0)
135 subprocess.check_call(['rpm', '--import', key_file.name])
136 else:
137 subprocess.check_call(['rpm', '--import', key])
138
139
140def _run_yum_command(cmd, fatal=False):
141 """Run a YUM command.
142
143 Checks the output and retries if the fatal flag is set to True.
144
145 :param: cmd: list: The yum command to run.
146 :param: fatal: bool: Whether the command's output should be checked and
147 retried.
148 """
149 env = os.environ.copy()
150
151 if fatal:
152 retry_count = 0
153 result = None
154
155 # If the command is considered "fatal", we need to retry if the yum
156 # lock was not acquired.
157
158 while result is None or result == YUM_NO_LOCK:
159 try:
160 result = subprocess.check_call(cmd, env=env)
161 except subprocess.CalledProcessError as e:
162 retry_count = retry_count + 1
163 if retry_count > YUM_NO_LOCK_RETRY_COUNT:
164 raise
165 result = e.returncode
166 log("Couldn't acquire YUM lock. Will retry in {} seconds."
167 "".format(YUM_NO_LOCK_RETRY_DELAY))
168 time.sleep(YUM_NO_LOCK_RETRY_DELAY)
169
170 else:
171 subprocess.call(cmd, env=env)
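A usage sketch for the yum-backed helpers above (hedged: the repository URL and package names are illustrative and not part of this change; it assumes a CentOS unit where the Python yum bindings are available):

# Minimal sketch of driving the centos fetch helpers from a hook.
# The mirror URL and package names below are hypothetical examples.
from charmhelpers.fetch.centos import (
    add_source,
    filter_installed_packages,
    install,
    update,
)

add_source('http://mirror.example.com/centos/7/storage/x86_64/')
update(fatal=True)                    # refresh the yum cache, retrying on lock contention
missing = filter_installed_packages(['ceph-common', 'python-rbd'])
if missing:
    install(missing, fatal=True)      # only install what is not already present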
diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py
index f708d1e..4cf21bc 100644
--- a/hooks/charmhelpers/fetch/giturl.py
+++ b/hooks/charmhelpers/fetch/giturl.py
@@ -18,17 +18,18 @@ from charmhelpers.fetch import (
18 BaseFetchHandler, 18 BaseFetchHandler,
19 UnhandledSource, 19 UnhandledSource,
20 filter_installed_packages, 20 filter_installed_packages,
21 apt_install, 21 install,
22) 22)
23 23
24if filter_installed_packages(['git']) != []: 24if filter_installed_packages(['git']) != []:
25 apt_install(['git']) 25 install(['git'])
26 if filter_installed_packages(['git']) != []: 26 if filter_installed_packages(['git']) != []:
27 raise NotImplementedError('Unable to install git') 27 raise NotImplementedError('Unable to install git')
28 28
29 29
30class GitUrlFetchHandler(BaseFetchHandler): 30class GitUrlFetchHandler(BaseFetchHandler):
31 """Handler for git branches via generic and github URLs""" 31 """Handler for git branches via generic and github URLs."""
32
32 def can_handle(self, source): 33 def can_handle(self, source):
33 url_parts = self.parse_url(source) 34 url_parts = self.parse_url(source)
34 # TODO (mattyw) no support for ssh git@ yet 35 # TODO (mattyw) no support for ssh git@ yet
diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py
new file mode 100644
index 0000000..fce496b
--- /dev/null
+++ b/hooks/charmhelpers/fetch/ubuntu.py
@@ -0,0 +1,336 @@
1# Copyright 2014-2015 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import os
16import six
17import time
18import subprocess
19
20from tempfile import NamedTemporaryFile
21from charmhelpers.core.host import (
22 lsb_release
23)
24from charmhelpers.core.hookenv import log
25from charmhelpers.fetch import SourceConfigError
26
27CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
28deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
29"""
30
31PROPOSED_POCKET = """# Proposed
32deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
33"""
34
35CLOUD_ARCHIVE_POCKETS = {
36 # Folsom
37 'folsom': 'precise-updates/folsom',
38 'precise-folsom': 'precise-updates/folsom',
39 'precise-folsom/updates': 'precise-updates/folsom',
40 'precise-updates/folsom': 'precise-updates/folsom',
41 'folsom/proposed': 'precise-proposed/folsom',
42 'precise-folsom/proposed': 'precise-proposed/folsom',
43 'precise-proposed/folsom': 'precise-proposed/folsom',
44 # Grizzly
45 'grizzly': 'precise-updates/grizzly',
46 'precise-grizzly': 'precise-updates/grizzly',
47 'precise-grizzly/updates': 'precise-updates/grizzly',
48 'precise-updates/grizzly': 'precise-updates/grizzly',
49 'grizzly/proposed': 'precise-proposed/grizzly',
50 'precise-grizzly/proposed': 'precise-proposed/grizzly',
51 'precise-proposed/grizzly': 'precise-proposed/grizzly',
52 # Havana
53 'havana': 'precise-updates/havana',
54 'precise-havana': 'precise-updates/havana',
55 'precise-havana/updates': 'precise-updates/havana',
56 'precise-updates/havana': 'precise-updates/havana',
57 'havana/proposed': 'precise-proposed/havana',
58 'precise-havana/proposed': 'precise-proposed/havana',
59 'precise-proposed/havana': 'precise-proposed/havana',
60 # Icehouse
61 'icehouse': 'precise-updates/icehouse',
62 'precise-icehouse': 'precise-updates/icehouse',
63 'precise-icehouse/updates': 'precise-updates/icehouse',
64 'precise-updates/icehouse': 'precise-updates/icehouse',
65 'icehouse/proposed': 'precise-proposed/icehouse',
66 'precise-icehouse/proposed': 'precise-proposed/icehouse',
67 'precise-proposed/icehouse': 'precise-proposed/icehouse',
68 # Juno
69 'juno': 'trusty-updates/juno',
70 'trusty-juno': 'trusty-updates/juno',
71 'trusty-juno/updates': 'trusty-updates/juno',
72 'trusty-updates/juno': 'trusty-updates/juno',
73 'juno/proposed': 'trusty-proposed/juno',
74 'trusty-juno/proposed': 'trusty-proposed/juno',
75 'trusty-proposed/juno': 'trusty-proposed/juno',
76 # Kilo
77 'kilo': 'trusty-updates/kilo',
78 'trusty-kilo': 'trusty-updates/kilo',
79 'trusty-kilo/updates': 'trusty-updates/kilo',
80 'trusty-updates/kilo': 'trusty-updates/kilo',
81 'kilo/proposed': 'trusty-proposed/kilo',
82 'trusty-kilo/proposed': 'trusty-proposed/kilo',
83 'trusty-proposed/kilo': 'trusty-proposed/kilo',
84 # Liberty
85 'liberty': 'trusty-updates/liberty',
86 'trusty-liberty': 'trusty-updates/liberty',
87 'trusty-liberty/updates': 'trusty-updates/liberty',
88 'trusty-updates/liberty': 'trusty-updates/liberty',
89 'liberty/proposed': 'trusty-proposed/liberty',
90 'trusty-liberty/proposed': 'trusty-proposed/liberty',
91 'trusty-proposed/liberty': 'trusty-proposed/liberty',
92 # Mitaka
93 'mitaka': 'trusty-updates/mitaka',
94 'trusty-mitaka': 'trusty-updates/mitaka',
95 'trusty-mitaka/updates': 'trusty-updates/mitaka',
96 'trusty-updates/mitaka': 'trusty-updates/mitaka',
97 'mitaka/proposed': 'trusty-proposed/mitaka',
98 'trusty-mitaka/proposed': 'trusty-proposed/mitaka',
99 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',
100 # Newton
101 'newton': 'xenial-updates/newton',
102 'xenial-newton': 'xenial-updates/newton',
103 'xenial-newton/updates': 'xenial-updates/newton',
104 'xenial-updates/newton': 'xenial-updates/newton',
105 'newton/proposed': 'xenial-proposed/newton',
106 'xenial-newton/proposed': 'xenial-proposed/newton',
107 'xenial-proposed/newton': 'xenial-proposed/newton',
108}
109
110APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
111APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
112APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
113
114
115def filter_installed_packages(packages):
116 """Return a list of packages that require installation."""
117 cache = apt_cache()
118 _pkgs = []
119 for package in packages:
120 try:
121 p = cache[package]
122 p.current_ver or _pkgs.append(package)
123 except KeyError:
124 log('Package {} has no installation candidate.'.format(package),
125 level='WARNING')
126 _pkgs.append(package)
127 return _pkgs
128
129
130def apt_cache(in_memory=True, progress=None):
131 """Build and return an apt cache."""
132 from apt import apt_pkg
133 apt_pkg.init()
134 if in_memory:
135 apt_pkg.config.set("Dir::Cache::pkgcache", "")
136 apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
137 return apt_pkg.Cache(progress)
138
139
140def install(packages, options=None, fatal=False):
141 """Install one or more packages."""
142 if options is None:
143 options = ['--option=Dpkg::Options::=--force-confold']
144
145 cmd = ['apt-get', '--assume-yes']
146 cmd.extend(options)
147 cmd.append('install')
148 if isinstance(packages, six.string_types):
149 cmd.append(packages)
150 else:
151 cmd.extend(packages)
152 log("Installing {} with options: {}".format(packages,
153 options))
154 _run_apt_command(cmd, fatal)
155
156
157def upgrade(options=None, fatal=False, dist=False):
158 """Upgrade all packages."""
159 if options is None:
160 options = ['--option=Dpkg::Options::=--force-confold']
161
162 cmd = ['apt-get', '--assume-yes']
163 cmd.extend(options)
164 if dist:
165 cmd.append('dist-upgrade')
166 else:
167 cmd.append('upgrade')
168 log("Upgrading with options: {}".format(options))
169 _run_apt_command(cmd, fatal)
170
171
172def update(fatal=False):
173 """Update local apt cache."""
174 cmd = ['apt-get', 'update']
175 _run_apt_command(cmd, fatal)
176
177
178def purge(packages, fatal=False):
179 """Purge one or more packages."""
180 cmd = ['apt-get', '--assume-yes', 'purge']
181 if isinstance(packages, six.string_types):
182 cmd.append(packages)
183 else:
184 cmd.extend(packages)
185 log("Purging {}".format(packages))
186 _run_apt_command(cmd, fatal)
187
188
189def apt_mark(packages, mark, fatal=False):
190 """Flag one or more packages using apt-mark."""
191 log("Marking {} as {}".format(packages, mark))
192 cmd = ['apt-mark', mark]
193 if isinstance(packages, six.string_types):
194 cmd.append(packages)
195 else:
196 cmd.extend(packages)
197
198 if fatal:
199 subprocess.check_call(cmd, universal_newlines=True)
200 else:
201 subprocess.call(cmd, universal_newlines=True)
202
203
204def apt_hold(packages, fatal=False):
205 return apt_mark(packages, 'hold', fatal=fatal)
206
207
208def apt_unhold(packages, fatal=False):
209 return apt_mark(packages, 'unhold', fatal=fatal)
210
211
212def add_source(source, key=None):
213 """Add a package source to this system.
214
215 @param source: a URL or sources.list entry, as supported by
216 add-apt-repository(1). Examples::
217
218 ppa:charmers/example
219 deb https://stub:key@private.example.com/ubuntu trusty main
220
221 In addition:
222 'proposed:' may be used to enable the standard 'proposed'
223 pocket for the release.
224 'cloud:' may be used to activate official cloud archive pockets,
225 such as 'cloud:icehouse'
226 'distro' may be used as a noop
227
228 @param key: A key to be added to the system's APT keyring and used
229 to verify the signatures on packages. Ideally, this should be an
230 ASCII format GPG public key including the block headers. A GPG key
231 id may also be used, but be aware that only insecure protocols are
232 available to retrieve the actual public key from a public keyserver
233 placing your Juju environment at risk. ppa and cloud archive keys
234 are securely added automatically, so should not be provided.
235 """
236 if source is None:
237 log('Source is not present. Skipping')
238 return
239
240 if (source.startswith('ppa:') or
241 source.startswith('http') or
242 source.startswith('deb ') or
243 source.startswith('cloud-archive:')):
244 subprocess.check_call(['add-apt-repository', '--yes', source])
245 elif source.startswith('cloud:'):
246 install(filter_installed_packages(['ubuntu-cloud-keyring']),
247 fatal=True)
248 pocket = source.split(':')[-1]
249 if pocket not in CLOUD_ARCHIVE_POCKETS:
250 raise SourceConfigError(
251 'Unsupported cloud: source option %s' %
252 pocket)
253 actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
254 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
255 apt.write(CLOUD_ARCHIVE.format(actual_pocket))
256 elif source == 'proposed':
257 release = lsb_release()['DISTRIB_CODENAME']
258 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
259 apt.write(PROPOSED_POCKET.format(release))
260 elif source == 'distro':
261 pass
262 else:
263 log("Unknown source: {!r}".format(source))
264
265 if key:
266 if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
267 with NamedTemporaryFile('w+') as key_file:
268 key_file.write(key)
269 key_file.flush()
270 key_file.seek(0)
271 subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
272 else:
273 # Note that hkp: is in no way a secure protocol. Using a
274 # GPG key id is pointless from a security POV unless you
275 # absolutely trust your network and DNS.
276 subprocess.check_call(['apt-key', 'adv', '--keyserver',
277 'hkp://keyserver.ubuntu.com:80', '--recv',
278 key])
279
280
281def _run_apt_command(cmd, fatal=False):
282 """Run an APT command.
283
284 Checks the output and retries if the fatal flag is set
285 to True.
286
287 :param: cmd: list: The apt command to run.
288 :param: fatal: bool: Whether the command's output should be checked and
289 retried.
290 """
291 env = os.environ.copy()
292
293 if 'DEBIAN_FRONTEND' not in env:
294 env['DEBIAN_FRONTEND'] = 'noninteractive'
295
296 if fatal:
297 retry_count = 0
298 result = None
299
300 # If the command is considered "fatal", we need to retry if the apt
301 # lock was not acquired.
302
303 while result is None or result == APT_NO_LOCK:
304 try:
305 result = subprocess.check_call(cmd, env=env)
306 except subprocess.CalledProcessError as e:
307 retry_count = retry_count + 1
308 if retry_count > APT_NO_LOCK_RETRY_COUNT:
309 raise
310 result = e.returncode
311 log("Couldn't acquire DPKG lock. Will retry in {} seconds."
312 "".format(APT_NO_LOCK_RETRY_DELAY))
313 time.sleep(APT_NO_LOCK_RETRY_DELAY)
314
315 else:
316 subprocess.call(cmd, env=env)
317
318
319def get_upstream_version(package):
320 """Determine upstream version based on installed package
321
322 @returns None (if not installed) or the upstream version
323 """
324 import apt_pkg
325 cache = apt_cache()
326 try:
327 pkg = cache[package]
328 except KeyError:
329 # the package is unknown to the current apt cache.
330 return None
331
332 if not pkg.current_ver:
333 # package is known, but no version is currently installed.
334 return None
335
336 return apt_pkg.upstream_version(pkg.current_ver.ver_str)
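A usage sketch for the apt-backed helpers above (hedged: 'cloud:xenial-newton' is one of the keys in CLOUD_ARCHIVE_POCKETS shown in this file, while the package name is illustrative):

# Minimal sketch on an Ubuntu unit; package name is a hypothetical example.
from charmhelpers.fetch.ubuntu import (
    add_source,
    apt_hold,
    get_upstream_version,
    install,
    update,
)

add_source('cloud:xenial-newton')        # resolved via CLOUD_ARCHIVE_POCKETS
update(fatal=True)                       # apt-get update, retrying on dpkg lock contention
install(['cinder-backup'], fatal=True)
apt_hold(['cinder-backup'])              # apt-mark hold, pin at the installed version
print(get_upstream_version('cinder-backup'))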
diff --git a/hooks/charmhelpers/osplatform.py b/hooks/charmhelpers/osplatform.py
new file mode 100644
index 0000000..ea490bb
--- /dev/null
+++ b/hooks/charmhelpers/osplatform.py
@@ -0,0 +1,19 @@
1import platform
2
3
4def get_platform():
5 """Return the current OS platform.
6
7 For example: if current os platform is Ubuntu then a string "ubuntu"
8 will be returned (which is the name of the module).
9 This string is used to decide which platform module should be imported.
10 """
11 tuple_platform = platform.linux_distribution()
12 current_platform = tuple_platform[0]
13 if "Ubuntu" in current_platform:
14 return "ubuntu"
15 elif "CentOS" in current_platform:
16 return "centos"
17 else:
18 raise RuntimeError("This module is not supported on {}."
19 .format(current_platform))
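get_platform() only returns the platform name; a sketch of the dispatch pattern it enables (hedged: the actual wiring lives in charmhelpers.fetch.__init__, which is not shown in this section):

# Import the matching per-platform fetch backend at runtime.
import importlib
from charmhelpers.osplatform import get_platform

_platform = get_platform()                                   # 'ubuntu' or 'centos'
_backend = importlib.import_module('charmhelpers.fetch.%s' % _platform)
install = _backend.install                                   # same call sites, per-platform implementation
add_source = _backend.add_source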
diff --git a/hooks/charmhelpers/payload/execd.py b/hooks/charmhelpers/payload/execd.py
index 0c42090..1502aa0 100644
--- a/hooks/charmhelpers/payload/execd.py
+++ b/hooks/charmhelpers/payload/execd.py
@@ -47,11 +47,12 @@ def execd_submodule_paths(command, execd_dir=None):
47 yield path 47 yield path
48 48
49 49
50def execd_run(command, execd_dir=None, die_on_error=False, stderr=None): 50def execd_run(command, execd_dir=None, die_on_error=True, stderr=subprocess.STDOUT):
51 """Run command for each module within execd_dir which defines it.""" 51 """Run command for each module within execd_dir which defines it."""
52 for submodule_path in execd_submodule_paths(command, execd_dir): 52 for submodule_path in execd_submodule_paths(command, execd_dir):
53 try: 53 try:
54 subprocess.check_call(submodule_path, shell=True, stderr=stderr) 54 subprocess.check_output(submodule_path, stderr=stderr,
55 universal_newlines=True)
55 except subprocess.CalledProcessError as e: 56 except subprocess.CalledProcessError as e:
56 hookenv.log("Error ({}) running {}. Output: {}".format( 57 hookenv.log("Error ({}) running {}. Output: {}".format(
57 e.returncode, e.cmd, e.output)) 58 e.returncode, e.cmd, e.output))
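The execd_run change above swaps check_call for check_output with stderr folded into stdout, so the raised CalledProcessError carries the script's combined output for the log message. A standalone illustration (shell=True is used here only to keep the demo self-contained; execd_run itself executes the submodule path directly):

import subprocess

try:
    subprocess.check_output('echo boom >&2; exit 1', shell=True,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True)
except subprocess.CalledProcessError as e:
    print("%s %r" % (e.returncode, e.output))   # prints: 1 'boom\n'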
diff --git a/tests/charmhelpers/contrib/openstack/amulet/deployment.py b/tests/charmhelpers/contrib/openstack/amulet/deployment.py
index 6ce91db..6fe8cf8 100644
--- a/tests/charmhelpers/contrib/openstack/amulet/deployment.py
+++ b/tests/charmhelpers/contrib/openstack/amulet/deployment.py
@@ -98,8 +98,47 @@ class OpenStackAmuletDeployment(AmuletDeployment):
98 98
99 return other_services 99 return other_services
100 100
101 def _add_services(self, this_service, other_services): 101 def _add_services(self, this_service, other_services, use_source=None,
102 """Add services to the deployment and set openstack-origin/source.""" 102 no_origin=None):
103 """Add services to the deployment and optionally set
104 openstack-origin/source.
105
106 :param this_service dict: Service dictionary describing the service
107 whose amulet tests are being run
108 :param other_services dict: List of service dictionaries describing
109 the services needed to support the target
110 service
111 :param use_source list: List of services which use the 'source' config
112 option rather than 'openstack-origin'
113 :param no_origin list: List of services which do not support setting
114 the Cloud Archive.
115 Service Dict:
116 {
117 'name': str charm-name,
118 'units': int number of units,
119 'constraints': dict of juju constraints,
120 'location': str location of charm,
121 }
122 eg
123 this_service = {
124 'name': 'openvswitch-odl',
125 'constraints': {'mem': '8G'},
126 }
127 other_services = [
128 {
129 'name': 'nova-compute',
130 'units': 2,
131 'constraints': {'mem': '4G'},
132 'location': 'cs:~bob/xenial/nova-compute'
133 },
134 {
135 'name': 'mysql',
136 'constraints': {'mem': '2G'},
137 },
138 {'name': 'neutron-api-odl'}]
139 use_source = ['mysql']
140 no_origin = ['neutron-api-odl']
141 """
103 self.log.info('OpenStackAmuletDeployment: adding services') 142 self.log.info('OpenStackAmuletDeployment: adding services')
104 143
105 other_services = self._determine_branch_locations(other_services) 144 other_services = self._determine_branch_locations(other_services)
@@ -110,16 +149,22 @@ class OpenStackAmuletDeployment(AmuletDeployment):
110 services = other_services 149 services = other_services
111 services.append(this_service) 150 services.append(this_service)
112 151
152 use_source = use_source or []
153 no_origin = no_origin or []
154
113 # Charms which should use the source config option 155 # Charms which should use the source config option
114 use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', 156 use_source = list(set(
115 'ceph-osd', 'ceph-radosgw', 'ceph-mon', 'ceph-proxy'] 157 use_source + ['mysql', 'mongodb', 'rabbitmq-server', 'ceph',
158 'ceph-osd', 'ceph-radosgw', 'ceph-mon',
159 'ceph-proxy']))
116 160
117 # Charms which can not use openstack-origin, ie. many subordinates 161 # Charms which can not use openstack-origin, ie. many subordinates
118 no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', 162 no_origin = list(set(
119 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', 163 no_origin + ['cinder-ceph', 'hacluster', 'neutron-openvswitch',
120 'cinder-backup', 'nexentaedge-data', 164 'nrpe', 'openvswitch-odl', 'neutron-api-odl',
121 'nexentaedge-iscsi-gw', 'nexentaedge-swift-gw', 165 'odl-controller', 'cinder-backup', 'nexentaedge-data',
122 'cinder-nexentaedge', 'nexentaedge-mgmt'] 166 'nexentaedge-iscsi-gw', 'nexentaedge-swift-gw',
167 'cinder-nexentaedge', 'nexentaedge-mgmt']))
123 168
124 if self.openstack: 169 if self.openstack:
125 for svc in services: 170 for svc in services:
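The caller-supplied use_source/no_origin lists are merged with the built-in defaults via a set union, so duplicates collapse and ordering is not preserved. A standalone illustration (service names are illustrative):

defaults = ['mysql', 'ceph', 'ceph-osd']
caller = ['mysql', 'percona-cluster']        # e.g. a use_source argument
merged = list(set(caller + defaults))        # same list(set(...)) idiom as above
assert sorted(merged) == ['ceph', 'ceph-osd', 'mysql', 'percona-cluster']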
@@ -220,7 +265,8 @@ class OpenStackAmuletDeployment(AmuletDeployment):
220 self.trusty_icehouse, self.trusty_juno, self.utopic_juno, 265 self.trusty_icehouse, self.trusty_juno, self.utopic_juno,
221 self.trusty_kilo, self.vivid_kilo, self.trusty_liberty, 266 self.trusty_kilo, self.vivid_kilo, self.trusty_liberty,
222 self.wily_liberty, self.trusty_mitaka, 267 self.wily_liberty, self.trusty_mitaka,
223 self.xenial_mitaka) = range(14) 268 self.xenial_mitaka, self.xenial_newton,
269 self.yakkety_newton) = range(16)
224 270
225 releases = { 271 releases = {
226 ('precise', None): self.precise_essex, 272 ('precise', None): self.precise_essex,
@@ -236,7 +282,10 @@ class OpenStackAmuletDeployment(AmuletDeployment):
236 ('utopic', None): self.utopic_juno, 282 ('utopic', None): self.utopic_juno,
237 ('vivid', None): self.vivid_kilo, 283 ('vivid', None): self.vivid_kilo,
238 ('wily', None): self.wily_liberty, 284 ('wily', None): self.wily_liberty,
239 ('xenial', None): self.xenial_mitaka} 285 ('xenial', None): self.xenial_mitaka,
286 ('xenial', 'cloud:xenial-newton'): self.xenial_newton,
287 ('yakkety', None): self.yakkety_newton,
288 }
240 return releases[(self.series, self.openstack)] 289 return releases[(self.series, self.openstack)]
241 290
242 def _get_openstack_release_string(self): 291 def _get_openstack_release_string(self):
@@ -254,6 +303,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
254 ('vivid', 'kilo'), 303 ('vivid', 'kilo'),
255 ('wily', 'liberty'), 304 ('wily', 'liberty'),
256 ('xenial', 'mitaka'), 305 ('xenial', 'mitaka'),
306 ('yakkety', 'newton'),
257 ]) 307 ])
258 if self.openstack: 308 if self.openstack:
259 os_origin = self.openstack.split(':')[1] 309 os_origin = self.openstack.split(':')[1]
diff --git a/tests/charmhelpers/contrib/openstack/amulet/utils.py b/tests/charmhelpers/contrib/openstack/amulet/utils.py
index 8040b57..24b353e 100644
--- a/tests/charmhelpers/contrib/openstack/amulet/utils.py
+++ b/tests/charmhelpers/contrib/openstack/amulet/utils.py
@@ -83,6 +83,56 @@ class OpenStackAmuletUtils(AmuletUtils):
83 if not found: 83 if not found:
84 return 'endpoint not found' 84 return 'endpoint not found'
85 85
86 def validate_v3_endpoint_data(self, endpoints, admin_port, internal_port,
87 public_port, expected):
88 """Validate keystone v3 endpoint data.
89
90 Validate the v3 endpoint data which has changed from v2. The
91 ports are used to find the matching endpoint.
92
93 The new v3 endpoint data looks like:
94
95 [<Endpoint enabled=True,
96 id=0432655fc2f74d1e9fa17bdaa6f6e60b,
97 interface=admin,
98 links={u'self': u'<RESTful URL of this endpoint>'},
99 region=RegionOne,
100 region_id=RegionOne,
101 service_id=17f842a0dc084b928e476fafe67e4095,
102 url=http://10.5.6.5:9312>,
103 <Endpoint enabled=True,
104 id=6536cb6cb92f4f41bf22b079935c7707,
105 interface=admin,
106 links={u'self': u'<RESTful url of this endpoint>'},
107 region=RegionOne,
108 region_id=RegionOne,
109 service_id=72fc8736fb41435e8b3584205bb2cfa3,
110 url=http://10.5.6.6:35357/v3>,
111 ... ]
112 """
113 self.log.debug('Validating v3 endpoint data...')
114 self.log.debug('actual: {}'.format(repr(endpoints)))
115 found = []
116 for ep in endpoints:
117 self.log.debug('endpoint: {}'.format(repr(ep)))
118 if ((admin_port in ep.url and ep.interface == 'admin') or
119 (internal_port in ep.url and ep.interface == 'internal') or
120 (public_port in ep.url and ep.interface == 'public')):
121 found.append(ep.interface)
122 # note we ignore the links member.
123 actual = {'id': ep.id,
124 'region': ep.region,
125 'region_id': ep.region_id,
126 'interface': self.not_null,
127 'url': ep.url,
128 'service_id': ep.service_id, }
129 ret = self._validate_dict_data(expected, actual)
130 if ret:
131 return 'unexpected endpoint data - {}'.format(ret)
132
133 if len(found) != 3:
134 return 'Unexpected number of endpoints found'
135
86 def validate_svc_catalog_endpoint_data(self, expected, actual): 136 def validate_svc_catalog_endpoint_data(self, expected, actual):
87 """Validate service catalog endpoint data. 137 """Validate service catalog endpoint data.
88 138
@@ -100,6 +150,72 @@ class OpenStackAmuletUtils(AmuletUtils):
100 return "endpoint {} does not exist".format(k) 150 return "endpoint {} does not exist".format(k)
101 return ret 151 return ret
102 152
153 def validate_v3_svc_catalog_endpoint_data(self, expected, actual):
154 """Validate the keystone v3 catalog endpoint data.
155
156 Validate a list of dictionaries that make up the keystone v3 service
157 catalogue.
158
159 It is in the form of:
160
161
162 {u'identity': [{u'id': u'48346b01c6804b298cdd7349aadb732e',
163 u'interface': u'admin',
164 u'region': u'RegionOne',
165 u'region_id': u'RegionOne',
166 u'url': u'http://10.5.5.224:35357/v3'},
167 {u'id': u'8414f7352a4b47a69fddd9dbd2aef5cf',
168 u'interface': u'public',
169 u'region': u'RegionOne',
170 u'region_id': u'RegionOne',
171 u'url': u'http://10.5.5.224:5000/v3'},
172 {u'id': u'd5ca31440cc24ee1bf625e2996fb6a5b',
173 u'interface': u'internal',
174 u'region': u'RegionOne',
175 u'region_id': u'RegionOne',
176 u'url': u'http://10.5.5.224:5000/v3'}],
177 u'key-manager': [{u'id': u'68ebc17df0b045fcb8a8a433ebea9e62',
178 u'interface': u'public',
179 u'region': u'RegionOne',
180 u'region_id': u'RegionOne',
181 u'url': u'http://10.5.5.223:9311'},
182 {u'id': u'9cdfe2a893c34afd8f504eb218cd2f9d',
183 u'interface': u'internal',
184 u'region': u'RegionOne',
185 u'region_id': u'RegionOne',
186 u'url': u'http://10.5.5.223:9311'},
187 {u'id': u'f629388955bc407f8b11d8b7ca168086',
188 u'interface': u'admin',
189 u'region': u'RegionOne',
190 u'region_id': u'RegionOne',
191 u'url': u'http://10.5.5.223:9312'}]}
192
193 Note that an added complication is that the order of admin, public and
194 internal endpoints against 'interface' is not guaranteed within each region.
195
196 Thus, the function sorts the expected and actual lists using the
197 interface key as a sort key, prior to the comparison.
198 """
199 self.log.debug('Validating v3 service catalog endpoint data...')
200 self.log.debug('actual: {}'.format(repr(actual)))
201 for k, v in six.iteritems(expected):
202 if k in actual:
203 l_expected = sorted(v, key=lambda x: x['interface'])
204 l_actual = sorted(actual[k], key=lambda x: x['interface'])
205 if len(l_actual) != len(l_expected):
206 return ("endpoint {} has differing number of interfaces "
207 " - expected({}), actual({})"
208 .format(k, len(l_expected), len(l_actual)))
209 for i_expected, i_actual in zip(l_expected, l_actual):
210 self.log.debug("checking interface {}"
211 .format(i_expected['interface']))
212 ret = self._validate_dict_data(i_expected, i_actual)
213 if ret:
214 return self.endpoint_error(k, ret)
215 else:
216 return "endpoint {} does not exist".format(k)
217 return ret
218
103 def validate_tenant_data(self, expected, actual): 219 def validate_tenant_data(self, expected, actual):
104 """Validate tenant data. 220 """Validate tenant data.
105 221
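Since admin/public/internal endpoints may come back in any order per service, both sides are sorted on 'interface' before the pairwise comparison. A standalone illustration of that approach (URLs taken from the docstring above):

expected = [{'interface': 'admin', 'url': 'http://10.5.5.224:35357/v3'},
            {'interface': 'public', 'url': 'http://10.5.5.224:5000/v3'},
            {'interface': 'internal', 'url': 'http://10.5.5.224:5000/v3'}]
actual = list(reversed(expected))                      # different order, same data
l_expected = sorted(expected, key=lambda x: x['interface'])
l_actual = sorted(actual, key=lambda x: x['interface'])
assert all(a == b for a, b in zip(l_expected, l_actual))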
@@ -928,7 +1044,8 @@ class OpenStackAmuletUtils(AmuletUtils):
928 retry_delay=5, 1044 retry_delay=5,
929 socket_timeout=1) 1045 socket_timeout=1)
930 connection = pika.BlockingConnection(parameters) 1046 connection = pika.BlockingConnection(parameters)
931 assert connection.server_properties['product'] == 'RabbitMQ' 1047 assert connection.is_open is True
1048 assert connection.is_closing is False
932 self.log.debug('Connect OK') 1049 self.log.debug('Connect OK')
933 return connection 1050 return connection
934 except Exception as e: 1051 except Exception as e: