Tidy ceph backend configuration

Drop generation of the upstart override file and the /etc/environment
entry, and scrub any existing charm configuration in these locations
from an existing install.

These were required way back in the dawn of time, when ceph
support was alpha/beta in cinder.

Provide a backend-specific configuration file path, allowing
multiple ceph clusters to be used with a single cinder
application (a short sketch of the idea follows below).

Change-Id: I8a097e4de1c5c980f118a587a1a64792fad2fa05
Closes-Bug: 1769196
Author: James Page
Date: 2018-05-10 11:48:38 +01:00
Parent: 294d00dfa1
Commit: b1829e983a
6 changed files with 42 additions and 16 deletions
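
The per-application path is the heart of the change. A minimal sketch in plain Python (not charm code; the application names below are made up for illustration) of why formatting the path with the application name lets several cinder-ceph applications, each pointed at a different Ceph cluster, coexist under one cinder deployment:

# Sketch only: a per-application path means two cinder-ceph applications
# backed by different Ceph clusters no longer fight over a single ceph.conf.
# The application names below are hypothetical.
CHARM_CEPH_CONF = '/var/lib/charm/{}/ceph.conf'

for app in ('cinder-ceph-ssd', 'cinder-ceph-hdd'):
    # Each subordinate application renders its own ceph.conf and hands the
    # path to cinder via the rbd_ceph_conf backend option.
    print(app, '->', CHARM_CEPH_CONF.format(app))

In the charm itself the name comes from charmhelpers' service_name(), as the first hunk below shows.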


@@ -27,6 +27,12 @@ from charmhelpers.contrib.openstack.utils import (
CompareOpenStackReleases,
)
CHARM_CEPH_CONF = '/var/lib/charm/{}/ceph.conf'
def ceph_config_file():
    return CHARM_CEPH_CONF.format(service_name())
class CephSubordinateContext(OSContextGenerator):
interfaces = ['ceph-cinder']
@@ -54,6 +60,7 @@ class CephSubordinateContext(OSContextGenerator):
('rbd_pool', service),
('rbd_user', service),
('rbd_secret_uuid', leader_get('secret-uuid')),
('rbd_ceph_conf', ceph_config_file()),
]
}
}
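
For reference, a rough illustration in ordinary Python (not charm code) of the backend stanza the principal cinder charm ends up rendering once rbd_ceph_conf is included; the values mirror the functional test further down, and the secret UUID is a placeholder:

import sys
from configparser import ConfigParser

# Build and print the [cinder-ceph] backend section as it would appear in
# cinder.conf; rbd_ceph_conf now points at the per-application ceph.conf.
backend = ConfigParser()
backend['cinder-ceph'] = {
    'volume_backend_name': 'cinder-ceph',
    'volume_driver': 'cinder.volume.drivers.rbd.RBDDriver',
    'rbd_pool': 'cinder-ceph',
    'rbd_user': 'cinder-ceph',
    'rbd_secret_uuid': '00000000-0000-0000-0000-000000000000',  # placeholder
    'rbd_ceph_conf': '/var/lib/charm/cinder-ceph/ceph.conf',
}
backend.write(sys.stdout)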


@@ -22,7 +22,7 @@ import uuid
from cinder_utils import (
register_configs,
restart_map,
set_ceph_env_variables,
scrub_old_style_ceph,
PACKAGES,
REQUIRED_INTERFACES,
VERSION_PACKAGE,
@@ -122,7 +122,6 @@ def ceph_changed():
if is_request_complete(get_ceph_request()):
log('Request complete')
CONFIGS.write_all()
set_ceph_env_variables(service=service)
for rid in relation_ids('storage-backend'):
storage_backend(rid)
for r_id in relation_ids('ceph-access'):
@@ -180,9 +179,9 @@ def storage_backend_changed():
def upgrade_charm():
if 'ceph' in CONFIGS.complete_contexts():
CONFIGS.write_all()
set_ceph_env_variables(service=service_name())
for rid in relation_ids('storage-backend'):
storage_backend(rid)
scrub_old_style_ceph()
@hooks.hook('leader-settings-changed')


@@ -12,8 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
from collections import OrderedDict
from tempfile import NamedTemporaryFile
from charmhelpers.core.hookenv import (
relation_ids,
@@ -115,12 +121,19 @@ def restart_map():
return OrderedDict(_map)
def set_ceph_env_variables(service):
    # XXX: Horrid kludge to make cinder-volume use
    # a different ceph username than admin
    env = open('/etc/environment', 'r').read()
    if 'CEPH_ARGS' not in env:
        with open('/etc/environment', 'a') as out:
            out.write('CEPH_ARGS="--id %s"\n' % service)
    with open('/etc/init/cinder-volume.override', 'w') as out:
        out.write('env CEPH_ARGS="--id %s"\n' % service)
def scrub_old_style_ceph():
    """Purge any legacy ceph configuration from install"""
    # NOTE: purge old override file - no longer needed
    if os.path.exists('/etc/init/cinder-volume.override'):
        os.remove('/etc/init/cinder-volume.override')
    # NOTE: purge any CEPH_ARGS data from /etc/environment
    env_file = '/etc/environment'
    ceph_match = re.compile("^CEPH_ARGS.*").search
    with open(env_file, 'r') as input_file:
        with NamedTemporaryFile(mode='w',
                                delete=False,
                                dir=os.path.dirname(env_file)) as outfile:
            for line in input_file:
                if not ceph_match(line):
                    print(line, end='', file=outfile)
            os.rename(outfile.name, input_file.name)
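
The scrub writes a filtered copy of /etc/environment next to the original and then renames it into place, so the file is never left half-written. A quick standalone demo of the same pattern (not charm code), run against a throwaway file:

from __future__ import print_function

import os
import re
from tempfile import NamedTemporaryFile, mkdtemp

# Work in a scratch directory so nothing touches the real /etc/environment.
workdir = mkdtemp()
env_file = os.path.join(workdir, 'environment')
with open(env_file, 'w') as f:
    f.write('PATH="/usr/local/bin:/usr/bin:/bin"\n')
    f.write('CEPH_ARGS="--id cinder-ceph"\n')

ceph_match = re.compile("^CEPH_ARGS.*").search
with open(env_file, 'r') as input_file:
    # Creating the temporary file in the same directory keeps the final
    # os.rename() on one filesystem, so the swap is atomic.
    with NamedTemporaryFile(mode='w', delete=False,
                            dir=os.path.dirname(env_file)) as outfile:
        for line in input_file:
            if not ceph_match(line):
                print(line, end='', file=outfile)
        os.rename(outfile.name, input_file.name)

with open(env_file) as f:
    print(f.read())  # only the PATH line remains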


@@ -427,6 +427,8 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
["rbd_pool", "cinder-ceph"],
["rbd_user", "cinder-ceph"],
["rbd_secret_uuid", backend_uuid],
['rbd_ceph_conf',
'/var/lib/charm/cinder-ceph/ceph.conf'],
]
}
}
@@ -584,7 +586,8 @@ class CinderCephBasicDeployment(OpenStackAmuletDeployment):
'volume_backend_name': 'cinder-ceph',
'volume_driver': 'cinder.volume.drivers.rbd.RBDDriver',
'rbd_pool': 'cinder-ceph',
'rbd_user': 'cinder-ceph'
'rbd_user': 'cinder-ceph',
'rbd_ceph_conf': '/var/lib/charm/cinder-ceph/ceph.conf',
},
}
if self._get_openstack_release() < self.xenial_ocata:


@@ -31,6 +31,7 @@ class TestCinderContext(CharmTestCase):
def setUp(self):
super(TestCinderContext, self).setUp(contexts, TO_PATCH)
self.leader_get.return_value = 'libvirt-uuid'
self.maxDiff = None
def test_ceph_not_related(self):
self.is_relation_made.return_value = False
@@ -53,6 +54,8 @@ class TestCinderContext(CharmTestCase):
('rbd_pool', service),
('rbd_user', service),
('rbd_secret_uuid', 'libvirt-uuid'),
('rbd_ceph_conf',
'/var/lib/charm/mycinder/ceph.conf')
]
}
}
@@ -75,6 +78,8 @@ class TestCinderContext(CharmTestCase):
('rbd_pool', service),
('rbd_user', service),
('rbd_secret_uuid', 'libvirt-uuid'),
('rbd_ceph_conf',
'/var/lib/charm/mycinder/ceph.conf')
]
}
}


@@ -31,7 +31,7 @@ TO_PATCH = [
'ensure_ceph_keyring',
'register_configs',
'restart_map',
'set_ceph_env_variables',
'scrub_old_style_ceph',
'is_request_complete',
'send_request_if_needed',
'CONFIGS',
@@ -98,7 +98,6 @@ class TestCinderHooks(CharmTestCase):
user='cinder',
group='cinder')
self.assertTrue(self.CONFIGS.write_all.called)
self.set_ceph_env_variables.assert_called_with(service='cinder')
@patch.object(hooks, 'get_ceph_request')
@patch('charmhelpers.core.hookenv.config')
@@ -178,7 +177,7 @@ class TestCinderHooks(CharmTestCase):
hooks.hooks.execute(['hooks/upgrade-charm'])
_storage_backend.assert_called_with('ceph:1')
assert self.CONFIGS.write_all.called
assert self.set_ceph_env_variables.called
self.scrub_old_style_ceph.assert_called_once_with()
@patch('charmhelpers.core.hookenv.config')
@patch.object(hooks, 'storage_backend')