Heart beat, config generator support
Change-Id: I705583361caa747aa290883d8f33b7f499279f43
This commit is contained in:
parent
72e7f2cd38
commit
26c8878d12
|
@ -0,0 +1,9 @@
|
|||
[DEFAULT]
|
||||
output_file = etc/namos/namos.conf.sample
|
||||
wrap_width = 79
|
||||
namespace = namos.common.config
|
||||
namespace = oslo.messaging
|
||||
namespace = oslo.middleware
|
||||
namespace = oslo.db
|
||||
namespace = oslo.log
|
||||
namespace = oslo.service.service
|
|
@ -10,4 +10,4 @@ rabbit_hosts = 172.241.0.101
|
|||
connection = mysql+pymysql://root:password@172.241.0.101/namos?charset=utf8
|
||||
|
||||
[conductor]
|
||||
workers=1
|
||||
workers=3
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
# List of config generator conf files for syncing the conf with namos
|
||||
heat=/opt/stack/heat/config-generator.conf
|
||||
namos=/home/manickan/workspace/namos/openstack/namos/config-generator.conf
|
||||
keystone=/opt/stack/keystone/config-generator/keystone.conf
|
||||
neutron-bgp-dragent=/opt/stack/neutron/etc/oslo-config-generator/bgp_dragent.ini
|
||||
neutron-dhcp-agent=/opt/stack/neutron/etc/oslo-config-generator/dhcp_agent.ini
|
||||
neutron-l3-agent=/opt/stack/neutron/etc/oslo-config-generator/l3_agent.ini
|
||||
neutron-linuxbridge-agent=/opt/stack/neutron/etc/oslo-config-generator/linuxbridge_agent.ini
|
||||
neutron-metadata-agent=/opt/stack/neutron/etc/oslo-config-generator/metadata_agent.ini
|
||||
neutron-metering-agent=/opt/stack/neutron/etc/oslo-config-generator/metering_agent.ini
|
||||
neutron-ml2=/opt/stack/neutron/etc/oslo-config-generator/ml2_conf.ini
|
||||
neutron-ml2-sriov=/opt/stack/neutron/etc/oslo-config-generator/ml2_conf_sriov.ini
|
||||
neutron=/opt/stack/neutron/etc/oslo-config-generator/neutron.conf
|
||||
neutron-openvswitch-agent=/opt/stack/neutron/etc/oslo-config-generator/openvswitch_agent.ini
|
||||
neutron-sriov-agent=/opt/stack/neutron/etc/oslo-config-generator/sriov_agent.ini
|
||||
lbaas-agent=/opt/stack/neutron-lbaas/etc/oslo-config-generator/lbaas_agent.ini
|
||||
neutron-lbaas=/opt/stack/neutron-lbaas/etc/oslo-config-generator/neutron_lbaas.conf
|
||||
services-lbaas=/opt/stack/neutron-lbaas/etc/oslo-config-generator/services_lbaas.conf
|
||||
glance-api=/opt/stack/glance/etc/oslo-config-generator/glance-api.conf
|
||||
glance-cache=/opt/stack/glance/etc/oslo-config-generator/glance-cache.conf
|
||||
glance-glare=/opt/stack/glance/etc/oslo-config-generator/glance-glare.conf
|
||||
glance-registry=/opt/stack/glance/etc/oslo-config-generator/glance-registry.conf
|
||||
glance-scrubber=/opt/stack/glance/etc/oslo-config-generator/glance-scrubber.conf
|
||||
glance-manage=/opt/stack/glance/etc/oslo-config-generator/glance-manage.conf
|
||||
nova=/opt/stack/nova/etc/nova/nova-config-generator.conf
|
||||
cinder=/opt/stack/cinder/cinder/config/cinder-config-generator.conf
|
|
@ -15,8 +15,11 @@
|
|||
import sys
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from namos.common import config
|
||||
from namos.common import exception
|
||||
from namos.db import api
|
||||
from namos.db import sample
|
||||
from namos.db.sqlalchemy import migration
|
||||
|
||||
|
@ -25,6 +28,90 @@ CONF = cfg.CONF
|
|||
MANAGE_COMMAND_NAME = 'namos-manage'
|
||||
|
||||
|
||||
class HeartBeat(object):
    """Check and report the liveness of registered service workers."""

    def find_status(self, sw, report_interval=60):
        """Return True when *sw* was seen within ``report_interval`` seconds.

        Falls back to the creation timestamp when the worker row has never
        been updated (i.e. no heart beat has arrived yet).
        """
        last_seen = sw.created_at if sw.updated_at is None else sw.updated_at
        age = (timeutils.utcnow() - last_seen).total_seconds()
        return age <= report_interval

    def report_status(self):
        """Print one ``[T]``/``[F]`` liveness line per service worker."""
        # TODO(mrkanag) Make like Node: Service: worker: status
        for worker in api.service_worker_get_all(None):
            flag = 'T' if self.find_status(worker) else 'F'
            print('[', flag, ']', worker.name)
|
||||
|
||||
|
||||
class OsloConfigSchemaManager(object):
    # Imports/exports oslo.config option schemas:
    #   * gen_schema() dumps the schemas already stored in the namos DB
    #     to a single JSON file.
    #   * sync() runs oslo-config-generator (JSON output) for each service
    #     listed in the sync file and, optionally, loads the result into
    #     the namos DB.

    def gen_schema(self):
        """Dump all stored config schemas to CONF.command.outputfile.

        Output shape: {namespace: {group: {option-name: schema-dict}}}.
        """
        import json
        cfg_ns = dict()
        for cfg_ in api.config_schema_get_all(None):
            # Build the nested namespace -> group -> option mapping lazily.
            if cfg_.namespace not in cfg_ns:
                cfg_ns[cfg_.namespace] = dict()
            if cfg_.group_name not in cfg_ns[cfg_.namespace]:
                cfg_ns[cfg_.namespace][cfg_.group_name] = dict()
            cfg_ns[cfg_.namespace][cfg_.group_name][cfg_.name] = cfg_.to_dict()

        open(CONF.command.outputfile, 'w').write(json.dumps(cfg_ns))

    def sync(self):
        """Regenerate (and optionally db-import) per-service config schemas.

        Driven by the CLI flags registered for the ``oslo_config_schema``
        sub-command: --gen, --syncfile, --outputdir and --dbsync.
        """
        # --gen short-circuits: only export the DB to JSON, no syncing.
        if CONF.command.gen:
            self.gen_schema()
            return

        # Sync file format: one "service-name=<config-generator conf>" per
        # line; '#' starts a comment line.
        sync_map = {}
        with open(CONF.command.syncfile) as f:
            for line in f:
                if line.startswith("#"):
                    continue
                kv = line.split("=")
                sync_map[kv[0]] = kv[1].replace("\n", "")

        for k, v in sync_map.items():
            # One JSON schema file per service, defaulting to /tmp.
            out_file = '%s/%s.json' % (CONF.command.outputdir or '/tmp', k)
            cmd = ('oslo-config-generator --config-file %s '
                   '--output-file %s --output-format json' %
                   (v, out_file))
            print ("\nSyncing %s " % cmd)
            import os
            os.system(cmd)

            if CONF.command.dbsync:
                # Load the freshly generated JSON and insert each option
                # schema; existing rows are reported with 'F', new with 'T'.
                import json
                conf_dict = json.loads(open(out_file).read())
                for grp, namespaces in conf_dict.items():
                    for namespace, opts in namespaces.items():
                        for name, opt in opts.items():
                            conf_ = dict(
                                namespace=namespace,
                                group_name=grp,
                                name=name,
                                default_value=opt['default'],
                                type=opt['type']['name'],
                                help=opt['help'],
                                required=opt['required'],
                                secret=opt['secret'],
                                mutable=opt['mutable']
                            )

                            try:
                                api.config_schema_create(None,
                                                         conf_)
                                _a = 'T'
                            except exception.AlreadyExist:
                                _a = 'F'

                            print ('[', _a, ']', namespace, ':', grp, ':',
                                   name)
|
||||
|
||||
|
||||
class DBCommand(object):
|
||||
|
||||
def upgrade(self):
|
||||
|
@ -88,6 +175,16 @@ def add_command_parsers(subparsers):
|
|||
parser.add_argument('-p', '--purge', action='store_true')
|
||||
parser.set_defaults(func=command_object.demo_data)
|
||||
|
||||
parser = subparsers.add_parser('oslo_config_schema')
|
||||
parser.add_argument('-f', '--syncfile')
|
||||
parser.add_argument('-o', '--outputdir')
|
||||
parser.add_argument('-j', '--outputfile')
|
||||
parser.add_argument('-s', '--dbsync', action='store_true')
|
||||
parser.add_argument('-g', '--gen', action='store_true')
|
||||
parser.set_defaults(func=OsloConfigSchemaManager().sync)
|
||||
|
||||
parser = subparsers.add_parser('status')
|
||||
parser.set_defaults(func=HeartBeat().report_status)
|
||||
|
||||
command_opt = cfg.SubCommandOpt('command',
|
||||
title='Command',
|
||||
|
|
|
@ -49,3 +49,7 @@ def init_log(project=PROJECT_NAME):
|
|||
logging.setup(cfg.CONF,
|
||||
project,
|
||||
version=VERSION)
|
||||
|
||||
|
||||
def list_opts():
|
||||
yield 'conductor', conductor_opts
|
||||
|
|
|
@ -130,3 +130,8 @@ class ConfigNotFound(NotFound):
|
|||
class ConfigFileNotFound(NotFound):
    # Raised when a config file row lookup by id finds nothing.
    # Grammar fix: the message previously read "does not found".
    msg_fmt = ("Config file %(config_file_id)s is not found")
    error_code = 0x0b001
|
||||
|
||||
|
||||
class ConfigSchemaNotFound(NotFound):
    # Raised when a config schema lookup by id or name finds nothing.
    # Grammar fix: the message previously read "does not found".
    msg_fmt = ("Config schema %(config_schema_id)s is not found")
    error_code = 0x0c001
|
||||
|
|
|
@ -0,0 +1,503 @@
|
|||
# Copyright 2012 SINA Corporation
|
||||
# Copyright 2014 Cisco Systems, Inc.
|
||||
# All Rights Reserved.
|
||||
# Copyright 2014 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Sample configuration generator
|
||||
|
||||
Tool for generating a sample configuration file. See
|
||||
../doc/source/generator.rst for details.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
"""
|
||||
|
||||
# TODO(mrkanag) copied from oslo_config.generator having the changes for
|
||||
# generating the conf in json format
|
||||
|
||||
import collections
|
||||
import logging
|
||||
import operator
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
import pkg_resources
|
||||
import six
|
||||
|
||||
from oslo_config import cfg
|
||||
import stevedore.named # noqa
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
_generator_opts = [
|
||||
cfg.StrOpt('output-file',
|
||||
help='Path of the file to write to. Defaults to stdout.'),
|
||||
cfg.StrOpt('output-format',
|
||||
default='txt',
|
||||
help='Output format either txt or json. Defaults to txt.'),
|
||||
cfg.IntOpt('wrap-width',
|
||||
default=70,
|
||||
help='The maximum length of help lines.'),
|
||||
cfg.MultiStrOpt('namespace',
|
||||
required=True,
|
||||
help='Option namespace under "oslo.config.opts" in which '
|
||||
'to query for options.'),
|
||||
]
|
||||
|
||||
|
||||
def register_cli_opts(conf):
    """Register the formatter's CLI options with a ConfigOpts instance.

    Note, this must be done before the ConfigOpts instance is called to parse
    the configuration.

    :param conf: a ConfigOpts instance
    :raises: DuplicateOptError, ArgsAlreadyParsedError
    """
    conf.register_cli_opts(_generator_opts)
|
||||
|
||||
|
||||
def _format_defaults(opt):
    """Return a list of formatted default values for *opt*.

    MultiStrOpt defaults may already be a list; every other option type is
    rendered to a single string. Values with significant surrounding
    whitespace are quoted.
    """
    if isinstance(opt, cfg.MultiStrOpt):
        if opt.sample_default is not None:
            rendered = opt.sample_default
        elif not opt.default:
            rendered = ['']
        else:
            rendered = opt.default
    else:
        if opt.sample_default is not None:
            text = str(opt.sample_default)
        elif opt.default is None:
            text = '<None>'
        elif isinstance(opt, (cfg.StrOpt, cfg.IPOpt, cfg.HostnameOpt)):
            text = opt.default
        elif isinstance(opt, cfg.BoolOpt):
            text = str(opt.default).lower()
        elif isinstance(opt, (cfg.IntOpt, cfg.FloatOpt,
                              cfg.PortOpt)):
            text = str(opt.default)
        elif isinstance(opt, (cfg.ListOpt, cfg._ConfigFileOpt,
                              cfg._ConfigDirOpt)):
            text = ','.join(opt.default)
        elif isinstance(opt, cfg.DictOpt):
            pairs = sorted(opt.default.items(),
                           key=operator.itemgetter(0))
            text = ','.join(['%s:%s' % pair for pair in pairs])
        else:
            LOG.warning('Unknown option type: %s', repr(opt))
            text = str(opt.default)
        rendered = [text]

    # Quote any default whose leading/trailing whitespace is significant.
    return ['"%s"' % value if value.strip() != value else value
            for value in rendered]
|
||||
|
||||
|
||||
class _OptFormatter(object):
|
||||
|
||||
"""Format configuration option descriptions to a file."""
|
||||
|
||||
def __init__(self, output_file=None, wrap_width=70):
|
||||
"""Construct an OptFormatter object.
|
||||
|
||||
:param output_file: a writeable file object
|
||||
:param wrap_width: The maximum length of help lines, 0 to not wrap
|
||||
"""
|
||||
self.output_file = output_file or sys.stdout
|
||||
self.wrap_width = wrap_width
|
||||
|
||||
def _format_help(self, help_text):
|
||||
"""Format the help for a group or option to the output file.
|
||||
|
||||
:param help_text: The text of the help string
|
||||
"""
|
||||
if self.wrap_width is not None and self.wrap_width > 0:
|
||||
wrapped = ""
|
||||
for line in help_text.splitlines():
|
||||
text = "\n".join(textwrap.wrap(line, self.wrap_width,
|
||||
initial_indent='# ',
|
||||
subsequent_indent='# ',
|
||||
break_long_words=False,
|
||||
replace_whitespace=False))
|
||||
wrapped += "#" if text == "" else text
|
||||
wrapped += "\n"
|
||||
lines = [wrapped]
|
||||
else:
|
||||
lines = ['# ' + help_text + '\n']
|
||||
return lines
|
||||
|
||||
def _get_choice_text(self, choice):
|
||||
if choice is None:
|
||||
return '<None>'
|
||||
elif choice == '':
|
||||
return "''"
|
||||
return six.text_type(choice)
|
||||
|
||||
def format_group(self, group_or_groupname):
|
||||
"""Format the description of a group header to the output file
|
||||
|
||||
:param group_or_groupname: a cfg.OptGroup instance or a name of group
|
||||
"""
|
||||
if isinstance(group_or_groupname, cfg.OptGroup):
|
||||
group = group_or_groupname
|
||||
lines = ['[%s]\n' % group.name]
|
||||
if group.help:
|
||||
lines += self._format_help(group.help)
|
||||
else:
|
||||
groupname = group_or_groupname
|
||||
lines = ['[%s]\n' % groupname]
|
||||
self.writelines(lines)
|
||||
|
||||
def format(self, opt):
|
||||
"""Format a description of an option to the output file.
|
||||
|
||||
:param opt: a cfg.Opt instance
|
||||
"""
|
||||
if not opt.help:
|
||||
LOG.warning('"%s" is missing a help string', opt.dest)
|
||||
|
||||
option_type = getattr(opt, 'type', None)
|
||||
opt_type = getattr(option_type, 'type_name', 'unknown value')
|
||||
|
||||
if opt.help:
|
||||
help_text = u'%s (%s)' % (opt.help,
|
||||
opt_type)
|
||||
else:
|
||||
help_text = u'(%s)' % opt_type
|
||||
lines = self._format_help(help_text)
|
||||
|
||||
if getattr(opt.type, 'min', None) is not None:
|
||||
lines.append('# Minimum value: %d\n' % opt.type.min)
|
||||
|
||||
if getattr(opt.type, 'max', None) is not None:
|
||||
lines.append('# Maximum value: %d\n' % opt.type.max)
|
||||
|
||||
if getattr(opt.type, 'choices', None):
|
||||
choices_text = ', '.join([self._get_choice_text(choice)
|
||||
for choice in opt.type.choices])
|
||||
lines.append('# Allowed values: %s\n' % choices_text)
|
||||
|
||||
try:
|
||||
if opt.mutable:
|
||||
lines.append(
|
||||
'# Note: This option can be changed without restarting.\n'
|
||||
)
|
||||
except AttributeError as err:
|
||||
# NOTE(dhellmann): keystoneauth defines its own Opt class,
|
||||
# and neutron (at least) returns instances of those
|
||||
# classes instead of oslo_config Opt instances. The new
|
||||
# mutable attribute is the first property where the API
|
||||
# isn't supported in the external class, so we can use
|
||||
# this failure to emit a warning. See
|
||||
# https://bugs.launchpad.net/keystoneauth/+bug/1548433 for
|
||||
# more details.
|
||||
import warnings
|
||||
if not isinstance(cfg.Opt, opt):
|
||||
warnings.warn(
|
||||
'Incompatible option class for %s (%r): %s' %
|
||||
(opt.dest, opt.__class__, err),
|
||||
)
|
||||
else:
|
||||
warnings.warn('Failed to fully format sample for %s: %s' %
|
||||
(opt.dest, err))
|
||||
|
||||
for d in opt.deprecated_opts:
|
||||
lines.append('# Deprecated group/name - [%s]/%s\n' %
|
||||
(d.group or 'DEFAULT', d.name or opt.dest))
|
||||
|
||||
if opt.deprecated_for_removal:
|
||||
lines.append(
|
||||
'# This option is deprecated for removal.\n'
|
||||
'# Its value may be silently ignored in the future.\n')
|
||||
if opt.deprecated_reason:
|
||||
lines.extend(
|
||||
self._format_help('Reason: ' + opt.deprecated_reason))
|
||||
|
||||
if hasattr(opt.type, 'format_defaults'):
|
||||
defaults = opt.type.format_defaults(opt.default,
|
||||
opt.sample_default)
|
||||
else:
|
||||
LOG.debug(
|
||||
"The type for option %(name)s which is %(type)s is not a "
|
||||
"subclass of types.ConfigType and doesn't provide a "
|
||||
"'format_defaults' method. A default formatter is not "
|
||||
"available so the best-effort formatter will be used.",
|
||||
{'type': opt.type, 'name': opt.name})
|
||||
defaults = _format_defaults(opt)
|
||||
for default_str in defaults:
|
||||
if default_str:
|
||||
default_str = ' ' + default_str
|
||||
lines.append('#%s =%s\n' % (opt.dest, default_str))
|
||||
|
||||
self.writelines(lines)
|
||||
|
||||
def write(self, s):
|
||||
"""Write an arbitrary string to the output file.
|
||||
|
||||
:param s: an arbitrary string
|
||||
"""
|
||||
self.output_file.write(s)
|
||||
|
||||
def writelines(self, l):
|
||||
"""Write an arbitrary sequence of strings to the output file.
|
||||
|
||||
:param l: a list of arbitrary strings
|
||||
"""
|
||||
self.output_file.writelines(l)
|
||||
|
||||
|
||||
def _cleanup_opts(read_opts):
|
||||
"""Cleanup duplicate options in namespace groups
|
||||
|
||||
Return a structure which removes duplicate options from a namespace group.
|
||||
NOTE:(rbradfor) This does not remove duplicated options from repeating
|
||||
groups in different namespaces:
|
||||
|
||||
:param read_opts: a list (namespace, [(group, [opt_1, opt_2])]) tuples
|
||||
:returns: a list of (namespace, [(group, [opt_1, opt_2])]) tuples
|
||||
"""
|
||||
|
||||
# OrderedDict is used specifically in the three levels to maintain the
|
||||
# source order of namespace/group/opt values
|
||||
clean = collections.OrderedDict()
|
||||
for namespace, listing in read_opts:
|
||||
if namespace not in clean:
|
||||
clean[namespace] = collections.OrderedDict()
|
||||
for group, opts in listing:
|
||||
if group not in clean[namespace]:
|
||||
clean[namespace][group] = collections.OrderedDict()
|
||||
for opt in opts:
|
||||
clean[namespace][group][opt.dest] = opt
|
||||
|
||||
# recreate the list of (namespace, [(group, [opt_1, opt_2])]) tuples
|
||||
# from the cleaned structure.
|
||||
cleaned_opts = [
|
||||
(namespace, [(group, list(clean[namespace][group].values()))
|
||||
for group in clean[namespace]])
|
||||
for namespace in clean
|
||||
]
|
||||
|
||||
return cleaned_opts
|
||||
|
||||
|
||||
def _get_raw_opts_loaders(namespaces):
    """Load the option-listing entry points for the given namespaces.

    :param namespaces: a list of namespaces registered under 'oslo.config.opts'
    :returns: a list of (namespace, loader-callable) tuples
    """
    manager = stevedore.named.NamedExtensionManager(
        'oslo.config.opts',
        names=namespaces,
        on_load_failure_callback=on_load_failure_callback,
        invoke_on_load=False)
    return [(ext.name, ext.plugin) for ext in manager]
|
||||
|
||||
|
||||
def _get_opt_default_updaters(namespaces):
    """Return the default-updater hooks registered for *namespaces*."""
    manager = stevedore.named.NamedExtensionManager(
        'oslo.config.opts.defaults',
        names=namespaces,
        on_load_failure_callback=on_load_failure_callback,
        invoke_on_load=False)
    return [ext.plugin for ext in manager]
|
||||
|
||||
|
||||
def _update_defaults(namespaces):
    """Let application hooks update defaults inside libraries."""
    for updater in _get_opt_default_updaters(namespaces):
        updater()
|
||||
|
||||
|
||||
def _list_opts(namespaces):
    """List the options available via the given namespaces.

    Duplicate options from a namespace are removed.

    :param namespaces: a list of namespaces registered under 'oslo.config.opts'
    :returns: a list of (namespace, [(group, [opt_1, opt_2])]) tuples
    """
    # Grab the loader callables first, then run the default-updater hooks:
    # the loaders must execute only after any global default changes have
    # taken effect.
    loaders = _get_raw_opts_loaders(namespaces)
    _update_defaults(namespaces)
    raw_opts = [(namespace, loader()) for namespace, loader in loaders]
    return _cleanup_opts(raw_opts)
|
||||
|
||||
|
||||
def on_load_failure_callback(*args, **kwargs):
    """Propagate an extension load failure instead of swallowing it.

    Passed to stevedore as ``on_load_failure_callback``; it is presumably
    invoked from inside stevedore's exception handler, so the bare raise
    re-raises the original loading error.
    """
    raise
|
||||
|
||||
|
||||
def _output_opts(f, group, namespaces):
|
||||
f.format_group(group)
|
||||
for (namespace, opts) in sorted(namespaces,
|
||||
key=operator.itemgetter(0)):
|
||||
f.write('\n#\n# From %s\n#\n' % namespace)
|
||||
for opt in opts:
|
||||
f.write('\n')
|
||||
try:
|
||||
f.format(opt)
|
||||
except Exception as err:
|
||||
f.write('# Warning: Failed to format sample for %s\n' %
|
||||
(opt.dest,))
|
||||
f.write('# %s\n' % (err,))
|
||||
|
||||
|
||||
def _append_opts_json(f, group, namespaces):
|
||||
f[group] = dict()
|
||||
for (namespace, opts) in sorted(namespaces,
|
||||
key=operator.itemgetter(0)):
|
||||
|
||||
f[group][namespace] = dict()
|
||||
|
||||
for opt in opts:
|
||||
f[group][namespace][opt.name] = dict()
|
||||
f[group][namespace][opt.name]['help'] = opt.help or ''
|
||||
|
||||
f[group][namespace][opt.name]['type'] = dict()
|
||||
option_type = getattr(opt, 'type', None)
|
||||
opt_type = getattr(option_type, 'type_name', 'unknown type')
|
||||
f[group][namespace][opt.name]['type']['name'] = opt_type
|
||||
|
||||
if getattr(opt.type, 'min', None) is not None:
|
||||
f[group][namespace][opt.name]['type']['min'] = opt.type.min
|
||||
|
||||
if getattr(opt.type, 'max', None) is not None:
|
||||
f[group][namespace][opt.name]['type']['max'] = opt.type.min
|
||||
|
||||
if getattr(opt.type, 'choices', None):
|
||||
f[group][namespace][opt.name]['type'][
|
||||
'choices'] = opt.type.choices
|
||||
|
||||
if getattr(opt, 'mutable', None):
|
||||
f[group][namespace][opt.name]['mutable'] = opt.mutable
|
||||
else:
|
||||
f[group][namespace][opt.name]['mutable'] = None
|
||||
f[group][namespace][opt.name]['required'] = opt.required
|
||||
f[group][namespace][opt.name]['secret'] = opt.secret
|
||||
f[group][namespace][opt.name]['default'] = '%s' % opt.default
|
||||
|
||||
f[group][namespace][opt.name]['deprecated'] = []
|
||||
for d in opt.deprecated_opts:
|
||||
f[group][namespace][opt.name]['deprecated'].append(
|
||||
(d.group or 'DEFAULT', d.name or opt.dest))
|
||||
|
||||
f[group][namespace][opt.name][
|
||||
'deprecated_for_removal'] = opt.deprecated_for_removal
|
||||
|
||||
if getattr(opt, 'deprecated_reason', None):
|
||||
f[group][namespace][opt.name][
|
||||
'deprecated_reason'] = opt.deprecated_reason
|
||||
else:
|
||||
f[group][namespace][opt.name][
|
||||
'deprecated_reason'] = None
|
||||
|
||||
|
||||
def _get_group_name(item):
    """Return the group name for a (group, namespaces) tuple.

    The group key may be a cfg.OptGroup — kept as-is elsewhere so its help
    text survives into the generated sample file — or just a plain group
    name string; either way the bare name is returned.
    """
    group = item[0]
    if isinstance(group, cfg.OptGroup):
        return group.name
    return group
|
||||
|
||||
|
||||
def _get_groups(conf_ns):
|
||||
groups = {'DEFAULT': []}
|
||||
for namespace, listing in conf_ns:
|
||||
for group, opts in listing:
|
||||
if not opts:
|
||||
continue
|
||||
namespaces = groups.setdefault(group or 'DEFAULT', [])
|
||||
namespaces.append((namespace, opts))
|
||||
return groups
|
||||
|
||||
|
||||
def generate(conf):
    """Generate a sample config file.

    List all of the options available via the namespaces specified in the
    given configuration and write a description of them to the configured
    output file (stdout when no output file is set) in either 'txt' or
    'json' format.

    :param conf: a ConfigOpts instance containing the generator's
                 configuration
    """
    conf.register_opts(_generator_opts)

    # Collect the options before touching the output file so a loader
    # failure does not truncate an existing file.
    groups = _get_groups(_list_opts(conf.namespace))

    output_file = (open(conf.output_file, 'w')
                   if conf.output_file else sys.stdout)
    try:
        if conf.output_format == 'json':
            generate_json(conf, groups, output_file)
        elif conf.output_format == 'txt':
            generate_txt(conf, groups, output_file)
        # Any other output_format silently produces nothing, matching the
        # original behavior; 'txt' is the registered default.
    finally:
        # BUG FIX: the opened file was previously never closed. Close only
        # files we opened ourselves — never sys.stdout.
        if conf.output_file:
            output_file.close()
|
||||
|
||||
|
||||
def generate_txt(conf, groups, output_file):
    """Write the sections in *groups* to *output_file* as a sample ini."""
    formatter = _OptFormatter(output_file=output_file,
                              wrap_width=conf.wrap_width)

    # The "DEFAULT" section is always emitted first...
    _output_opts(formatter, 'DEFAULT', groups.pop('DEFAULT'))

    # ...followed by every remaining group in alphabetical order.
    for group, namespaces in sorted(groups.items(), key=_get_group_name):
        formatter.write('\n\n')
        _output_opts(formatter, group, namespaces)
|
||||
|
||||
|
||||
def generate_json(conf, groups, output_file):
    """Serialize the sections in *groups* to *output_file* as JSON."""
    import json

    sections = dict()
    _append_opts_json(sections, 'DEFAULT', groups.pop('DEFAULT'))

    # Remaining groups in alphabetical order, mirroring generate_txt().
    for group, namespaces in sorted(groups.items(), key=_get_group_name):
        # for nova, below fix is required for grp conductor, vnc
        if isinstance(group, cfg.OptGroup):
            group = group.name

        _append_opts_json(sections, group, namespaces)

    output_file.write(json.dumps(sections))
|
||||
|
||||
|
||||
def main(args=None):
    """The main function of oslo-config-generator."""
    dist_version = pkg_resources.get_distribution('oslo.config').version
    logging.basicConfig(level=logging.WARN)
    generator_conf = cfg.ConfigOpts()
    register_cli_opts(generator_conf)
    generator_conf(args, version=dist_version)
    generate(generator_conf)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -17,6 +17,7 @@ import functools
|
|||
from oslo_config import cfg
|
||||
from oslo_context import context
|
||||
from oslo_log import log
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from namos.common import config
|
||||
from namos.common import exception
|
||||
|
@ -75,6 +76,28 @@ class ConductorManager(object):
|
|||
|
||||
return service_worker_id
|
||||
|
||||
@request_context
def heart_beat(self, context, identification, dieing=False):
    """Record a liveness beat (or a shutdown) for one service worker.

    :param identification: the worker identification matched against the
        service worker ``pid`` column.
    :param dieing: when True the worker is shutting down, so its row is
        deleted instead of touched. (The spelling "dieing" is part of the
        RPC-facing signature, so it is kept as-is.)
    """
    try:
        sw = db_api.service_worker_get_all_by(context,
                                              pid=identification)
        if len(sw) == 1:
            if not dieing:
                # Touch updated_at so liveness checks based on the update
                # timestamp see this worker as alive.
                db_api.service_worker_update(
                    context,
                    sw[0].id,
                    dict(updated_at=timeutils.utcnow()))
                LOG.info("HEART-BEAT LIVE %s " % identification)
            else:
                db_api.service_worker_delete(context,
                                             sw[0].id)
                LOG.info("HEART-BEAT STOPPED %s " % identification)
        else:
            # Zero or multiple rows match: the beat cannot be attributed
            # to a single worker, so it is only logged.
            LOG.error("HEART-BEAT FAILED, No service worker registered "
                      "with identification %s " % identification)
    except Exception as e:  # noqa
        # Deliberate best-effort: a failed heart beat must never raise
        # back into the caller.
        LOG.error("HEART-BEAT FAILED %s " % e)
|
||||
|
||||
@request_context
|
||||
def service_perspective_get(self,
|
||||
context,
|
||||
|
@ -218,9 +241,11 @@ class ServiceProcessor(object):
|
|||
# TODO(mrkanag) Fix the name, device driver proper !
|
||||
dict(name='%s@%s' % (self.registration_info['pid'],
|
||||
service_component.name),
|
||||
pid=self.registration_info['pid'],
|
||||
pid=self.registration_info['identification'],
|
||||
host=self.registration_info['host'],
|
||||
service_component_id=service_component.id))
|
||||
service_component_id=service_component.id,
|
||||
deleted_at=None
|
||||
))
|
||||
LOG.info('Service Worker %s is created' % service_worker)
|
||||
except exception.AlreadyExist:
|
||||
# TODO(mrkanag) Find a way to purge the dead service worker
|
||||
|
@ -239,7 +264,8 @@ class ServiceProcessor(object):
|
|||
context,
|
||||
service_workers[0].id,
|
||||
dict(
|
||||
pid=self.registration_info['pid'],
|
||||
deleted_at=None,
|
||||
pid=self.registration_info['identification'],
|
||||
name='%s@%s' % (self.registration_info['pid'],
|
||||
service_component.name)
|
||||
))
|
||||
|
@ -253,20 +279,65 @@ class ServiceProcessor(object):
|
|||
# or per service_worker,
|
||||
for cfg_name, cfg_obj in self.registration_info[
|
||||
'config_dict'].iteritems():
|
||||
cfg_obj['service_worker_id'] = service_worker.id
|
||||
|
||||
cfg_schs = db_api.config_schema_get_by(
|
||||
context=context,
|
||||
group=cfg_obj['group'],
|
||||
name=cfg_obj['name']
|
||||
)
|
||||
|
||||
if len(cfg_schs) > 1:
|
||||
cfg_sche = cfg_schs[0]
|
||||
LOG.info("Config Schema %s is existing and is updated" %
|
||||
cfg_sche)
|
||||
else:
|
||||
try:
|
||||
cfg_sche = db_api.config_schema_create(
|
||||
context,
|
||||
dict(
|
||||
namespace='UNKNOWN-NAMOS',
|
||||
default_value=cfg_obj['default_value'],
|
||||
type=cfg_obj['type'],
|
||||
help=cfg_obj['help'],
|
||||
required=cfg_obj['required'],
|
||||
secret=cfg_obj['secret'],
|
||||
mutable=False,
|
||||
group_name=cfg_obj['group'],
|
||||
name=cfg_obj['name']
|
||||
)
|
||||
)
|
||||
LOG.info("Config Schema %s is created" % cfg_sche)
|
||||
except exception.AlreadyExist:
|
||||
cfg_schs = db_api.config_schema_get_by(
|
||||
context=context,
|
||||
group=cfg_obj['group'],
|
||||
name=cfg_obj['name'],
|
||||
namespace='UNKNOWN-NAMOS'
|
||||
)
|
||||
|
||||
cfg_sche = cfg_schs[0]
|
||||
LOG.info("Config Schema %s is existing and is updated" %
|
||||
cfg_sche)
|
||||
|
||||
cfg_obj_ = dict(
|
||||
service_worker_id=service_worker.id,
|
||||
name="%s.%s" % (cfg_obj['group'], cfg_name),
|
||||
value=cfg_obj['value'],
|
||||
oslo_config_schema_id=cfg_sche.id
|
||||
)
|
||||
|
||||
try:
|
||||
config = db_api.config_create(context, cfg_obj)
|
||||
config = db_api.config_create(context, cfg_obj_)
|
||||
LOG.info("Config %s is created" % config)
|
||||
except exception.AlreadyExist:
|
||||
configs = db_api.config_get_by_name_for_service_worker(
|
||||
context,
|
||||
service_worker_id=cfg_obj['service_worker_id'],
|
||||
name=cfg_obj['name'])
|
||||
service_worker_id=cfg_obj_['service_worker_id'],
|
||||
name=cfg_obj_['name'])
|
||||
if len(configs) == 1:
|
||||
config = db_api.config_update(context,
|
||||
configs[0].id,
|
||||
cfg_obj)
|
||||
cfg_obj_)
|
||||
LOG.info("Config %s is existing and is updated" % config)
|
||||
|
||||
return service_worker.id
|
||||
|
|
|
@ -304,12 +304,47 @@ def service_worker_get_all(context):
|
|||
return IMPL.service_worker_get_all(context)
|
||||
|
||||
|
||||
def service_worker_get_all_by(context, **kwargs):
    """Return all service workers matching the given column filters."""
    return IMPL.service_worker_get_all_by(context, **kwargs)
|
||||
|
||||
|
||||
def service_worker_delete(context, _id):
    """Delete the service worker with the given id."""
    return IMPL.service_worker_delete(context, _id)
|
||||
|
||||
|
||||
# Config
|
||||
# config schema
|
||||
def config_schema_create(context, values):
    """Create a config schema row from the given values."""
    return IMPL.config_schema_create(context, values)
|
||||
|
||||
|
||||
def config_schema_update(context, _id, values):
    """Update the config schema with the given id."""
    return IMPL.config_schema_update(context, _id, values)
|
||||
|
||||
|
||||
def config_schema_get(context, _id):
    """Return the config schema with the given id."""
    return IMPL.config_schema_get(context, _id)
|
||||
|
||||
|
||||
def config_schema_get_by_name(context, name):
    """Return the config schema with the given name."""
    return IMPL.config_schema_get_by_name(context, name)
|
||||
|
||||
|
||||
def config_schema_get_by(context,
                         namespace=None,
                         group=None,
                         name=None):
    """Return config schemas filtered by namespace, group and/or name."""
    return IMPL.config_schema_get_by(context, namespace, group, name)
|
||||
|
||||
|
||||
def config_schema_get_all(context):
    """Return every stored config schema."""
    return IMPL.config_schema_get_all(context)
|
||||
|
||||
|
||||
def config_schema_delete(context, _id):
    """Delete the config schema with the given id."""
    return IMPL.config_schema_delete(context, _id)
|
||||
|
||||
|
||||
# Config
|
||||
def config_create(context, values):
    """Create a config row from the given values."""
    return IMPL.config_create(context, values)
|
||||
|
||||
|
@ -344,6 +379,8 @@ def config_delete(context, _id):
|
|||
return IMPL.config_delete(context, _id)
|
||||
|
||||
|
||||
# config file
|
||||
|
||||
def config_file_create(context, values):
|
||||
return IMPL.config_file_create(context, values)
|
||||
|
||||
|
|
|
@ -391,9 +391,9 @@ def _service_populate_demo_data():
|
|||
service_worker = inject_id(service_worker)
|
||||
api.service_worker_create(None, service_worker)
|
||||
|
||||
for config in CONFIG_LIST:
|
||||
config = inject_id(config)
|
||||
api.config_create(None, config)
|
||||
# for config in CONFIG_LIST:
|
||||
# config = inject_id(config)
|
||||
# api.config_create(None, config)
|
||||
|
||||
|
||||
def populate_demo_data():
|
||||
|
@ -418,8 +418,8 @@ def _device_purge_demo_data():
|
|||
|
||||
|
||||
def _service_purge_demo_data():
|
||||
for config in CONFIG_LIST:
|
||||
api.config_delete(None, config.keys()[0])
|
||||
# for config in CONFIG_LIST:
|
||||
# api.config_delete(None, config.keys()[0])
|
||||
for service_worker in SERVICE_WORKER_LIST:
|
||||
api.service_worker_delete(None, service_worker.keys()[0])
|
||||
|
||||
|
|
|
@ -179,6 +179,7 @@ def upgrade():
|
|||
def downgrade():
|
||||
op.drop_table('oslo_config_file')
|
||||
op.drop_table('oslo_config')
|
||||
op.drop_table('oslo_config_schema')
|
||||
op.drop_table('device_driver')
|
||||
op.drop_table('service_worker')
|
||||
op.drop_table('service_component')
|
||||
|
|
|
@ -488,6 +488,10 @@ def service_worker_get_all(context):
|
|||
return _get_all(context, models.ServiceWorker)
|
||||
|
||||
|
||||
def service_worker_get_all_by(context, **kwargs):
|
||||
return _service_worker_get_all_by(context, **kwargs)
|
||||
|
||||
|
||||
def _service_worker_get_all_by(context, **kwargs):
|
||||
return _get_all_by(context, models.ServiceWorker, **kwargs)
|
||||
|
||||
|
@ -496,6 +500,58 @@ def service_worker_delete(context, _id):
|
|||
return _delete(context, models.ServiceWorker, _id)
|
||||
|
||||
|
||||
# Config Schema
|
||||
|
||||
def config_schema_create(context, values):
|
||||
return _create(context, models.OsloConfigSchema(), values)
|
||||
|
||||
|
||||
def config_schema_update(context, _id, values):
|
||||
return _update(context, models.OsloConfigSchema, _id, values)
|
||||
|
||||
|
||||
def config_schema_get(context, _id):
|
||||
config = _get(context, models.OsloConfigSchema, _id)
|
||||
if config is None:
|
||||
raise exception.ConfigNotFound(config_schema_id=_id)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def config_schema_get_by_name(context, name):
|
||||
config = _get_by_name(context, models.OsloConfigSchema, name)
|
||||
if config is None:
|
||||
raise exception.ConfigSchemaNotFound(config_schema_id=name)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def config_schema_get_by(context,
|
||||
namespace=None,
|
||||
group=None,
|
||||
name=None):
|
||||
query = _model_query(context, models.OsloConfigSchema)
|
||||
if name is not None:
|
||||
query = query.filter_by(name=name)
|
||||
if group is not None:
|
||||
query = query.filter_by(group_name=group)
|
||||
if namespace is not None:
|
||||
query = query.filter_by(namespace=namespace)
|
||||
return query.all()
|
||||
|
||||
|
||||
def config_schema_get_all(context):
|
||||
return _get_all(context, models.OsloConfigSchema)
|
||||
|
||||
|
||||
def _config_schema_get_all_by(context, **kwargs):
|
||||
return _get_all_by(context, models.OsloConfigSchema, **kwargs)
|
||||
|
||||
|
||||
def config_schema_delete(context, _id):
|
||||
return _delete(context, models.OsloConfigSchema, _id)
|
||||
|
||||
|
||||
# Config
|
||||
|
||||
def config_create(context, values):
|
||||
|
|
|
@ -271,8 +271,9 @@ class ServiceWorker(BASE,
|
|||
default=lambda: str(uuid.uuid4()))
|
||||
|
||||
pid = sqlalchemy.Column(
|
||||
sqlalchemy.String(32),
|
||||
nullable=False
|
||||
sqlalchemy.String(64),
|
||||
nullable=False,
|
||||
unique=True
|
||||
)
|
||||
host = sqlalchemy.Column(
|
||||
sqlalchemy.String(248),
|
||||
|
@ -284,19 +285,17 @@ class ServiceWorker(BASE,
|
|||
nullable=False)
|
||||
|
||||
|
||||
class OsloConfig(BASE,
|
||||
NamosBase,
|
||||
SoftDelete,
|
||||
Extra):
|
||||
__tablename__ = 'oslo_config'
|
||||
class OsloConfigSchema(BASE,
|
||||
NamosBase,
|
||||
Extra):
|
||||
__tablename__ = 'oslo_config_schema'
|
||||
|
||||
# TODO(mrkanag) Check whether conf is unique across all services or only
|
||||
# sepcific to namespace, otherwise uniqueconstraint is name, group_name
|
||||
__table_args__ = (
|
||||
UniqueConstraint("name", "service_worker_id"),
|
||||
UniqueConstraint("group_name", "name", "namespace"),
|
||||
)
|
||||
|
||||
default_value = sqlalchemy.Column(
|
||||
sqlalchemy.Text
|
||||
)
|
||||
name = sqlalchemy.Column(sqlalchemy.String(255),
|
||||
# unique=True,
|
||||
nullable=False,
|
||||
|
@ -308,10 +307,20 @@ class OsloConfig(BASE,
|
|||
default=''
|
||||
)
|
||||
type = sqlalchemy.Column(
|
||||
sqlalchemy.String(16),
|
||||
sqlalchemy.String(128),
|
||||
nullable=False
|
||||
)
|
||||
value = sqlalchemy.Column(
|
||||
group_name = sqlalchemy.Column(
|
||||
sqlalchemy.String(128),
|
||||
nullable=False
|
||||
)
|
||||
namespace = sqlalchemy.Column(
|
||||
sqlalchemy.String(128),
|
||||
nullable=False
|
||||
)
|
||||
# TODO(mrkanag) default value is some time overriden by services, which
|
||||
# osloconfig allows, so this column should have values per given service
|
||||
default_value = sqlalchemy.Column(
|
||||
sqlalchemy.Text
|
||||
)
|
||||
required = sqlalchemy.Column(
|
||||
|
@ -322,9 +331,38 @@ class OsloConfig(BASE,
|
|||
sqlalchemy.Boolean,
|
||||
default=False
|
||||
)
|
||||
mutable = sqlalchemy.Column(
|
||||
sqlalchemy.Boolean,
|
||||
default=False
|
||||
)
|
||||
|
||||
|
||||
class OsloConfig(BASE,
|
||||
NamosBase,
|
||||
SoftDelete,
|
||||
Extra):
|
||||
__tablename__ = 'oslo_config'
|
||||
|
||||
__table_args__ = (
|
||||
UniqueConstraint("oslo_config_schema_id", "service_worker_id"),
|
||||
)
|
||||
|
||||
name = sqlalchemy.Column(sqlalchemy.String(255),
|
||||
# unique=True,
|
||||
nullable=False,
|
||||
default=lambda: str(uuid.uuid4()))
|
||||
|
||||
value = sqlalchemy.Column(
|
||||
sqlalchemy.Text
|
||||
)
|
||||
file = sqlalchemy.Column(
|
||||
sqlalchemy.String(512)
|
||||
)
|
||||
oslo_config_schema_id = sqlalchemy.Column(
|
||||
Uuid,
|
||||
sqlalchemy.ForeignKey('oslo_config_schema.id'),
|
||||
nullable=False
|
||||
)
|
||||
service_worker_id = sqlalchemy.Column(
|
||||
Uuid,
|
||||
sqlalchemy.ForeignKey('service_worker.id'),
|
||||
|
|
Loading…
Reference in New Issue