ironic-lib 2.0.0 release
meta:version: 2.0.0 meta:series: newton meta:release-type: release meta:announce: openstack-dev@lists.openstack.org meta:pypi: yes meta:first: no meta:release:Author: Dmitry Tantsur <divius.inside@gmail.com> meta:release:Commit: Dmitry Tantsur <divius.inside@gmail.com> meta:release:Change-Id: I21707e84459d904247f5e7808fc9647fb1fc168a meta:release:Code-Review+1: Jim Rollenhagen <jim@jimrollenhagen.com> meta:release:Code-Review+2: Doug Hellmann <doug@doughellmann.com> meta:release:Workflow+1: Doug Hellmann <doug@doughellmann.com> -----BEGIN PGP SIGNATURE----- Comment: GPGTools - http://gpgtools.org iQEcBAABAgAGBQJXg7YfAAoJEDttBqDEKEN6U0UH+QFrsB7PM80oOuQQLIaG3JPm I/WcehvHPs3PwEHbQbxo9sT6rCXYgmrKGA59Mfn1YR1xIoMUqFDsQpyo5+aIc/8U KTOP2cgxSVTIKD+KOFtpkHyMmOXjgLiFtsFPjg+ctqmzxjrU1OKPlsDoAewIxsG+ xsKNJ9rjQOffJtYTz8x8PcdYAb/yXBSJNeS7bXLwmF9ESPHOWP/K82rBH8F/LiUp ahSHsQJbJRo2mifoVMRDc3SSaGskXb8UHC6cJh3hH0ELcR4FdWrZDkpSuJFki59i MxZMBmt6tmGZHJLym3dY1yirRL7kaQuMNRrehYPfdgLyk/reZyyJHhlSlDACS6w= =Xc8F -----END PGP SIGNATURE----- Merge tag '2.0.0' into debian/newton ironic-lib 2.0.0 release meta:version: 2.0.0 meta:series: newton meta:release-type: release meta:announce: openstack-dev@lists.openstack.org meta:pypi: yes meta:first: no meta:release:Author: Dmitry Tantsur <divius.inside@gmail.com> meta:release:Commit: Dmitry Tantsur <divius.inside@gmail.com> meta:release:Change-Id: I21707e84459d904247f5e7808fc9647fb1fc168a meta:release:Code-Review+1: Jim Rollenhagen <jim@jimrollenhagen.com> meta:release:Code-Review+2: Doug Hellmann <doug@doughellmann.com> meta:release:Workflow+1: Doug Hellmann <doug@doughellmann.com>
This commit is contained in:
commit
3ec6c573f4
|
@ -32,3 +32,4 @@ AUTHORS
|
|||
ChangeLog
|
||||
*.sqlite
|
||||
*~
|
||||
.idea
|
||||
|
|
|
@ -5,7 +5,8 @@ ironic_lib
|
|||
Overview
|
||||
--------
|
||||
|
||||
A common library to be used by various projects in the Ironic ecosystem.
|
||||
A common library to be used **exclusively** by projects under the `Ironic
|
||||
governance <http://governance.openstack.org/reference/projects/ironic.html>`_.
|
||||
|
||||
Running Tests
|
||||
-------------
|
||||
|
|
|
@ -80,7 +80,7 @@ class DiskPartitioner(object):
|
|||
use_standard_locale=True, run_as_root=True)
|
||||
|
||||
def add_partition(self, size, part_type='primary', fs_type='',
|
||||
bootable=False):
|
||||
boot_flag=None):
|
||||
"""Add a partition.
|
||||
|
||||
:param size: The size of the partition in MiB.
|
||||
|
@ -90,15 +90,16 @@ class DiskPartitioner(object):
|
|||
fat16, HFS, linux-swap, NTFS, reiserfs, ufs.
|
||||
If blank (''), it will create a Linux native
|
||||
partition (83).
|
||||
:param bootable: Boolean value; whether the partition is bootable
|
||||
or not.
|
||||
:param boot_flag: Boot flag that needs to be configured on the
|
||||
partition. Ignored if None. It can take values
|
||||
'bios_grub', 'boot'.
|
||||
:returns: The partition number.
|
||||
|
||||
"""
|
||||
self._partitions.append({'size': size,
|
||||
'type': part_type,
|
||||
'fs_type': fs_type,
|
||||
'bootable': bootable})
|
||||
'boot_flag': boot_flag})
|
||||
return len(self._partitions)
|
||||
|
||||
def get_partitions(self):
|
||||
|
@ -145,8 +146,8 @@ class DiskPartitioner(object):
|
|||
end = start + part['size']
|
||||
cmd_args.extend(['mkpart', part['type'], part['fs_type'],
|
||||
str(start), str(end)])
|
||||
if part['bootable']:
|
||||
cmd_args.extend(['set', str(num), 'boot', 'on'])
|
||||
if part['boot_flag']:
|
||||
cmd_args.extend(['set', str(num), part['boot_flag'], 'on'])
|
||||
start = end
|
||||
|
||||
self._exec(*cmd_args)
|
||||
|
|
|
@ -45,17 +45,18 @@ opts = [
|
|||
cfg.IntOpt('efi_system_partition_size',
|
||||
default=200,
|
||||
help='Size of EFI system partition in MiB when configuring '
|
||||
'UEFI systems for local boot.',
|
||||
deprecated_group='deploy'),
|
||||
'UEFI systems for local boot.'),
|
||||
cfg.IntOpt('bios_boot_partition_size',
|
||||
default=1,
|
||||
help='Size of BIOS Boot partition in MiB when configuring '
|
||||
'GPT partitioned systems for local boot in BIOS.'),
|
||||
cfg.StrOpt('dd_block_size',
|
||||
default='1M',
|
||||
help='Block size to use when writing to the nodes disk.',
|
||||
deprecated_group='deploy'),
|
||||
help='Block size to use when writing to the nodes disk.'),
|
||||
cfg.IntOpt('iscsi_verify_attempts',
|
||||
default=3,
|
||||
help='Maximum attempts to verify an iSCSI connection is '
|
||||
'active, sleeping 1 second between attempts.',
|
||||
deprecated_group='deploy'),
|
||||
'active, sleeping 1 second between attempts.'),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
@ -180,9 +181,14 @@ def make_partitions(dev, root_mb, swap_mb, ephemeral_mb,
|
|||
if boot_mode == "uefi" and boot_option == "local":
|
||||
part_num = dp.add_partition(CONF.disk_utils.efi_system_partition_size,
|
||||
fs_type='fat32',
|
||||
bootable=True)
|
||||
boot_flag='boot')
|
||||
part_dict['efi system partition'] = part_template % part_num
|
||||
|
||||
if boot_mode == "bios" and boot_option == "local" and disk_label == "gpt":
|
||||
part_num = dp.add_partition(CONF.disk_utils.bios_boot_partition_size,
|
||||
boot_flag='bios_grub')
|
||||
part_dict['BIOS Boot partition'] = part_template % part_num
|
||||
|
||||
if ephemeral_mb:
|
||||
LOG.debug("Add ephemeral partition (%(size)d MB) to device: %(dev)s "
|
||||
"for node %(node)s",
|
||||
|
@ -208,8 +214,14 @@ def make_partitions(dev, root_mb, swap_mb, ephemeral_mb,
|
|||
LOG.debug("Add root partition (%(size)d MB) to device: %(dev)s "
|
||||
"for node %(node)s",
|
||||
{'dev': dev, 'size': root_mb, 'node': node_uuid})
|
||||
part_num = dp.add_partition(root_mb, bootable=(boot_option == "local" and
|
||||
boot_mode == "bios"))
|
||||
|
||||
boot_val = None
|
||||
if (boot_mode == "bios" and boot_option == "local" and
|
||||
disk_label == "msdos"):
|
||||
boot_val = 'boot'
|
||||
|
||||
part_num = dp.add_partition(root_mb, boot_flag=boot_val)
|
||||
|
||||
part_dict['root'] = part_template % part_num
|
||||
|
||||
if commit:
|
||||
|
@ -316,10 +328,19 @@ def destroy_disk_metadata(dev, node_uuid):
|
|||
# https://bugs.launchpad.net/ironic/+bug/1317647
|
||||
LOG.debug("Start destroy disk metadata for node %(node)s.",
|
||||
{'node': node_uuid})
|
||||
utils.execute('wipefs', '--all', dev,
|
||||
run_as_root=True,
|
||||
check_exit_code=[0],
|
||||
use_standard_locale=True)
|
||||
try:
|
||||
utils.execute('wipefs', '--force', '--all', dev,
|
||||
run_as_root=True,
|
||||
use_standard_locale=True)
|
||||
except processutils.ProcessExecutionError as e:
|
||||
# NOTE(zhenguo): Check if --force option is supported for wipefs,
|
||||
# if not, we should try without it.
|
||||
if '--force' in str(e):
|
||||
utils.execute('wipefs', '--all', dev,
|
||||
run_as_root=True,
|
||||
use_standard_locale=True)
|
||||
else:
|
||||
raise e
|
||||
LOG.info(_LI("Disk metadata on %(dev)s successfully destroyed for node "
|
||||
"%(node)s"), {'dev': dev, 'node': node_uuid})
|
||||
|
||||
|
|
|
@ -99,3 +99,7 @@ class InstanceDeployFailure(IronicException):
|
|||
class FileSystemNotSupported(IronicException):
    """Error raised when asked to create an unsupported file system."""
    # Expects a 'fs' keyword naming the rejected file system type.
    message = _("Failed to create a file system. "
                "File system %(fs)s is not supported.")
|
||||
|
||||
|
||||
class InvalidMetricConfig(IronicException):
    """Error raised for an invalid metrics configuration value."""
    # Expects a 'reason' keyword describing what was wrong.
    message = _("Invalid value for metrics config option: %(reason)s")
|
||||
|
|
|
@ -0,0 +1,300 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import functools
|
||||
import random
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
from ironic_lib.common.i18n import _
|
||||
|
||||
|
||||
class Timer(object):
    """Time a callable or a ``with`` block and report the duration.

    Bound to a MetricLogger; the measured wall-clock duration is
    reported in milliseconds via ``send_timer``. For example:

      from ironic_lib import metrics

      METRICS = metrics.get_metrics_logger()

      @METRICS.timer('foo')
      def foo(bar, baz):
          print(bar, baz)

      with METRICS.timer('foo'):
          do_something()
    """

    def __init__(self, metrics, name):
        """Init the decorator / context manager.

        :param metrics: The metric logger
        :param name: The metric name
        :raises: TypeError if name is not a string
        """
        if not isinstance(name, six.string_types):
            raise TypeError(_("The metric name is expected to be a string. "
                              "Value is %s") % name)
        self.metrics = metrics
        self.name = name
        self._start = None

    def __call__(self, f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            begin = _time()
            result = f(*args, **kwargs)
            elapsed = _time() - begin
            # Timings are reported in milliseconds.
            full_name = self.metrics.get_metric_name(self.name)
            self.metrics.send_timer(full_name, elapsed * 1000)
            return result
        return wrapped

    def __enter__(self):
        self._start = _time()

    def __exit__(self, exc_type, exc_val, exc_tb):
        elapsed = _time() - self._start
        # Timings are reported in milliseconds.
        full_name = self.metrics.get_metric_name(self.name)
        self.metrics.send_timer(full_name, elapsed * 1000)
|
||||
|
||||
|
||||
class Counter(object):
    """Count entries into a callable or a ``with`` block.

    Bound to a MetricLogger; each entry sends a counter increment of 1
    via ``send_counter``, optionally sampled. For example:

      from ironic_lib import metrics

      METRICS = metrics.get_metrics_logger()

      @METRICS.counter('foo')
      def foo(bar, baz):
          print(bar, baz)

      with METRICS.counter('foo'):
          do_something()
    """

    def __init__(self, metrics, name, sample_rate):
        """Init the decorator / context manager.

        :param metrics: The metric logger
        :param name: The metric name
        :param sample_rate: Probabilistic rate at which the values will be sent
        :raises: TypeError if name is not a string
        :raises: ValueError if sample_rate is outside [0.0, 1.0]
        """
        if not isinstance(name, six.string_types):
            raise TypeError(_("The metric name is expected to be a string. "
                              "Value is %s") % name)

        rate_invalid = (sample_rate is not None and
                        not 0.0 <= sample_rate <= 1.0)
        if rate_invalid:
            raise ValueError(_("sample_rate is set to %s. Value must be None "
                               "or in the interval [0.0, 1.0]") % sample_rate)

        self.metrics = metrics
        self.name = name
        self.sample_rate = sample_rate

    def __call__(self, f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            # Count the call before invoking the wrapped callable.
            self.metrics.send_counter(
                self.metrics.get_metric_name(self.name),
                1, sample_rate=self.sample_rate)
            return f(*args, **kwargs)
        return wrapped

    def __enter__(self):
        self.metrics.send_counter(self.metrics.get_metric_name(self.name),
                                  1, sample_rate=self.sample_rate)

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
|
||||
|
||||
|
||||
class Gauge(object):
    """Report a callable's return value as a gauge reading.

    Bound to a MetricLogger; the decorated function's result is sent
    via ``send_gauge`` after each call. For example:

      from ironic_lib import metrics

      METRICS = metrics.get_metrics_logger()

      @METRICS.gauge('foo')
      def foo(bar, baz):
          print(bar, baz)

    NOTE: unlike Timer and Counter, Gauge is a decorator only — it
    does not implement the context-manager protocol.
    """

    def __init__(self, metrics, name):
        """Init the decorator.

        :param metrics: The metric logger
        :param name: The metric name
        :raises: TypeError if name is not a string
        """
        if not isinstance(name, six.string_types):
            raise TypeError(_("The metric name is expected to be a string. "
                              "Value is %s") % name)
        self.metrics = metrics
        self.name = name

    def __call__(self, f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            result = f(*args, **kwargs)
            # The callable's return value is the gauge reading.
            full_name = self.metrics.get_metric_name(self.name)
            self.metrics.send_gauge(full_name, result)
            return result
        return wrapped
|
||||
|
||||
|
||||
def _time():
|
||||
"""Wraps time.time() for simpler testing."""
|
||||
return time.time()
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class MetricLogger(object):
    """Abstract base class for metrics loggers.

    A MetricLogger sends data to a backend (noop or statsd).
    The data can be a gauge, a counter, or a timer.

    The data sent to the backend is composed of:
      - a full metric name
      - a numeric value

    The format of the full metric name is:
      _prefix<delim>name
    where:
      - _prefix: [global_prefix<delim>][uuid<delim>][host_name<delim>]prefix
      - name: the name of this metric
      - <delim>: the delimiter. Default is '.'
    """

    def __init__(self, prefix='', delimiter='.'):
        """Init a MetricLogger.

        :param prefix: Prefix for this metric logger. This string will prefix
            all metric names.
        :param delimiter: Delimiter used to generate the full metric name.
        """
        self._prefix = prefix
        self._delimiter = delimiter

    def get_metric_name(self, name):
        """Get the full metric name.

        The format of the full metric name is:
          _prefix<delim>name
        where:
          - _prefix: [global_prefix<delim>][uuid<delim>][host_name<delim>]
            prefix
          - name: the name of this metric
          - <delim>: the delimiter. Default is '.'

        :param name: The metric name.
        :return: The full metric name, with logger prefix, as a string.
        """
        if self._prefix:
            return self._delimiter.join([self._prefix, name])
        # No prefix configured; the metric name is already complete.
        return name

    def send_gauge(self, name, value):
        """Send gauge metric data.

        Gauges are simple values.
        The backend will set the value of gauge 'name' to 'value'.

        :param name: Metric name
        :param value: Metric numeric value that will be sent to the backend
        """
        self._gauge(name, value)

    def send_counter(self, name, value, sample_rate=None):
        """Send counter metric data.

        Counters are used to count how many times an event occurred.
        The backend will increment the counter 'name' by the value 'value'.

        Optionally, specify sample_rate in the interval [0.0, 1.0] to
        sample data probabilistically where:

            P(send metric data) = sample_rate

        If sample_rate is None, then always send metric data, but do not
        have the backend send sample rate information (if supported).

        :param name: Metric name
        :param value: Metric numeric value that will be sent to the backend
        :param sample_rate: Probabilistic rate at which the values will be
            sent. Value must be None or in the interval [0.0, 1.0].
        """
        if sample_rate is not None and random.random() >= sample_rate:
            # Probabilistically drop this sample.
            return
        return self._counter(name, value, sample_rate=sample_rate)

    def send_timer(self, name, value):
        """Send timer data.

        Timers are used to measure how long it took to do something.

        :param name: Metric name
        :param value: Metric numeric value that will be sent to the backend
        """
        self._timer(name, value)

    def timer(self, name):
        """Return a Timer bound to this logger for metric *name*."""
        return Timer(self, name)

    def counter(self, name, sample_rate=None):
        """Return a Counter bound to this logger for metric *name*."""
        return Counter(self, name, sample_rate)

    def gauge(self, name):
        """Return a Gauge bound to this logger for metric *name*."""
        return Gauge(self, name)

    @abc.abstractmethod
    def _gauge(self, name, value):
        """Abstract method for backends to implement gauge behavior."""

    @abc.abstractmethod
    def _counter(self, name, value, sample_rate=None):
        """Abstract method for backends to implement counter behavior."""

    @abc.abstractmethod
    def _timer(self, name, value):
        """Abstract method for backends to implement timer behavior."""
|
||||
|
||||
|
||||
class NoopMetricLogger(MetricLogger):
    """Noop metric logger that throws away all metric data."""

    def _gauge(self, name, value):
        """Discard the gauge reading."""
        pass

    def _counter(self, name, value, sample_rate=None):
        """Discard the counter sample."""
        pass

    def _timer(self, name, value):
        """Discard the timing sample.

        NOTE: the parameter was renamed from ``m_name`` to ``name`` for
        consistency with the MetricLogger abstract signature; the method
        is private and is invoked positionally.
        """
        pass
|
|
@ -0,0 +1,108 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import contextlib
|
||||
import socket
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from ironic_lib.common.i18n import _LW
|
||||
from ironic_lib import metrics
|
||||
|
||||
# Configuration options for the statsd metrics backend; registered
# below under the [metrics_statsd] group.
statsd_opts = [
    cfg.StrOpt('statsd_host',
               default='localhost',
               help='Host for use with the statsd backend.'),
    cfg.PortOpt('statsd_port',
                default=8125,
                help='Port to use with the statsd backend.')
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(statsd_opts, group='metrics_statsd')
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class StatsdMetricLogger(metrics.MetricLogger):
    """Metric logger that reports data via the statsd protocol."""

    # statsd wire-format type codes.
    GAUGE_TYPE = 'g'
    COUNTER_TYPE = 'c'
    TIMER_TYPE = 'ms'

    def __init__(self, prefix, delimiter='.', host=None, port=None):
        """Initialize a StatsdMetricLogger

        The logger uses the given prefix list, delimiter, host, and port.

        :param prefix: Prefix for this metric logger.
        :param delimiter: Delimiter used to generate the full metric name.
        :param host: The statsd host; defaults to the
            [metrics_statsd]statsd_host config option.
        :param port: The statsd port; defaults to the
            [metrics_statsd]statsd_port config option.
        """
        super(StatsdMetricLogger, self).__init__(prefix,
                                                 delimiter=delimiter)

        self._host = host or CONF.metrics_statsd.statsd_host
        self._port = port or CONF.metrics_statsd.statsd_port

        self._target = (self._host, self._port)

    def _send(self, name, value, metric_type, sample_rate=None):
        """Send metrics to the statsd backend

        :param name: Metric name
        :param value: Metric value
        :param metric_type: Metric type (GAUGE_TYPE, COUNTER_TYPE,
                            or TIMER_TYPE)
        :param sample_rate: Probabilistic rate at which the values will be sent
        """
        if sample_rate is None:
            metric = '%s:%s|%s' % (name, value, metric_type)
        else:
            # NOTE(review): the statsd reference format for sampled data
            # is '<name>:<value>|<type>|@<rate>' — confirm the target
            # server accepts this '@<rate>' form without the extra '|'.
            metric = '%s:%s|%s@%s' % (name, value, metric_type, sample_rate)

        # Ideally, we'd cache a sending socket in self, but that
        # results in a socket getting shared by multiple green threads.
        with contextlib.closing(self._open_socket()) as sock:
            try:
                sock.settimeout(0.0)
                # socket.sendto() requires bytes on Python 3; the metric
                # line is plain ASCII, so the default encode is safe
                # (and is a no-op on Python 2 str input).
                sock.sendto(metric.encode(), self._target)
            except socket.error as e:
                LOG.warning(_LW("Failed to send the metric value to "
                                "host %(host)s port %(port)s. "
                                "Error: %(error)s"),
                            {'host': self._host, 'port': self._port,
                             'error': e})

    def _open_socket(self):
        """Return a fresh UDP socket for a single send."""
        return socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    def _gauge(self, name, value):
        """Send a gauge reading to statsd."""
        return self._send(name, value, self.GAUGE_TYPE)

    def _counter(self, name, value, sample_rate=None):
        """Send a counter increment to statsd."""
        return self._send(name, value, self.COUNTER_TYPE,
                          sample_rate=sample_rate)

    def _timer(self, name, value):
        """Send a timing value (milliseconds) to statsd."""
        return self._send(name, value, self.TIMER_TYPE)
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator.

    :return: a list of (group name, options) tuples.
    """
    group = 'metrics_statsd'
    return [(group, statsd_opts)]
|
|
@ -0,0 +1,100 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
import six
|
||||
|
||||
from ironic_lib.common.i18n import _
|
||||
from ironic_lib import exception
|
||||
from ironic_lib import metrics
|
||||
from ironic_lib import metrics_statsd
|
||||
|
||||
# Configuration options for the metrics system; registered below under
# the [metrics] group.
metrics_opts = [
    cfg.StrOpt('backend',
               default='noop',
               choices=['noop', 'statsd'],
               help='Backend to use for the metrics system.'),
    cfg.BoolOpt('prepend_host',
                default=False,
                help='Prepend the hostname to all metric names. '
                     'The format of metric names is '
                     '[global_prefix.][host_name.]prefix.metric_name.'),
    cfg.BoolOpt('prepend_host_reverse',
                default=True,
                help='Split the prepended host value by "." and reverse it '
                     '(to better match the reverse hierarchical form of '
                     'domain names).'),
    cfg.StrOpt('global_prefix',
               help='Prefix all metric names with this value. '
                    'By default, there is no global prefix. '
                    'The format of metric names is '
                    '[global_prefix.][host_name.]prefix.metric_name.')
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
CONF.register_opts(metrics_opts, group='metrics')
|
||||
|
||||
|
||||
def get_metrics_logger(prefix='', backend=None, host=None, delimiter='.'):
    """Return a metric logger with the specified prefix.

    The format of the prefix is:
      [global_prefix<delim>][host_name<delim>]prefix
    where <delim> is the delimiter (default is '.')

    :param prefix: Prefix for this metric logger.
        Value should be a string or None.
    :param backend: Backend to use for the metrics system.
        Possible values are 'noop' and 'statsd'.
    :param host: Name of this node.
    :param delimiter: Delimiter to use for the metrics name.
    :return: The new MetricLogger.
    :raises: exception.InvalidMetricConfig on a bad prefix or backend.
    """
    if not isinstance(prefix, six.string_types):
        msg = (_("This metric prefix (%s) is of unsupported type. "
                 "Value should be a string or None")
               % str(prefix))
        raise exception.InvalidMetricConfig(msg)

    if CONF.metrics.prepend_host and host:
        if CONF.metrics.prepend_host_reverse:
            # e.g. 'a.example.com' -> 'com.example.a'
            host = '.'.join(reversed(host.split('.')))
        prefix = delimiter.join([host, prefix]) if prefix else host

    global_prefix = CONF.metrics.global_prefix
    if global_prefix:
        prefix = (delimiter.join([global_prefix, prefix])
                  if prefix else global_prefix)

    backend = backend or CONF.metrics.backend
    if backend == 'statsd':
        return metrics_statsd.StatsdMetricLogger(prefix, delimiter=delimiter)
    if backend == 'noop':
        return metrics.NoopMetricLogger(prefix, delimiter=delimiter)

    msg = (_("The backend is set to an unsupported type: "
             "%s. Value should be 'noop' or 'statsd'.")
           % backend)
    raise exception.InvalidMetricConfig(msg)
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator.

    :return: a list of (group name, options) tuples.
    """
    group = 'metrics'
    return [(group, metrics_opts)]
|
|
@ -30,21 +30,26 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
dp = disk_partitioner.DiskPartitioner('/dev/fake')
|
||||
dp.add_partition(1024)
|
||||
dp.add_partition(512, fs_type='linux-swap')
|
||||
dp.add_partition(2048, bootable=True)
|
||||
expected = [(1, {'bootable': False,
|
||||
dp.add_partition(2048, boot_flag='boot')
|
||||
dp.add_partition(2048, boot_flag='bios_grub')
|
||||
expected = [(1, {'boot_flag': None,
|
||||
'fs_type': '',
|
||||
'type': 'primary',
|
||||
'size': 1024}),
|
||||
(2, {'bootable': False,
|
||||
(2, {'boot_flag': None,
|
||||
'fs_type': 'linux-swap',
|
||||
'type': 'primary',
|
||||
'size': 512}),
|
||||
(3, {'bootable': True,
|
||||
(3, {'boot_flag': 'boot',
|
||||
'fs_type': '',
|
||||
'type': 'primary',
|
||||
'size': 2048}),
|
||||
(4, {'boot_flag': 'bios_grub',
|
||||
'fs_type': '',
|
||||
'type': 'primary',
|
||||
'size': 2048})]
|
||||
partitions = [(n, p) for n, p in dp.get_partitions()]
|
||||
self.assertThat(partitions, HasLength(3))
|
||||
self.assertThat(partitions, HasLength(4))
|
||||
self.assertEqual(expected, partitions)
|
||||
|
||||
@mock.patch.object(disk_partitioner.DiskPartitioner, '_exec',
|
||||
|
@ -52,11 +57,15 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
@mock.patch.object(utils, 'execute', autospec=True)
|
||||
def test_commit(self, mock_utils_exc, mock_disk_partitioner_exec):
|
||||
dp = disk_partitioner.DiskPartitioner('/dev/fake')
|
||||
fake_parts = [(1, {'bootable': False,
|
||||
fake_parts = [(1, {'boot_flag': None,
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1}),
|
||||
(2, {'bootable': True,
|
||||
(2, {'boot_flag': 'boot',
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1}),
|
||||
(3, {'boot_flag': 'bios_grub',
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1})]
|
||||
|
@ -69,7 +78,9 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
mock.ANY, 'mklabel', 'msdos',
|
||||
'mkpart', 'fake-type', 'fake-fs-type', '1', '2',
|
||||
'mkpart', 'fake-type', 'fake-fs-type', '2', '3',
|
||||
'set', '2', 'boot', 'on')
|
||||
'set', '2', 'boot', 'on',
|
||||
'mkpart', 'fake-type', 'fake-fs-type', '3', '4',
|
||||
'set', '3', 'bios_grub', 'on')
|
||||
mock_utils_exc.assert_called_once_with(
|
||||
'fuser', '/dev/fake', run_as_root=True, check_exit_code=[0, 1])
|
||||
|
||||
|
@ -80,11 +91,11 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
def test_commit_with_device_is_busy_once(self, mock_utils_exc,
|
||||
mock_disk_partitioner_exec):
|
||||
dp = disk_partitioner.DiskPartitioner('/dev/fake')
|
||||
fake_parts = [(1, {'bootable': False,
|
||||
fake_parts = [(1, {'boot_flag': None,
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1}),
|
||||
(2, {'bootable': True,
|
||||
(2, {'boot_flag': 'boot',
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1})]
|
||||
|
@ -111,11 +122,11 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
def test_commit_with_device_is_always_busy(self, mock_utils_exc,
|
||||
mock_disk_partitioner_exec):
|
||||
dp = disk_partitioner.DiskPartitioner('/dev/fake')
|
||||
fake_parts = [(1, {'bootable': False,
|
||||
fake_parts = [(1, {'boot_flag': None,
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1}),
|
||||
(2, {'bootable': True,
|
||||
(2, {'boot_flag': 'boot',
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1})]
|
||||
|
@ -142,11 +153,11 @@ class DiskPartitionerTestCase(test_base.BaseTestCase):
|
|||
def test_commit_with_device_disconnected(self, mock_utils_exc,
|
||||
mock_disk_partitioner_exec):
|
||||
dp = disk_partitioner.DiskPartitioner('/dev/fake')
|
||||
fake_parts = [(1, {'bootable': False,
|
||||
fake_parts = [(1, {'boot_flag': None,
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1}),
|
||||
(2, {'bootable': True,
|
||||
(2, {'boot_flag': 'boot',
|
||||
'fs_type': 'fake-fs-type',
|
||||
'type': 'fake-type',
|
||||
'size': 1})]
|
||||
|
|
|
@ -37,7 +37,7 @@ from ironic_lib import utils
|
|||
CONF = cfg.CONF
|
||||
|
||||
|
||||
@mock.patch.object(utils, 'execute')
|
||||
@mock.patch.object(utils, 'execute', autospec=True)
|
||||
class ListPartitionsTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_correct(self, execute_mock):
|
||||
|
@ -60,7 +60,7 @@ BYT;
|
|||
'parted', '-s', '-m', '/dev/fake', 'unit', 'MiB', 'print',
|
||||
use_standard_locale=True, run_as_root=True)
|
||||
|
||||
@mock.patch.object(disk_utils.LOG, 'warning')
|
||||
@mock.patch.object(disk_utils.LOG, 'warning', autospec=True)
|
||||
def test_incorrect(self, log_mock, execute_mock):
|
||||
output = """
|
||||
BYT;
|
||||
|
@ -164,8 +164,8 @@ class WorkOnDiskTestCase(test_base.BaseTestCase):
|
|||
boot_mode="bios",
|
||||
disk_label=None)
|
||||
|
||||
@mock.patch.object(utils, 'unlink_without_raise')
|
||||
@mock.patch.object(disk_utils, '_get_configdrive')
|
||||
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
|
||||
@mock.patch.object(disk_utils, '_get_configdrive', autospec=True)
|
||||
def test_no_configdrive_partition(self, mock_configdrive, mock_unlink):
|
||||
mock_configdrive.return_value = (10, 'fake-path')
|
||||
swap_part = '/dev/fake-part1'
|
||||
|
@ -229,7 +229,7 @@ class WorkOnDiskTestCase(test_base.BaseTestCase):
|
|||
disk_label='gpt')
|
||||
|
||||
|
||||
@mock.patch.object(utils, 'execute')
|
||||
@mock.patch.object(utils, 'execute', autospec=True)
|
||||
class MakePartitionsTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -240,6 +240,8 @@ class MakePartitionsTestCase(test_base.BaseTestCase):
|
|||
self.ephemeral_mb = 0
|
||||
self.configdrive_mb = 0
|
||||
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
|
||||
self.efi_size = CONF.disk_utils.efi_system_partition_size
|
||||
self.bios_size = CONF.disk_utils.bios_boot_partition_size
|
||||
|
||||
def _get_parted_cmd(self, dev, label=None):
|
||||
if label is None:
|
||||
|
@ -248,17 +250,44 @@ class MakePartitionsTestCase(test_base.BaseTestCase):
|
|||
return ['parted', '-a', 'optimal', '-s', dev,
|
||||
'--', 'unit', 'MiB', 'mklabel', label]
|
||||
|
||||
def _test_make_partitions(self, mock_exc, boot_option, disk_label=None):
|
||||
def _test_make_partitions(self, mock_exc, boot_option, boot_mode='bios',
|
||||
disk_label=None):
|
||||
mock_exc.return_value = (None, None)
|
||||
disk_utils.make_partitions(self.dev, self.root_mb, self.swap_mb,
|
||||
self.ephemeral_mb, self.configdrive_mb,
|
||||
self.node_uuid, boot_option=boot_option,
|
||||
disk_label=disk_label)
|
||||
boot_mode=boot_mode, disk_label=disk_label)
|
||||
|
||||
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1', '513',
|
||||
'mkpart', 'primary', '', '513', '1537']
|
||||
if boot_option == "local":
|
||||
expected_mkpart.extend(['set', '2', 'boot', 'on'])
|
||||
_s = lambda x, sz: x + sz
|
||||
|
||||
if boot_option == "local" and boot_mode == "uefi":
|
||||
add_efi_sz = lambda x: str(_s(x, self.efi_size))
|
||||
expected_mkpart = ['mkpart', 'primary', 'fat32', '1',
|
||||
add_efi_sz(1),
|
||||
'set', '1', 'boot', 'on',
|
||||
'mkpart', 'primary', 'linux-swap',
|
||||
add_efi_sz(1), add_efi_sz(513), 'mkpart',
|
||||
'primary', '', add_efi_sz(513),
|
||||
add_efi_sz(1537)]
|
||||
else:
|
||||
if boot_option == "local":
|
||||
if disk_label == "gpt":
|
||||
add_bios_sz = lambda x: str(_s(x, self.bios_size))
|
||||
expected_mkpart = ['mkpart', 'primary', '', '1',
|
||||
add_bios_sz(1),
|
||||
'set', '1', 'bios_grub', 'on',
|
||||
'mkpart', 'primary', 'linux-swap',
|
||||
add_bios_sz(1), add_bios_sz(513),
|
||||
'mkpart', 'primary', '',
|
||||
add_bios_sz(513), add_bios_sz(1537)]
|
||||
else:
|
||||
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1',
|
||||
'513', 'mkpart', 'primary', '', '513',
|
||||
'1537', 'set', '2', 'boot', 'on']
|
||||
else:
|
||||
expected_mkpart = ['mkpart', 'primary', 'linux-swap', '1',
|
||||
'513', 'mkpart', 'primary', '', '513',
|
||||
'1537']
|
||||
self.dev = 'fake-dev'
|
||||
parted_cmd = (self._get_parted_cmd(self.dev, disk_label) +
|
||||
expected_mkpart)
|
||||
|
@ -275,6 +304,14 @@ class MakePartitionsTestCase(test_base.BaseTestCase):
|
|||
def test_make_partitions_local_boot(self, mock_exc):
|
||||
self._test_make_partitions(mock_exc, boot_option="local")
|
||||
|
||||
def test_make_partitions_local_boot_uefi(self, mock_exc):
|
||||
self._test_make_partitions(mock_exc, boot_option="local",
|
||||
boot_mode="uefi", disk_label="gpt")
|
||||
|
||||
def test_make_partitions_local_boot_gpt_bios(self, mock_exc):
|
||||
self._test_make_partitions(mock_exc, boot_option="local",
|
||||
disk_label="gpt")
|
||||
|
||||
def test_make_partitions_disk_label_gpt(self, mock_exc):
|
||||
self._test_make_partitions(mock_exc, boot_option="netboot",
|
||||
disk_label="gpt")
|
||||
|
@ -341,7 +378,7 @@ class MakePartitionsTestCase(test_base.BaseTestCase):
|
|||
self.assertEqual(expected_result, result)
|
||||
|
||||
|
||||
@mock.patch.object(utils, 'execute')
|
||||
@mock.patch.object(utils, 'execute', autospec=True)
|
||||
class DestroyMetaDataTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -350,9 +387,8 @@ class DestroyMetaDataTestCase(test_base.BaseTestCase):
|
|||
self.node_uuid = "12345678-1234-1234-1234-1234567890abcxyz"
|
||||
|
||||
def test_destroy_disk_metadata(self, mock_exec):
|
||||
expected_calls = [mock.call('wipefs', '--all', 'fake-dev',
|
||||
expected_calls = [mock.call('wipefs', '--force', '--all', 'fake-dev',
|
||||
run_as_root=True,
|
||||
check_exit_code=[0],
|
||||
use_standard_locale=True)]
|
||||
disk_utils.destroy_disk_metadata(self.dev, self.node_uuid)
|
||||
mock_exec.assert_has_calls(expected_calls)
|
||||
|
@ -360,9 +396,8 @@ class DestroyMetaDataTestCase(test_base.BaseTestCase):
|
|||
def test_destroy_disk_metadata_wipefs_fail(self, mock_exec):
|
||||
mock_exec.side_effect = processutils.ProcessExecutionError
|
||||
|
||||
expected_call = [mock.call('wipefs', '--all', 'fake-dev',
|
||||
expected_call = [mock.call('wipefs', '--force', '--all', 'fake-dev',
|
||||
run_as_root=True,
|
||||
check_exit_code=[0],
|
||||
use_standard_locale=True)]
|
||||
self.assertRaises(processutils.ProcessExecutionError,
|
||||
disk_utils.destroy_disk_metadata,
|
||||
|
@ -370,8 +405,22 @@ class DestroyMetaDataTestCase(test_base.BaseTestCase):
|
|||
self.node_uuid)
|
||||
mock_exec.assert_has_calls(expected_call)
|
||||
|
||||
def test_destroy_disk_metadata_wipefs_not_support_force(self, mock_exec):
|
||||
mock_exec.side_effect = iter(
|
||||
[processutils.ProcessExecutionError(description='--force'),
|
||||
(None, None)])
|
||||
|
||||
@mock.patch.object(utils, 'execute')
|
||||
expected_call = [mock.call('wipefs', '--force', '--all', 'fake-dev',
|
||||
run_as_root=True,
|
||||
use_standard_locale=True),
|
||||
mock.call('wipefs', '--all', 'fake-dev',
|
||||
run_as_root=True,
|
||||
use_standard_locale=True)]
|
||||
disk_utils.destroy_disk_metadata(self.dev, self.node_uuid)
|
||||
mock_exec.assert_has_calls(expected_call)
|
||||
|
||||
|
||||
@mock.patch.object(utils, 'execute', autospec=True)
|
||||
class GetDeviceBlockSizeTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -387,9 +436,9 @@ class GetDeviceBlockSizeTestCase(test_base.BaseTestCase):
|
|||
mock_exec.assert_has_calls(expected_call)
|
||||
|
||||
|
||||
@mock.patch.object(disk_utils, 'dd')
|
||||
@mock.patch.object(disk_utils, 'qemu_img_info')
|
||||
@mock.patch.object(disk_utils, 'convert_image')
|
||||
@mock.patch.object(disk_utils, 'dd', autospec=True)
|
||||
@mock.patch.object(disk_utils, 'qemu_img_info', autospec=True)
|
||||
@mock.patch.object(disk_utils, 'convert_image', autospec=True)
|
||||
class PopulateImageTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
|
@ -493,11 +542,11 @@ class RealFilePartitioningTestCase(test_base.BaseTestCase):
|
|||
self.assertIn(sizes[2], (9, 10))
|
||||
|
||||
|
||||
@mock.patch.object(shutil, 'copyfileobj')
|
||||
@mock.patch.object(requests, 'get')
|
||||
@mock.patch.object(shutil, 'copyfileobj', autospec=True)
|
||||
@mock.patch.object(requests, 'get', autospec=True)
|
||||
class GetConfigdriveTestCase(test_base.BaseTestCase):
|
||||
|
||||
@mock.patch.object(gzip, 'GzipFile')
|
||||
@mock.patch.object(gzip, 'GzipFile', autospec=True)
|
||||
def test_get_configdrive(self, mock_gzip, mock_requests, mock_copy):
|
||||
mock_requests.return_value = mock.MagicMock(content='Zm9vYmFy')
|
||||
tempdir = tempfile.mkdtemp()
|
||||
|
@ -510,7 +559,7 @@ class GetConfigdriveTestCase(test_base.BaseTestCase):
|
|||
fileobj=mock.ANY)
|
||||
mock_copy.assert_called_once_with(mock.ANY, mock.ANY)
|
||||
|
||||
@mock.patch.object(gzip, 'GzipFile')
|
||||
@mock.patch.object(gzip, 'GzipFile', autospec=True)
|
||||
def test_get_configdrive_base64_string(self, mock_gzip, mock_requests,
|
||||
mock_copy):
|
||||
disk_utils._get_configdrive('Zm9vYmFy', 'fake-node-uuid')
|
||||
|
@ -526,7 +575,7 @@ class GetConfigdriveTestCase(test_base.BaseTestCase):
|
|||
'http://1.2.3.4/cd', 'fake-node-uuid')
|
||||
self.assertFalse(mock_copy.called)
|
||||
|
||||
@mock.patch.object(base64, 'b64decode')
|
||||
@mock.patch.object(base64, 'b64decode', autospec=True)
|
||||
def test_get_configdrive_base64_error(self, mock_b64, mock_requests,
|
||||
mock_copy):
|
||||
mock_b64.side_effect = TypeError
|
||||
|
@ -536,7 +585,7 @@ class GetConfigdriveTestCase(test_base.BaseTestCase):
|
|||
mock_b64.assert_called_once_with('malformed')
|
||||
self.assertFalse(mock_copy.called)
|
||||
|
||||
@mock.patch.object(gzip, 'GzipFile')
|
||||
@mock.patch.object(gzip, 'GzipFile', autospec=True)
|
||||
def test_get_configdrive_gzip_error(self, mock_gzip, mock_requests,
|
||||
mock_copy):
|
||||
mock_requests.return_value = mock.MagicMock(content='Zm9vYmFy')
|
||||
|
@ -553,8 +602,8 @@ class GetConfigdriveTestCase(test_base.BaseTestCase):
|
|||
@mock.patch('time.sleep', lambda sec: None)
|
||||
class OtherFunctionTestCase(test_base.BaseTestCase):
|
||||
|
||||
@mock.patch.object(os, 'stat')
|
||||
@mock.patch.object(stat, 'S_ISBLK')
|
||||
@mock.patch.object(os, 'stat', autospec=True)
|
||||
@mock.patch.object(stat, 'S_ISBLK', autospec=True)
|
||||
def test_is_block_device_works(self, mock_is_blk, mock_os):
|
||||
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
|
||||
mock_is_blk.return_value = True
|
||||
|
@ -562,7 +611,7 @@ class OtherFunctionTestCase(test_base.BaseTestCase):
|
|||
self.assertTrue(disk_utils.is_block_device(device))
|
||||
mock_is_blk.assert_called_once_with(mock_os().st_mode)
|
||||
|
||||
@mock.patch.object(os, 'stat')
|
||||
@mock.patch.object(os, 'stat', autospec=True)
|
||||
def test_is_block_device_raises(self, mock_os):
|
||||
device = '/dev/disk/by-path/ip-1.2.3.4:5678-iscsi-iqn.fake-lun-9'
|
||||
mock_os.side_effect = OSError
|
||||
|
@ -597,8 +646,8 @@ class OtherFunctionTestCase(test_base.BaseTestCase):
|
|||
'out_format', 'source', 'dest',
|
||||
run_as_root=False)
|
||||
|
||||
@mock.patch.object(os.path, 'getsize')
|
||||
@mock.patch.object(disk_utils, 'qemu_img_info')
|
||||
@mock.patch.object(os.path, 'getsize', autospec=True)
|
||||
@mock.patch.object(disk_utils, 'qemu_img_info', autospec=True)
|
||||
def test_get_image_mb(self, mock_qinfo, mock_getsize):
|
||||
mb = 1024 * 1024
|
||||
|
||||
|
|
|
@ -0,0 +1,161 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import types
|
||||
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
from oslotest import base as test_base
|
||||
|
||||
from ironic_lib import metrics as metricslib
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class MockedMetricLogger(metricslib.MetricLogger):
|
||||
_gauge = mock.Mock(spec_set=types.FunctionType)
|
||||
_counter = mock.Mock(spec_set=types.FunctionType)
|
||||
_timer = mock.Mock(spec_set=types.FunctionType)
|
||||
|
||||
|
||||
class TestMetricLogger(test_base.BaseTestCase):
|
||||
def setUp(self):
|
||||
super(TestMetricLogger, self).setUp()
|
||||
self.ml = MockedMetricLogger('prefix', '.')
|
||||
self.ml_no_prefix = MockedMetricLogger('', '.')
|
||||
self.ml_other_delim = MockedMetricLogger('prefix', '*')
|
||||
self.ml_default = MockedMetricLogger()
|
||||
|
||||
def test_init(self):
|
||||
self.assertEqual(self.ml._prefix, 'prefix')
|
||||
self.assertEqual(self.ml._delimiter, '.')
|
||||
|
||||
self.assertEqual(self.ml_no_prefix._prefix, '')
|
||||
self.assertEqual(self.ml_other_delim._delimiter, '*')
|
||||
self.assertEqual(self.ml_default._prefix, '')
|
||||
|
||||
def test_get_metric_name(self):
|
||||
self.assertEqual(
|
||||
self.ml.get_metric_name('metric'),
|
||||
'prefix.metric')
|
||||
|
||||
self.assertEqual(
|
||||
self.ml_no_prefix.get_metric_name('metric'),
|
||||
'metric')
|
||||
|
||||
self.assertEqual(
|
||||
self.ml_other_delim.get_metric_name('metric'),
|
||||
'prefix*metric')
|
||||
|
||||
def test_send_gauge(self):
|
||||
self.ml.send_gauge('prefix.metric', 10)
|
||||
self.ml._gauge.assert_called_once_with('prefix.metric', 10)
|
||||
|
||||
def test_send_counter(self):
|
||||
self.ml.send_counter('prefix.metric', 10)
|
||||
self.ml._counter.assert_called_once_with(
|
||||
'prefix.metric', 10,
|
||||
sample_rate=None)
|
||||
self.ml._counter.reset_mock()
|
||||
|
||||
self.ml.send_counter('prefix.metric', 10, sample_rate=1.0)
|
||||
self.ml._counter.assert_called_once_with(
|
||||
'prefix.metric', 10,
|
||||
sample_rate=1.0)
|
||||
self.ml._counter.reset_mock()
|
||||
|
||||
self.ml.send_counter('prefix.metric', 10, sample_rate=0.0)
|
||||
self.assertFalse(self.ml._counter.called)
|
||||
|
||||
def test_send_timer(self):
|
||||
self.ml.send_timer('prefix.metric', 10)
|
||||
self.ml._timer.assert_called_once_with('prefix.metric', 10)
|
||||
|
||||
@mock.patch('ironic_lib.metrics._time', autospec=True)
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_timer', autospec=True)
|
||||
def test_decorator_timer(self, mock_timer, mock_time):
|
||||
mock_time.side_effect = [1, 43]
|
||||
|
||||
@self.ml.timer('foo.bar.baz')
|
||||
def func(x):
|
||||
return x * x
|
||||
|
||||
func(10)
|
||||
|
||||
mock_timer.assert_called_once_with(self.ml, 'prefix.foo.bar.baz',
|
||||
42 * 1000)
|
||||
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_counter', autospec=True)
|
||||
def test_decorator_counter(self, mock_counter):
|
||||
|
||||
@self.ml.counter('foo.bar.baz')
|
||||
def func(x):
|
||||
return x * x
|
||||
|
||||
func(10)
|
||||
|
||||
mock_counter.assert_called_once_with(self.ml, 'prefix.foo.bar.baz', 1,
|
||||
sample_rate=None)
|
||||
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_counter', autospec=True)
|
||||
def test_decorator_counter_sample_rate(self, mock_counter):
|
||||
|
||||
@self.ml.counter('foo.bar.baz', sample_rate=0.5)
|
||||
def func(x):
|
||||
return x * x
|
||||
|
||||
func(10)
|
||||
|
||||
mock_counter.assert_called_once_with(self.ml, 'prefix.foo.bar.baz', 1,
|
||||
sample_rate=0.5)
|
||||
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_gauge', autospec=True)
|
||||
def test_decorator_gauge(self, mock_gauge):
|
||||
@self.ml.gauge('foo.bar.baz')
|
||||
def func(x):
|
||||
return x
|
||||
|
||||
func(10)
|
||||
|
||||
mock_gauge.assert_called_once_with(self.ml, 'prefix.foo.bar.baz', 10)
|
||||
|
||||
@mock.patch('ironic_lib.metrics._time', autospec=True)
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_timer', autospec=True)
|
||||
def test_context_mgr_timer(self, mock_timer, mock_time):
|
||||
mock_time.side_effect = [1, 43]
|
||||
|
||||
with self.ml.timer('foo.bar.baz'):
|
||||
pass
|
||||
|
||||
mock_timer.assert_called_once_with(self.ml, 'prefix.foo.bar.baz',
|
||||
42 * 1000)
|
||||
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_counter', autospec=True)
|
||||
def test_context_mgr_counter(self, mock_counter):
|
||||
|
||||
with self.ml.counter('foo.bar.baz'):
|
||||
pass
|
||||
|
||||
mock_counter.assert_called_once_with(self.ml, 'prefix.foo.bar.baz', 1,
|
||||
sample_rate=None)
|
||||
|
||||
@mock.patch('ironic_lib.metrics.MetricLogger.send_counter', autospec=True)
|
||||
def test_context_mgr_counter_sample_rate(self, mock_counter):
|
||||
|
||||
with self.ml.counter('foo.bar.baz', sample_rate=0.5):
|
||||
pass
|
||||
|
||||
mock_counter.assert_called_once_with(self.ml, 'prefix.foo.bar.baz', 1,
|
||||
sample_rate=0.5)
|
|
@ -0,0 +1,96 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import socket
|
||||
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
|
||||
from ironic_lib import metrics_statsd
|
||||
|
||||
|
||||
class TestStatsdMetricLogger(test_base.BaseTestCase):
|
||||
def setUp(self):
|
||||
super(TestStatsdMetricLogger, self).setUp()
|
||||
self.ml = metrics_statsd.StatsdMetricLogger('prefix', '.', 'test-host',
|
||||
4321)
|
||||
|
||||
def test_init(self):
|
||||
self.assertEqual(self.ml._host, 'test-host')
|
||||
self.assertEqual(self.ml._port, 4321)
|
||||
self.assertEqual(self.ml._target, ('test-host', 4321))
|
||||
|
||||
@mock.patch('ironic_lib.metrics_statsd.StatsdMetricLogger._send',
|
||||
autospec=True)
|
||||
def test_gauge(self, mock_send):
|
||||
self.ml._gauge('metric', 10)
|
||||
mock_send.assert_called_once_with(self.ml, 'metric', 10, 'g')
|
||||
|
||||
@mock.patch('ironic_lib.metrics_statsd.StatsdMetricLogger._send',
|
||||
autospec=True)
|
||||
def test_counter(self, mock_send):
|
||||
self.ml._counter('metric', 10)
|
||||
mock_send.assert_called_once_with(self.ml, 'metric', 10, 'c',
|
||||
sample_rate=None)
|
||||
mock_send.reset_mock()
|
||||
|
||||
self.ml._counter('metric', 10, sample_rate=1.0)
|
||||
mock_send.assert_called_once_with(self.ml, 'metric', 10, 'c',
|
||||
sample_rate=1.0)
|
||||
|
||||
@mock.patch('ironic_lib.metrics_statsd.StatsdMetricLogger._send',
|
||||
autospec=True)
|
||||
def test_timer(self, mock_send):
|
||||
self.ml._timer('metric', 10)
|
||||
mock_send.assert_called_once_with(self.ml, 'metric', 10, 'ms')
|
||||
|
||||
@mock.patch('socket.socket')
|
||||
def test_open_socket(self, mock_socket_constructor):
|
||||
self.ml._open_socket()
|
||||
mock_socket_constructor.assert_called_once_with(
|
||||
socket.AF_INET,
|
||||
socket.SOCK_DGRAM)
|
||||
|
||||
@mock.patch('socket.socket')
|
||||
def test_send(self, mock_socket_constructor):
|
||||
mock_socket = mock.Mock()
|
||||
mock_socket_constructor.return_value = mock_socket
|
||||
|
||||
self.ml._send('part1.part2', 2, 'type')
|
||||
mock_socket.sendto.assert_called_once_with(
|
||||
'part1.part2:2|type',
|
||||
('test-host', 4321))
|
||||
mock_socket.close.assert_called_once_with()
|
||||
mock_socket.reset_mock()
|
||||
|
||||
self.ml._send('part1.part2', 3.14159, 'type')
|
||||
mock_socket.sendto.assert_called_once_with(
|
||||
'part1.part2:3.14159|type',
|
||||
('test-host', 4321))
|
||||
mock_socket.close.assert_called_once_with()
|
||||
mock_socket.reset_mock()
|
||||
|
||||
self.ml._send('part1.part2', 5, 'type')
|
||||
mock_socket.sendto.assert_called_once_with(
|
||||
'part1.part2:5|type',
|
||||
('test-host', 4321))
|
||||
mock_socket.close.assert_called_once_with()
|
||||
mock_socket.reset_mock()
|
||||
|
||||
self.ml._send('part1.part2', 5, 'type', sample_rate=0.5)
|
||||
mock_socket.sendto.assert_called_once_with(
|
||||
'part1.part2:5|type@0.5',
|
||||
('test-host', 4321))
|
||||
mock_socket.close.assert_called_once_with()
|
|
@ -0,0 +1,108 @@
|
|||
# Copyright 2016 Rackspace Hosting
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base as test_base
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
from ironic_lib import exception
|
||||
from ironic_lib import metrics as metricslib
|
||||
from ironic_lib import metrics_statsd
|
||||
from ironic_lib import metrics_utils
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
class TestGetLogger(test_base.BaseTestCase):
|
||||
def setUp(self):
|
||||
super(TestGetLogger, self).setUp()
|
||||
|
||||
def test_default_backend(self):
|
||||
metrics = metrics_utils.get_metrics_logger('foo')
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
|
||||
def test_statsd_backend(self):
|
||||
CONF.set_override('backend', 'statsd', group='metrics')
|
||||
|
||||
metrics = metrics_utils.get_metrics_logger('foo')
|
||||
self.assertIsInstance(metrics, metrics_statsd.StatsdMetricLogger)
|
||||
CONF.clear_override('backend', group='metrics')
|
||||
|
||||
def test_nonexisting_backend(self):
|
||||
CONF.set_override('backend', 'none', group='metrics')
|
||||
|
||||
self.assertRaises(exception.InvalidMetricConfig,
|
||||
metrics_utils.get_metrics_logger, 'foo')
|
||||
CONF.clear_override('backend', group='metrics')
|
||||
|
||||
def test_numeric_prefix(self):
|
||||
self.assertRaises(exception.InvalidMetricConfig,
|
||||
metrics_utils.get_metrics_logger, 1)
|
||||
|
||||
def test_numeric_list_prefix(self):
|
||||
self.assertRaises(exception.InvalidMetricConfig,
|
||||
metrics_utils.get_metrics_logger, (1, 2))
|
||||
|
||||
def test_default_prefix(self):
|
||||
metrics = metrics_utils.get_metrics_logger()
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
self.assertEqual(metrics.get_metric_name("bar"), "bar")
|
||||
|
||||
def test_prepend_host_backend(self):
|
||||
CONF.set_override('prepend_host', True, group='metrics')
|
||||
CONF.set_override('prepend_host_reverse', False, group='metrics')
|
||||
|
||||
metrics = metrics_utils.get_metrics_logger(prefix='foo',
|
||||
host="host.example.com")
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
self.assertEqual(metrics.get_metric_name("bar"),
|
||||
"host.example.com.foo.bar")
|
||||
|
||||
CONF.clear_override('prepend_host', group='metrics')
|
||||
CONF.clear_override('prepend_host_reverse', group='metrics')
|
||||
|
||||
def test_prepend_global_prefix_host_backend(self):
|
||||
CONF.set_override('prepend_host', True, group='metrics')
|
||||
CONF.set_override('prepend_host_reverse', False, group='metrics')
|
||||
CONF.set_override('global_prefix', 'global_pre', group='metrics')
|
||||
|
||||
metrics = metrics_utils.get_metrics_logger(prefix='foo',
|
||||
host="host.example.com")
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
self.assertEqual(metrics.get_metric_name("bar"),
|
||||
"global_pre.host.example.com.foo.bar")
|
||||
|
||||
CONF.clear_override('prepend_host', group='metrics')
|
||||
CONF.clear_override('prepend_host_reverse', group='metrics')
|
||||
CONF.clear_override('global_prefix', group='metrics')
|
||||
|
||||
def test_prepend_other_delim(self):
|
||||
metrics = metrics_utils.get_metrics_logger('foo', delimiter='*')
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
self.assertEqual(metrics.get_metric_name("bar"),
|
||||
"foo*bar")
|
||||
|
||||
def test_prepend_host_reverse_backend(self):
|
||||
CONF.set_override('prepend_host', True, group='metrics')
|
||||
CONF.set_override('prepend_host_reverse', True, group='metrics')
|
||||
|
||||
metrics = metrics_utils.get_metrics_logger('foo',
|
||||
host="host.example.com")
|
||||
self.assertIsInstance(metrics, metricslib.NoopMetricLogger)
|
||||
self.assertEqual(metrics.get_metric_name("bar"),
|
||||
"com.example.host.foo.bar")
|
||||
|
||||
CONF.clear_override('prepend_host', group='metrics')
|
||||
CONF.clear_override('prepend_host_reverse', group='metrics')
|
|
@ -140,15 +140,15 @@ grep foo
|
|||
os.unlink(tmpfilename)
|
||||
os.unlink(tmpfilename2)
|
||||
|
||||
@mock.patch.object(processutils, 'execute')
|
||||
@mock.patch.object(os.environ, 'copy', return_value={})
|
||||
@mock.patch.object(processutils, 'execute', autospec=True)
|
||||
@mock.patch.object(os.environ, 'copy', return_value={}, autospec=True)
|
||||
def test_execute_use_standard_locale_no_env_variables(self, env_mock,
|
||||
execute_mock):
|
||||
utils.execute('foo', use_standard_locale=True)
|
||||
execute_mock.assert_called_once_with('foo',
|
||||
env_variables={'LC_ALL': 'C'})
|
||||
|
||||
@mock.patch.object(processutils, 'execute')
|
||||
@mock.patch.object(processutils, 'execute', autospec=True)
|
||||
def test_execute_use_standard_locale_with_env_variables(self,
|
||||
execute_mock):
|
||||
utils.execute('foo', use_standard_locale=True,
|
||||
|
@ -157,7 +157,7 @@ grep foo
|
|||
env_variables={'LC_ALL': 'C',
|
||||
'foo': 'bar'})
|
||||
|
||||
@mock.patch.object(processutils, 'execute')
|
||||
@mock.patch.object(processutils, 'execute', autospec=True)
|
||||
def test_execute_not_use_standard_locale(self, execute_mock):
|
||||
utils.execute('foo', use_standard_locale=False,
|
||||
env_variables={'foo': 'bar'})
|
||||
|
@ -166,28 +166,58 @@ grep foo
|
|||
|
||||
def test_execute_without_root_helper(self):
|
||||
CONF.set_override('root_helper', None, group='ironic_lib')
|
||||
with mock.patch.object(processutils, 'execute') as execute_mock:
|
||||
with mock.patch.object(
|
||||
processutils, 'execute', autospec=True) as execute_mock:
|
||||
utils.execute('foo', run_as_root=False)
|
||||
execute_mock.assert_called_once_with('foo', run_as_root=False)
|
||||
|
||||
def test_execute_without_root_helper_run_as_root(self):
|
||||
CONF.set_override('root_helper', None, group='ironic_lib')
|
||||
with mock.patch.object(processutils, 'execute') as execute_mock:
|
||||
with mock.patch.object(
|
||||
processutils, 'execute', autospec=True) as execute_mock:
|
||||
utils.execute('foo', run_as_root=True)
|
||||
execute_mock.assert_called_once_with('foo', run_as_root=False)
|
||||
|
||||
def test_execute_with_root_helper(self):
|
||||
with mock.patch.object(processutils, 'execute') as execute_mock:
|
||||
with mock.patch.object(
|
||||
processutils, 'execute', autospec=True) as execute_mock:
|
||||
utils.execute('foo', run_as_root=False)
|
||||
execute_mock.assert_called_once_with('foo', run_as_root=False)
|
||||
|
||||
def test_execute_with_root_helper_run_as_root(self):
|
||||
with mock.patch.object(processutils, 'execute') as execute_mock:
|
||||
with mock.patch.object(
|
||||
processutils, 'execute', autospec=True) as execute_mock:
|
||||
utils.execute('foo', run_as_root=True)
|
||||
execute_mock.assert_called_once_with(
|
||||
'foo', run_as_root=True,
|
||||
root_helper=CONF.ironic_lib.root_helper)
|
||||
|
||||
@mock.patch.object(utils, 'LOG', autospec=True)
|
||||
def _test_execute_with_log_stdout(self, log_mock, log_stdout=None):
|
||||
with mock.patch.object(processutils, 'execute') as execute_mock:
|
||||
execute_mock.return_value = ('stdout', 'stderr')
|
||||
if log_stdout is not None:
|
||||
utils.execute('foo', log_stdout=log_stdout)
|
||||
else:
|
||||
utils.execute('foo')
|
||||
execute_mock.assert_called_once_with('foo')
|
||||
name, args, kwargs = log_mock.debug.mock_calls[1]
|
||||
if log_stdout is False:
|
||||
self.assertEqual(2, log_mock.debug.call_count)
|
||||
self.assertNotIn('stdout', args[0])
|
||||
else:
|
||||
self.assertEqual(3, log_mock.debug.call_count)
|
||||
self.assertIn('stdout', args[0])
|
||||
|
||||
def test_execute_with_log_stdout_default(self):
|
||||
self._test_execute_with_log_stdout()
|
||||
|
||||
def test_execute_with_log_stdout_true(self):
|
||||
self._test_execute_with_log_stdout(log_stdout=True)
|
||||
|
||||
def test_execute_with_log_stdout_false(self):
|
||||
self._test_execute_with_log_stdout(log_stdout=False)
|
||||
|
||||
|
||||
class MkfsTestCase(test_base.BaseTestCase):
|
||||
|
||||
|
|
|
@ -46,13 +46,21 @@ LOG = logging.getLogger(__name__)
|
|||
def execute(*cmd, **kwargs):
|
||||
"""Convenience wrapper around oslo's execute() method.
|
||||
|
||||
:param cmd: Passed to processutils.execute.
|
||||
:param use_standard_locale: True | False. Defaults to False. If set to
|
||||
True, execute command with standard locale
|
||||
Executes and logs results from a system command. See docs for
|
||||
oslo_concurrency.processutils.execute for usage.
|
||||
|
||||
:param \*cmd: positional arguments to pass to processutils.execute()
|
||||
:param use_standard_locale: keyword-only argument. True | False.
|
||||
Defaults to False. If set to True,
|
||||
execute command with standard locale
|
||||
added to environment variables.
|
||||
:param log_stdout: keyword-only argument. True | False. Defaults
|
||||
to True. If set to True, logs the output.
|
||||
:param \*\*kwargs: keyword arguments to pass to processutils.execute()
|
||||
:returns: (stdout, stderr) from process execution
|
||||
:raises: UnknownArgumentError
|
||||
:raises: UnknownArgumentError on receiving unknown arguments
|
||||
:raises: ProcessExecutionError
|
||||
:raises: OSError
|
||||
"""
|
||||
|
||||
use_standard_locale = kwargs.pop('use_standard_locale', False)
|
||||
|
@ -61,6 +69,8 @@ def execute(*cmd, **kwargs):
|
|||
env['LC_ALL'] = 'C'
|
||||
kwargs['env_variables'] = env
|
||||
|
||||
log_stdout = kwargs.pop('log_stdout', True)
|
||||
|
||||
# If root_helper config is not specified, no commands are run as root.
|
||||
run_as_root = kwargs.get('run_as_root', False)
|
||||
if run_as_root:
|
||||
|
@ -72,7 +82,8 @@ def execute(*cmd, **kwargs):
|
|||
result = processutils.execute(*cmd, **kwargs)
|
||||
LOG.debug('Execution completed, command line is "%s"',
|
||||
' '.join(map(str, cmd)))
|
||||
LOG.debug('Command stdout is: "%s"' % result[0])
|
||||
if log_stdout:
|
||||
LOG.debug('Command stdout is: "%s"' % result[0])
|
||||
LOG.debug('Command stderr is: "%s"' % result[1])
|
||||
return result
|
||||
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
# process, which may cause wedges in the gate later.
|
||||
|
||||
pbr>=1.6 # Apache-2.0
|
||||
oslo.concurrency>=3.5.0 # Apache-2.0
|
||||
oslo.config>=3.9.0 # Apache-2.0
|
||||
oslo.concurrency>=3.8.0 # Apache-2.0
|
||||
oslo.config>=3.10.0 # Apache-2.0
|
||||
oslo.i18n>=2.1.0 # Apache-2.0
|
||||
oslo.service>=1.0.0 # Apache-2.0
|
||||
oslo.utils>=3.5.0 # Apache-2.0
|
||||
requests!=2.9.0,>=2.8.1 # Apache-2.0
|
||||
oslo.service>=1.10.0 # Apache-2.0
|
||||
oslo.utils>=3.14.0 # Apache-2.0
|
||||
requests>=2.10.0 # Apache-2.0
|
||||
six>=1.9.0 # MIT
|
||||
oslo.log>=1.14.0 # Apache-2.0
|
||||
|
|
|
@ -25,3 +25,5 @@ oslo.config.opts =
|
|||
ironic_lib.disk_partitioner = ironic_lib.disk_partitioner:list_opts
|
||||
ironic_lib.disk_utils = ironic_lib.disk_utils:list_opts
|
||||
ironic_lib.utils = ironic_lib.utils:list_opts
|
||||
ironic_lib.metrics = ironic_lib.metrics_utils:list_opts
|
||||
ironic_lib.metrics_statsd = ironic_lib.metrics_statsd:list_opts
|
||||
|
|
|
@ -5,8 +5,8 @@
|
|||
coverage>=3.6 # Apache-2.0
|
||||
eventlet!=0.18.3,>=0.18.2 # MIT
|
||||
hacking<0.11,>=0.10.0
|
||||
mock>=1.2 # BSD
|
||||
os-testr>=0.4.1 # Apache-2.0
|
||||
mock>=2.0 # BSD
|
||||
os-testr>=0.7.0 # Apache-2.0
|
||||
oslotest>=1.10.0 # Apache-2.0
|
||||
testscenarios>=0.4 # Apache-2.0/BSD
|
||||
testtools>=1.4.0 # MIT
|
||||
|
|
2
tox.ini
2
tox.ini
|
@ -24,7 +24,7 @@ commands = flake8 {posargs}
|
|||
setenv = VIRTUALENV={envdir}
|
||||
LANGUAGE=en_US
|
||||
commands =
|
||||
python setup.py testr --coverage --coverage-package-name=ironic_lib --omit=ironic_lib/openstack/common/*.py {posargs}
|
||||
python setup.py test --coverage --coverage-package-name=ironic_lib --omit=ironic_lib/openstack/common/*.py {posargs}
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
|
Loading…
Reference in New Issue