Merge "Update pep8 checks"
Commit: 5f18d192a9
@@ -77,10 +77,11 @@ METRIC_NAME_BY_DIMENSION_CQL = ('select metric_name from dimensions_metrics wher
                                 'tenant_id = ? and dimension_name = ? and dimension_value = ? '
                                 'group by metric_name order by metric_name')

-METRIC_NAME_BY_DIMENSION_OFFSET_CQL = ('select metric_name from dimensions_metrics where region = ? and '
-                                       'tenant_id = ? and dimension_name = ? and dimension_value = ? and '
-                                       'metric_name >= ?'
-                                       'group by metric_name order by metric_name')
+METRIC_NAME_BY_DIMENSION_OFFSET_CQL = (
+    'select metric_name from dimensions_metrics where region = ? and '
+    'tenant_id = ? and dimension_name = ? and dimension_value = ? and '
+    'metric_name >= ?'
+    'group by metric_name order by metric_name')

 METRIC_NAME_CQL = ('select distinct region, tenant_id, metric_name from metrics_dimensions '
                    'where region = ? and tenant_id = ? allow filtering')
@@ -93,8 +94,9 @@ METRIC_BY_ID_CQL = ('select region, tenant_id, metric_name, dimensions from meas

 Metric = namedtuple('metric', 'id name dimensions')

-ALARM_HISTORY_CQL = ('select tenant_id, alarm_id, time_stamp, metric, new_state, old_state, reason, reason_data, '
-                     'sub_alarms from alarm_state_history where %s %s %s %s %s')
+ALARM_HISTORY_CQL = (
+    'select tenant_id, alarm_id, time_stamp, metric, new_state, old_state, reason, reason_data, '
+    'sub_alarms from alarm_state_history where %s %s %s %s %s')

 ALARM_ID_EQ = 'and alarm_id = %s'
@@ -128,7 +130,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         self.metric_name_by_dimension_stmt = self.session.prepare(METRIC_NAME_BY_DIMENSION_CQL)

-        self.metric_name_by_dimension_offset_stmt = self.session.prepare(METRIC_NAME_BY_DIMENSION_OFFSET_CQL)
+        self.metric_name_by_dimension_offset_stmt = self.session.prepare(
+            METRIC_NAME_BY_DIMENSION_OFFSET_CQL)

         self.metric_name_stmt = self.session.prepare(METRIC_NAME_CQL)
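The statements prepared above follow the Cassandra driver's bind-marker convention: session.prepare() parses a CQL string once, and each '?' becomes a positional parameter supplied at execution time, which is why the repository caches the prepared statements on self. A minimal sketch of that flow, assuming an illustrative contact point, keyspace, and bind values (none of these are taken from the commit):

    # Sketch only: how the prepared CQL constants above are typically used
    # with the DataStax Cassandra driver.  The contact point, keyspace and
    # bound values are assumptions for illustration.
    from cassandra.cluster import Cluster

    cluster = Cluster(['127.0.0.1'])      # assumed contact point
    session = cluster.connect('monasca')  # assumed keyspace

    # prepare() parses the statement once; each '?' becomes a bind marker
    stmt = session.prepare(
        'select metric_name from dimensions_metrics where region = ? and '
        'tenant_id = ? and dimension_name = ? and dimension_value = ? '
        'group by metric_name order by metric_name')

    # positional values fill the '?' markers in order
    rows = session.execute(stmt, ['west', 'bob', 'hostname', 'host-01'])
    for row in rows:
        print(row.metric_name)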
@@ -228,8 +231,15 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         for name in names:
             if name == offset_name:
-                futures = self._list_metrics_by_name(tenant_id, region, name, dimensions, offset_dimensions,
-                                                     limit, start_time=None, end_time=None)
+                futures = self._list_metrics_by_name(
+                    tenant_id,
+                    region,
+                    name,
+                    dimensions,
+                    offset_dimensions,
+                    limit,
+                    start_time=None,
+                    end_time=None)
                 if offset_dimensions and dimensions:
                     offset_futures.extend(futures)
                 else:
@@ -270,16 +280,32 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         return metric

-    def _list_metrics_by_name(self, tenant_id, region, name, dimensions, dimension_offset, limit, start_time=None,
-                              end_time=None):
+    def _list_metrics_by_name(
+            self,
+            tenant_id,
+            region,
+            name,
+            dimensions,
+            dimension_offset,
+            limit,
+            start_time=None,
+            end_time=None):

         or_dimensions = []
         sub_dimensions = {}
         futures = []

         if not dimensions:
-            query = self._build_metrics_by_name_query(tenant_id, region, name, dimensions, None, start_time,
-                                                      end_time, dimension_offset, limit)
+            query = self._build_metrics_by_name_query(
+                tenant_id,
+                region,
+                name,
+                dimensions,
+                None,
+                start_time,
+                end_time,
+                dimension_offset,
+                limit)
             futures.append(self.session.execute_async(query[0], query[1]))
             return futures
@@ -308,16 +334,30 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                 for k, v in dims.items():
                     extracted_dimensions[k] = v

-                query = self._build_metrics_by_name_query(tenant_id, region, name, extracted_dimensions,
-                                                          wildcard_dimensions, start_time,
-                                                          end_time, dimension_offset, limit)
+                query = self._build_metrics_by_name_query(
+                    tenant_id,
+                    region,
+                    name,
+                    extracted_dimensions,
+                    wildcard_dimensions,
+                    start_time,
+                    end_time,
+                    dimension_offset,
+                    limit)

                 futures.append(self.session.execute_async(query[0], query[1]))

         else:
-            query = self._build_metrics_by_name_query(tenant_id, region, name, sub_dimensions, wildcard_dimensions,
-                                                      start_time,
-                                                      end_time, dimension_offset, limit)
+            query = self._build_metrics_by_name_query(
+                tenant_id,
+                region,
+                name,
+                sub_dimensions,
+                wildcard_dimensions,
+                start_time,
+                end_time,
+                dimension_offset,
+                limit)
             futures.append(self.session.execute_async(query[0], query[1]))

         return futures
@@ -331,9 +371,17 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         return None

-    def _build_metrics_by_name_query(self, tenant_id, region, name, dimensions, wildcard_dimensions, start_time,
-                                     end_time, dim_offset,
-                                     limit):
+    def _build_metrics_by_name_query(
+            self,
+            tenant_id,
+            region,
+            name,
+            dimensions,
+            wildcard_dimensions,
+            start_time,
+            end_time,
+            dim_offset,
+            limit):

         conditions = [REGION_EQ, TENANT_EQ]
         params = [region, tenant_id.encode('utf8')]
@@ -427,13 +475,15 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                     extracted_dimensions[k] = v

                 names.extend(
-                    self._list_metric_names_single_dimension_value(tenant_id, region, extracted_dimensions, offset))
+                    self._list_metric_names_single_dimension_value(
+                        tenant_id, region, extracted_dimensions, offset))

             names.sort(key=lambda x: x[u'name'])
             return names

         else:
-            names = self._list_metric_names_single_dimension_value(tenant_id, region, single_dimensions, offset)
+            names = self._list_metric_names_single_dimension_value(
+                tenant_id, region, single_dimensions, offset)
             names.sort(key=lambda x: x[u'name'])
             return names
@@ -444,18 +494,27 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         if dimensions:
             for name, value in dimensions.items():
                 if offset:
-                    futures.append(self.session.execute_async(self.metric_name_by_dimension_offset_stmt,
-                                                              [region, tenant_id, name, value, offset]))
+                    futures.append(
+                        self.session.execute_async(
+                            self.metric_name_by_dimension_offset_stmt, [
+                                region, tenant_id, name, value, offset]))
                 else:
-                    futures.append(self.session.execute_async(self.metric_name_by_dimension_stmt,
-                                                              [region, tenant_id, name, value]))
+                    futures.append(
+                        self.session.execute_async(
+                            self.metric_name_by_dimension_stmt, [
+                                region, tenant_id, name, value]))

         else:
             if offset:
                 futures.append(
-                    self.session.execute_async(self.metric_name_offset_stmt, [region, tenant_id, offset]))
+                    self.session.execute_async(
+                        self.metric_name_offset_stmt, [
+                            region, tenant_id, offset]))
             else:
-                futures.append(self.session.execute_async(self.metric_name_stmt, [region, tenant_id]))
+                futures.append(
+                    self.session.execute_async(
+                        self.metric_name_stmt, [
+                            region, tenant_id]))

         names_list = []
@@ -499,7 +558,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):

         try:
             if len(metrics) > 1 and not group_by:
-                # offset is controlled only by offset_timestamp when the group by option is not enabled
+                # offset is controlled only by offset_timestamp when the group by option
+                # is not enabled
                 count, series_list = self._query_merge_measurements(metrics,
                                                                     dimensions,
                                                                     start_timestamp,
@@ -566,7 +626,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             if remaining <= 0:
                 break

-            # offset_timestamp is used only in the first group, reset to None for subsequent groups
+            # offset_timestamp is used only in the first group, reset to None for
+            # subsequent groups
             if offset_timestamp:
                 offset_timestamp = None
@@ -626,9 +687,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         measurements = []
         row = next(result[1], None)
         while row:
-            measurements.append([self._isotime_msec(row.time_stamp),
-                                 row.value,
-                                 rest_utils.from_json(row.value_meta) if row.value_meta else {}])
+            measurements.append(
+                [self._isotime_msec(row.time_stamp), row.value,
+                 rest_utils.from_json(row.value_meta) if row.value_meta else {}])
             count += 1
             if limit and count >= limit:
                 break
@@ -673,7 +734,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         else:
             conditions.append('')

-        return SimpleStatement(MEASUREMENT_LIST_CQL % tuple(conditions), fetch_size=fetch_size), params
+        return SimpleStatement(MEASUREMENT_LIST_CQL %
+                               tuple(conditions), fetch_size=fetch_size), params

     def _merge_series(self, series, dimensions, limit):
         series_list = []
@@ -704,10 +766,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                 count += 1
                 row = next(series[top_batch[num_series - 1][0]][1], None)
                 if row:
-                    top_batch[num_series - 1] = [top_batch[num_series - 1][0],
-                                                 row.time_stamp,
-                                                 row.value,
-                                                 rest_utils.from_json(row.value_meta) if row.value_meta else {}]
+                    top_batch[num_series - 1] = \
+                        [top_batch[num_series - 1][0], row.time_stamp,
+                         row.value, rest_utils.from_json(row.value_meta) if row.value_meta else {}]

                 top_batch.sort(key=lambda m: m[1], reverse=True)
             else:
@@ -802,7 +863,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         first_measure = measurements[0]
         first_measure_start_time = MetricsRepository._parse_time_string(first_measure[0])

-        # skip blank intervals at the beginning, finds the start time of stat period that is not empty
+        # skip blank intervals at the beginning, finds the start time of stat
+        # period that is not empty
         stat_start_time = start_time + timedelta(
             seconds=((first_measure_start_time - start_time).seconds / period) * period)
@@ -840,8 +902,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                     stats_sum += value

             if stats_count:
-                stat = MetricsRepository._create_stat(statistics, stat_start_time, stats_count, stats_sum,
-                                                      stats_min, stats_max)
+                stat = MetricsRepository._create_stat(
+                    statistics, stat_start_time, stats_count, stats_sum, stats_min, stats_max)
                 stats_list.append(stat)
                 limit -= 1
@@ -861,7 +923,13 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         return json_statistics_list

     @staticmethod
-    def _create_stat(statistics, timestamp, stat_count=None, stat_sum=None, stat_min=None, stat_max=None):
+    def _create_stat(
+            statistics,
+            timestamp,
+            stat_count=None,
+            stat_sum=None,
+            stat_min=None,
+            stat_max=None):

         stat = [MetricsRepository._isotime_msec(timestamp)]
@@ -909,7 +977,11 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             conditions.append(ALARM_ID_EQ)
             params.append(alarm_id_list[0])
         else:
-            conditions.append(' and alarm_id in ({}) '.format(','.join(['%s'] * len(alarm_id_list))))
+            conditions.append(
+                ' and alarm_id in ({}) '.format(
+                    ','.join(
+                        ['%s'] *
+                        len(alarm_id_list))))
             for alarm_id in alarm_id_list:
                 params.append(alarm_id)
@@ -518,9 +518,10 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         return json_metric_list

     def _build_measurement_name_list(self, measurement_names):
-        """read measurement names from InfluxDB response
+        """Read measurement names from InfluxDB response

-        Extract the measurement names (InfluxDB terminology) from the SHOW MEASURMENTS result to yield metric names
+        Extract the measurement names (InfluxDB terminology) from the SHOW MEASURMENTS result
+        to yield metric names
         :param measurement_names: result from SHOW MEASUREMENTS call (json-dict)
         :return: list of metric-names (Monasca terminology)
         """
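The docstring above describes unpacking the json-dict that InfluxDB returns for SHOW MEASUREMENTS. A hypothetical response in the InfluxDB HTTP-API result shape, with one way to flatten it into metric names (the metric names and the helper code are illustrative assumptions, not part of the commit):

    # Hypothetical SHOW MEASUREMENTS result (json-dict); the shape follows
    # the InfluxDB HTTP API convention and the metric names are invented.
    measurement_names = {
        u'results': [{
            u'series': [{
                u'name': u'measurements',
                u'columns': [u'name'],
                u'values': [[u'cpu.idle_perc'], [u'mem.free_mb']],
            }],
        }],
    }

    # Flatten the nested structure down to the bare names
    names = [value[0]
             for result in measurement_names[u'results']
             for series in result.get(u'series', [])
             for value in series[u'values']]
    print(names)  # [u'cpu.idle_perc', u'mem.free_mb']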
@@ -605,8 +606,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             if not group_by:
                 measurement[u'dimensions'] = dimensions
             else:
-                measurement[u'dimensions'] = {key: value for key, value in serie['tags'].items()
-                                              if not key.startswith('_')}
+                measurement[u'dimensions'] = {
+                    key: value for key,
+                    value in serie['tags'].items() if not key.startswith('_')}

             json_measurement_list.append(measurement)
             index += 1
@@ -716,8 +718,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             if not group_by:
                 statistic[u'dimensions'] = dimensions
             else:
-                statistic[u'dimensions'] = {key: value for key, value in serie['tags'].items()
-                                            if not key.startswith('_')}
+                statistic[u'dimensions'] = {
+                    key: value for key,
+                    value in serie['tags'].items() if not key.startswith('_')}

             json_statistics_list.append(statistic)
             index += 1
@@ -120,8 +120,8 @@ class SubAlarmDefinition(object):

     def __repr__(self):

-        result = 'id={},alarm_definition_id={},function={},metric_name={},dimensions={}'\
-            .format(self.id, self.alarm_definition_id, self.function, self.metric_name, self.dimensions)
+        result = 'id={},alarm_definition_id={},function={},metric_name={},dimensions={}' .format(
+            self.id, self.alarm_definition_id, self.function, self.metric_name, self.dimensions)
         result += ',operator={},period={},periods={},determinstic={}'\
             .format(self.operator, self.period, self.periods, self.deterministic)
         return result
@@ -108,35 +108,46 @@ class AlarmDefinitionsRepository(sql_repository.SQLRepository,
                                               aao.c.ok_actions,
                                               aau.c.undetermined_actions]))

-        self.get_sub_alarms_query = (select([sa_s.c.id.label('sub_alarm_id'),
-                                             sa_s.c.alarm_id,
-                                             sa_s.c.expression])
-                                     .select_from(sa_s.join(a_s, a_s.c.id == sa_s.c.alarm_id)
-                                                  .join(ad_s, ad_s.c.id == a_s.c.alarm_definition_id))
-                                     .where(ad_s.c.tenant_id == bindparam('b_tenant_id'))
-                                     .where(ad_s.c.id == bindparam('b_id'))
-                                     .distinct())
+        self.get_sub_alarms_query = (
+            select(
+                [
+                    sa_s.c.id.label('sub_alarm_id'),
+                    sa_s.c.alarm_id,
+                    sa_s.c.expression]) .select_from(
+                sa_s.join(
+                    a_s,
+                    a_s.c.id == sa_s.c.alarm_id) .join(
+                    ad_s,
+                    ad_s.c.id == a_s.c.alarm_definition_id)) .where(
+                ad_s.c.tenant_id == bindparam('b_tenant_id')) .where(
+                ad_s.c.id == bindparam('b_id')) .distinct())

         mdg = (select([md_s.c.dimension_set_id,
-                       models.group_concat([md_s.c.name + text("'='") + md_s.c.value]).label('dimensions')])
+                       models.group_concat(
+                           [md_s.c.name + text("'='") + md_s.c.value]).label('dimensions')])
               .select_from(md_s)
               .group_by(md_s.c.dimension_set_id)
               .alias('mdg'))

-        self.get_alarm_metrics_query = (select([a_s.c.id.label('alarm_id'),
-                                                mde_s.c.name,
-                                                mdg.c.dimensions])
-                                        .select_from(a_s.join(ad_s, ad_s.c.id == a_s.c.alarm_definition_id)
-                                                     .join(am_s, am_s.c.alarm_id == a_s.c.id)
-                                                     .join(mdd_s, mdd_s.c.id
-                                                           == am_s.c.metric_definition_dimensions_id)
-                                                     .join(mde_s, mde_s.c.id == mdd_s.c.metric_definition_id)
-                                                     .outerjoin(mdg, mdg.c.dimension_set_id
-                                                                == mdd_s.c.metric_dimension_set_id))
-                                        .where(ad_s.c.tenant_id == bindparam('b_tenant_id'))
-                                        .where(ad_s.c.id == bindparam('b_id'))
-                                        .order_by(a_s.c.id)
-                                        .distinct())
+        self.get_alarm_metrics_query = (
+            select(
+                [a_s.c.id.label('alarm_id'),
+                 mde_s.c.name,
+                 mdg.c.dimensions]) .select_from(
+                a_s.join(
+                    ad_s,
+                    ad_s.c.id == a_s.c.alarm_definition_id) .join(
+                    am_s,
+                    am_s.c.alarm_id == a_s.c.id) .join(
+                    mdd_s,
+                    mdd_s.c.id == am_s.c.metric_definition_dimensions_id) .join(
+                    mde_s,
+                    mde_s.c.id == mdd_s.c.metric_definition_id) .outerjoin(
+                    mdg,
+                    mdg.c.dimension_set_id == mdd_s.c.metric_dimension_set_id)) .where(
+                ad_s.c.tenant_id == bindparam('b_tenant_id')) .where(
+                ad_s.c.id == bindparam('b_id')) .order_by(
+                a_s.c.id) .distinct())

         self.soft_delete_ad_query = (update(ad)
                                      .where(ad.c.tenant_id == bindparam('b_tenant_id'))
@@ -154,90 +165,95 @@ class AlarmDefinitionsRepository(sql_repository.SQLRepository,
                 .group_by(sadd_s.c.sub_alarm_definition_id)
                 .alias('saddg'))

-        self.get_sub_alarm_definitions_query = (select([sad_s, saddg.c.dimensions])
-                                                .select_from(sad_s.outerjoin(saddg,
-                                                                             saddg.c.sub_alarm_definition_id
-                                                                             == sad_s.c.id))
-                                                .where(sad_s.c.alarm_definition_id
-                                                       == bindparam('b_alarm_definition_id')))
+        self.get_sub_alarm_definitions_query = (
+            select(
+                [
+                    sad_s,
+                    saddg.c.dimensions]) .select_from(
+                sad_s.outerjoin(
+                    saddg,
+                    saddg.c.sub_alarm_definition_id == sad_s.c.id)) .where(
+                sad_s.c.alarm_definition_id == bindparam('b_alarm_definition_id')))

-        self.create_alarm_definition_insert_ad_query = (insert(ad)
-                                                        .values(
-            id=bindparam('b_id'),
-            tenant_id=bindparam('b_tenant_id'),
-            name=bindparam('b_name'),
-            description=bindparam('b_description'),
-            expression=bindparam('b_expression'),
-            severity=bindparam('b_severity'),
-            match_by=bindparam('b_match_by'),
-            actions_enabled=bindparam('b_actions_enabled'),
-            created_at=bindparam('b_created_at'),
-            updated_at=bindparam('b_updated_at')))
+        self.create_alarm_definition_insert_ad_query = (
+            insert(ad) .values(
+                id=bindparam('b_id'),
+                tenant_id=bindparam('b_tenant_id'),
+                name=bindparam('b_name'),
+                description=bindparam('b_description'),
+                expression=bindparam('b_expression'),
+                severity=bindparam('b_severity'),
+                match_by=bindparam('b_match_by'),
+                actions_enabled=bindparam('b_actions_enabled'),
+                created_at=bindparam('b_created_at'),
+                updated_at=bindparam('b_updated_at')))

-        self.create_alarm_definition_insert_sad_query = (insert(sad)
-                                                         .values(
-            id=bindparam('b_id'),
-            alarm_definition_id=bindparam('b_alarm_definition_id'),
-            function=bindparam('b_function'),
-            metric_name=bindparam('b_metric_name'),
-            operator=bindparam('b_operator'),
-            threshold=bindparam('b_threshold'),
-            period=bindparam('b_period'),
-            periods=bindparam('b_periods'),
-            is_deterministic=bindparam('b_is_deterministic'),
-            created_at=bindparam('b_created_at'),
-            updated_at=bindparam('b_updated_at')))
+        self.create_alarm_definition_insert_sad_query = (
+            insert(sad) .values(
+                id=bindparam('b_id'),
+                alarm_definition_id=bindparam('b_alarm_definition_id'),
+                function=bindparam('b_function'),
+                metric_name=bindparam('b_metric_name'),
+                operator=bindparam('b_operator'),
+                threshold=bindparam('b_threshold'),
+                period=bindparam('b_period'),
+                periods=bindparam('b_periods'),
+                is_deterministic=bindparam('b_is_deterministic'),
+                created_at=bindparam('b_created_at'),
+                updated_at=bindparam('b_updated_at')))

         b_sad_id = bindparam('b_sub_alarm_definition_id')
-        self.create_alarm_definition_insert_sadd_query = (insert(sadd)
-                                                          .values(
-            sub_alarm_definition_id=b_sad_id,
-            dimension_name=bindparam('b_dimension_name'),
-            value=bindparam('b_value')))
+        self.create_alarm_definition_insert_sadd_query = (
+            insert(sadd) .values(
+                sub_alarm_definition_id=b_sad_id,
+                dimension_name=bindparam('b_dimension_name'),
+                value=bindparam('b_value')))

-        self.update_or_patch_alarm_definition_update_ad_query = (update(ad)
-                                                                 .where(ad.c.tenant_id == bindparam('b_tenant_id'))
-                                                                 .where(ad.c.id == bindparam('b_id')))
+        self.update_or_patch_alarm_definition_update_ad_query = (
+            update(ad) .where(
+                ad.c.tenant_id == bindparam('b_tenant_id')) .where(
+                ad.c.id == bindparam('b_id')))

-        self.update_or_patch_alarm_definition_delete_sad_query = (delete(sad)
-                                                                  .where(sad.c.id == bindparam('b_id')))
+        self.update_or_patch_alarm_definition_delete_sad_query = (
+            delete(sad) .where(sad.c.id == bindparam('b_id')))

-        self.update_or_patch_alarm_definition_update_sad_query = (update(sad)
-                                                                  .where(sad.c.id == bindparam('b_id'))
-                                                                  .values(
-            operator=bindparam('b_operator'),
-            threshold=bindparam('b_threshold'),
-            is_deterministic=bindparam('b_is_deterministic'),
-            updated_at=bindparam('b_updated_at')))
+        self.update_or_patch_alarm_definition_update_sad_query = (
+            update(sad) .where(
+                sad.c.id == bindparam('b_id')) .values(
+                operator=bindparam('b_operator'),
+                threshold=bindparam('b_threshold'),
+                is_deterministic=bindparam('b_is_deterministic'),
+                updated_at=bindparam('b_updated_at')))

         b_ad_id = bindparam('b_alarm_definition_id'),
-        self.update_or_patch_alarm_definition_insert_sad_query = (insert(sad)
-                                                                  .values(
-            id=bindparam('b_id'),
-            alarm_definition_id=b_ad_id,
-            function=bindparam('b_function'),
-            metric_name=bindparam('b_metric_name'),
-            operator=bindparam('b_operator'),
-            threshold=bindparam('b_threshold'),
-            period=bindparam('b_period'),
-            periods=bindparam('b_periods'),
-            is_deterministic=bindparam('b_is_deterministic'),
-            created_at=bindparam('b_created_at'),
-            updated_at=bindparam('b_updated_at')))
+        self.update_or_patch_alarm_definition_insert_sad_query = (
+            insert(sad) .values(
+                id=bindparam('b_id'),
+                alarm_definition_id=b_ad_id,
+                function=bindparam('b_function'),
+                metric_name=bindparam('b_metric_name'),
+                operator=bindparam('b_operator'),
+                threshold=bindparam('b_threshold'),
+                period=bindparam('b_period'),
+                periods=bindparam('b_periods'),
+                is_deterministic=bindparam('b_is_deterministic'),
+                created_at=bindparam('b_created_at'),
+                updated_at=bindparam('b_updated_at')))

-        self.update_or_patch_alarm_definition_insert_sadd_query = (insert(sadd)
-                                                                   .values(
-            sub_alarm_definition_id=b_sad_id,
-            dimension_name=bindparam('b_dimension_name'),
-            value=bindparam('b_value')))
+        self.update_or_patch_alarm_definition_insert_sadd_query = (
+            insert(sadd) .values(
+                sub_alarm_definition_id=b_sad_id,
+                dimension_name=bindparam('b_dimension_name'),
+                value=bindparam('b_value')))

         self.delete_aa_query = (delete(aa)
                                 .where(aa.c.alarm_definition_id
                                        == bindparam('b_alarm_definition_id')))

-        self.delete_aa_state_query = (delete(aa)
-                                      .where(aa.c.alarm_definition_id == bindparam('b_alarm_definition_id'))
-                                      .where(aa.c.alarm_state == bindparam('b_alarm_state')))
+        self.delete_aa_state_query = (
+            delete(aa) .where(
+                aa.c.alarm_definition_id == bindparam('b_alarm_definition_id')) .where(
+                aa.c.alarm_state == bindparam('b_alarm_state')))

         self.select_nm_query = (select([nm_s.c.id])
                                 .select_from(nm_s)
@@ -315,8 +331,8 @@ class AlarmDefinitionsRepository(sql_repository.SQLRepository,

             if severity:
                 severities = severity.split('|')
-                query = query.where(
-                    or_(ad.c.severity == bindparam('b_severity' + str(i)) for i in range(len(severities))))
+                query = query.where(or_(ad.c.severity == bindparam(
+                    'b_severity' + str(i)) for i in range(len(severities))))
                 for i, s in enumerate(severities):
                     parms['b_severity' + str(i)] = s.encode('utf8')
@@ -389,8 +405,10 @@ class AlarmDefinitionsRepository(sql_repository.SQLRepository,
             return self._get_sub_alarm_definitions(conn, alarm_definition_id)

     def _get_sub_alarm_definitions(self, conn, alarm_definition_id):
-        return [dict(row) for row in conn.execute(self.get_sub_alarm_definitions_query,
-                                                  b_alarm_definition_id=alarm_definition_id).fetchall()]
+        return [
+            dict(row) for row in conn.execute(
+                self.get_sub_alarm_definitions_query,
+                b_alarm_definition_id=alarm_definition_id).fetchall()]

     @sql_repository.sql_try_catch_block
     def create_alarm_definition(self, tenant_id, name, expression,
@@ -20,7 +20,8 @@ from monasca_api.common.repositories import alarms_repository
 from monasca_api.common.repositories import exceptions
 from monasca_api.common.repositories.sqla import models
 from monasca_api.common.repositories.sqla import sql_repository
-from sqlalchemy import MetaData, update, delete, select, text, bindparam, func, literal_column, asc, desc
+from sqlalchemy import (MetaData, update, delete, select, text,
+                        bindparam, func, literal_column, asc, desc)
 from sqlalchemy import or_

@@ -54,15 +55,22 @@ class AlarmsRepository(sql_repository.SQLRepository,
         gc_columns = [md.c.name + text("'='") + md.c.value]

         mdg = (select([md.c.dimension_set_id,
-                       models.group_concat(gc_columns).label('dimensions')])
+                      models.group_concat(gc_columns).label('dimensions')])
               .select_from(md)
               .group_by(md.c.dimension_set_id).alias('mdg'))

-        self.base_query_from = (a_s.join(ad, ad.c.id == a_s.c.alarm_definition_id)
-                                .join(am, am.c.alarm_id == a_s.c.id)
-                                .join(mdd, mdd.c.id == am.c.metric_definition_dimensions_id)
-                                .join(mde, mde.c.id == mdd.c.metric_definition_id)
-                                .outerjoin(mdg, mdg.c.dimension_set_id == mdd.c.metric_dimension_set_id))
+        self.base_query_from = (
+            a_s.join(
+                ad,
+                ad.c.id == a_s.c.alarm_definition_id) .join(
+                am,
+                am.c.alarm_id == a_s.c.id) .join(
+                mdd,
+                mdd.c.id == am.c.metric_definition_dimensions_id) .join(
+                mde,
+                mde.c.id == mdd.c.metric_definition_id) .outerjoin(
+                mdg,
+                mdg.c.dimension_set_id == mdd.c.metric_dimension_set_id))

         self.base_query = select([a_s.c.id.label('alarm_id'),
                                   a_s.c.state,
@@ -280,8 +288,8 @@ class AlarmsRepository(sql_repository.SQLRepository,

         if 'severity' in query_parms:
             severities = query_parms['severity'].split('|')
-            query = query.where(
-                or_(ad.c.severity == bindparam('b_severity' + str(i)) for i in range(len(severities))))
+            query = query.where(or_(ad.c.severity == bindparam(
+                'b_severity' + str(i)) for i in range(len(severities))))
             for i, s in enumerate(severities):
                 parms['b_severity' + str(i)] = s.encode('utf8')
@@ -364,21 +372,23 @@ class AlarmsRepository(sql_repository.SQLRepository,

         order_columns = []
         if 'sort_by' in query_parms:
-            columns_mapper = {'alarm_id': a.c.id,
-                              'alarm_definition_id': ad.c.id,
-                              'alarm_definition_name': ad.c.name,
-                              'state_updated_timestamp': a.c.state_updated_at,
-                              'updated_timestamp': a.c.updated_at,
-                              'created_timestamp': a.c.created_at,
-                              'severity': models.field_sort(ad.c.severity, map(text, ["'LOW'",
-                                                                                      "'MEDIUM'",
-                                                                                      "'HIGH'",
-                                                                                      "'CRITICAL'"])),
-                              'state': models.field_sort(a.c.state, map(text, ["'OK'",
-                                                                               "'UNDETERMINED'",
-                                                                               "'ALARM'"]))}
+            columns_mapper = \
+                {'alarm_id': a.c.id,
+                 'alarm_definition_id': ad.c.id,
+                 'alarm_definition_name': ad.c.name,
+                 'state_updated_timestamp': a.c.state_updated_at,
+                 'updated_timestamp': a.c.updated_at,
+                 'created_timestamp': a.c.created_at,
+                 'severity': models.field_sort(ad.c.severity, map(text, ["'LOW'",
+                                                                         "'MEDIUM'",
+                                                                         "'HIGH'",
+                                                                         "'CRITICAL'"])),
+                 'state': models.field_sort(a.c.state, map(text, ["'OK'",
+                                                                  "'UNDETERMINED'",
+                                                                  "'ALARM'"]))}

-            order_columns, received_cols = self._remap_columns(query_parms['sort_by'], columns_mapper)
+            order_columns, received_cols = self._remap_columns(
+                query_parms['sort_by'], columns_mapper)

             if not received_cols.get('alarm_id', False):
                 order_columns.append(a.c.id)
@@ -461,9 +471,11 @@ class AlarmsRepository(sql_repository.SQLRepository,

         sub_query_columns.extend(sub_group_by_columns)

-        sub_query_from = (mde.join(mdd, mde.c.id == mdd.c.metric_definition_id)
-                          .join(md, mdd.c.metric_dimension_set_id == md.c.dimension_set_id)
-                          .join(am, am.c.metric_definition_dimensions_id == mdd.c.id))
+        sub_query_from = (
+            mde.join(
+                mdd, mde.c.id == mdd.c.metric_definition_id) .join(
+                md, mdd.c.metric_dimension_set_id == md.c.dimension_set_id) .join(
+                am, am.c.metric_definition_dimensions_id == mdd.c.id))

         sub_query = (select(sub_query_columns)
                      .select_from(sub_query_from)
@@ -491,8 +503,8 @@ class AlarmsRepository(sql_repository.SQLRepository,

         if 'severity' in query_parms:
             severities = query_parms['severity'].split('|')
-            query = query.where(
-                or_(ad.c.severity == bindparam('b_severity' + str(i)) for i in range(len(severities))))
+            query = query.where(or_(ad.c.severity == bindparam(
+                'b_severity' + str(i)) for i in range(len(severities))))
             for i, s in enumerate(severities):
                 parms['b_severity' + str(i)] = s.encode('utf8')
@@ -153,9 +153,11 @@ class group_concat(expression.ColumnElement):
 def _group_concat_oracle(element, compiler_, **kw):
     str_order_by = ''
     if element.order_by is not None and len(element.order_by) > 0:
-        str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.order_by]))
+        str_order_by = "ORDER BY {0}".format(
+            ", ".join([compiler_.process(x) for x in element.order_by]))
     else:
-        str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.columns]))
+        str_order_by = "ORDER BY {0}".format(
+            ", ".join([compiler_.process(x) for x in element.columns]))
     return "LISTAGG({0}, '{2}') WITHIN GROUP ({1})".format(
         ", ".join([compiler_.process(x) for x in element.columns]),
         str_order_by,
@@ -167,7 +169,8 @@ def _group_concat_oracle(element, compiler_, **kw):
 def _group_concat_postgresql(element, compiler_, **kw):
     str_order_by = ''
     if element.order_by is not None and len(element.order_by) > 0:
-        str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.order_by]))
+        str_order_by = "ORDER BY {0}".format(
+            ", ".join([compiler_.process(x) for x in element.order_by]))

     return "STRING_AGG({0}, '{2}' {1})".format(
         ", ".join([compiler_.process(x) for x in element.columns]),
@@ -188,7 +191,8 @@ def _group_concat_sybase(element, compiler_, **kw):
 def _group_concat_mysql(element, compiler_, **kw):
     str_order_by = ''
     if element.order_by is not None and len(element.order_by) > 0:
-        str_order_by = "ORDER BY {0}".format(",".join([compiler_.process(x) for x in element.order_by]))
+        str_order_by = "ORDER BY {0}".format(
+            ",".join([compiler_.process(x) for x in element.order_by]))
     return "GROUP_CONCAT({0} {1} SEPARATOR '{2}')".format(
         ", ".join([compiler_.process(x) for x in element.columns]),
         str_order_by,
@@ -184,7 +184,14 @@ class NotificationsRepository(sql_repository.SQLRepository,
                 b_name=name.encode('utf8')).fetchone()

     @sql_repository.sql_try_catch_block
-    def update_notification(self, notification_id, tenant_id, name, notification_type, address, period):
+    def update_notification(
+            self,
+            notification_id,
+            tenant_id,
+            name,
+            notification_type,
+            address,
+            period):
         with self._db_engine.connect() as conn:
             now = datetime.datetime.utcnow()
@@ -20,37 +20,45 @@ from oslo_config import cfg
 BASE_SQL_PATH = 'monasca_api.common.repositories.sqla.'

 repositories_opts = [
-    cfg.StrOpt('metrics_driver',
-               default='monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository',
-               advanced=True,
-               help='''
+    cfg.StrOpt(
+        'metrics_driver',
+        default='monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository',
+        advanced=True,
+        help='''
 The repository driver to use for metrics
 '''),
-    cfg.StrOpt('alarm_definitions_driver',
-               default=BASE_SQL_PATH + 'alarm_definitions_repository:AlarmDefinitionsRepository',
-               advanced=True,
-               help='''
+    cfg.StrOpt(
+        'alarm_definitions_driver',
+        default=BASE_SQL_PATH +
+        'alarm_definitions_repository:AlarmDefinitionsRepository',
+        advanced=True,
+        help='''
 The repository driver to use for alarm definitions
 '''),
-    cfg.StrOpt('alarms_driver',
-               default=BASE_SQL_PATH + 'alarms_repository:AlarmsRepository',
-               advanced=True,
-               help='''
+    cfg.StrOpt(
+        'alarms_driver',
+        default=BASE_SQL_PATH +
+        'alarms_repository:AlarmsRepository',
+        advanced=True,
+        help='''
 The repository driver to use for alarms
 '''),
-    cfg.StrOpt('notifications_driver',
-               default=BASE_SQL_PATH + 'notifications_repository:NotificationsRepository',
-               advanced=True,
-               help='''
+    cfg.StrOpt(
+        'notifications_driver',
+        default=BASE_SQL_PATH +
+        'notifications_repository:NotificationsRepository',
+        advanced=True,
+        help='''
 The repository driver to use for notifications
 '''),
-    cfg.StrOpt('notification_method_type_driver',
-               default=BASE_SQL_PATH + 'notification_method_type_repository:NotificationMethodTypeRepository',
-               advanced=True,
-               help='''
+    cfg.StrOpt(
+        'notification_method_type_driver',
+        default=BASE_SQL_PATH +
+        'notification_method_type_repository:NotificationMethodTypeRepository',
+        advanced=True,
+        help='''
 The repository driver to use for notifications
-''')
-]
+''')]

 repositories_group = cfg.OptGroup(name='repositories', title='repositories')
@@ -378,7 +378,7 @@ class TestAlarmRepoDB(base.BaseTestCase):
                                 'metric_definition_dimensions_id': '32'}]

         self.default_mdes = [{'id': '1',
-                             'name': 'cpu.idle_perc',
+                              'name': 'cpu.idle_perc',
                               'tenant_id': 'bob',
                               'region': 'west'},
                              {'id': '111',
@@ -353,7 +353,9 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
         sub_alarms = self.repo.get_sub_alarms('bob', '234')
         self.assertEqual(sub_alarms, [])

-        ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, None, None, 0, 100)
+        ads = self.repo.get_alarm_definitions(
+            'bob', '90% CPU', {
+                'image_id': '888'}, None, None, 0, 100)
         expected = [{'actions_enabled': False,
                      'alarm_actions': '29387234,77778687',
                      'description': None,
@@ -379,7 +381,9 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
                      'undetermined_actions': None}]
         self.assertEqual(ads, expected)

-        ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, 'LOW', None, 0, 100)
+        ads = self.repo.get_alarm_definitions(
+            'bob', '90% CPU', {
+                'image_id': '888'}, 'LOW', None, 0, 100)
         expected = [{'actions_enabled': False,
                      'alarm_actions': '29387234,77778687',
                      'description': None,
@@ -405,7 +409,9 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
                      'undetermined_actions': None}]
         self.assertEqual(ads, expected)

-        ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, 'CRITICAL', None, 0, 100)
+        ads = self.repo.get_alarm_definitions(
+            'bob', '90% CPU', {
+                'image_id': '888'}, 'CRITICAL', None, 0, 100)
         expected = []
         self.assertEqual(ads, expected)
@@ -757,10 +763,15 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
         self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace(')', ', 120)'))

     def test_should_patch_expression_periods_change(self):
-        self.run_patch_test(expression=ALARM_DEF_123_FIELDS['expression'].replace(' 10', ' 10 times 2'))
+        self.run_patch_test(
+            expression=ALARM_DEF_123_FIELDS['expression'].replace(
+                ' 10', ' 10 times 2'))

     def test_patch_fails_change_match_by(self):
-        self.assertRaises(exceptions.InvalidUpdateException, self.run_patch_test, match_by=u'device')
+        self.assertRaises(
+            exceptions.InvalidUpdateException,
+            self.run_patch_test,
+            match_by=u'device')

     def test_patch_fails_change_metric_name(self):
         self.assertRaises(exceptions.InvalidUpdateException, self.run_patch_test,
@@ -792,17 +803,18 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
                                          match_by, severity,
                                          patch=True)

-        alarm_def_row = (ALARM_DEF_123_FIELDS['id'],
-                         name if name else ALARM_DEF_123_FIELDS['name'],
-                         description if description else ALARM_DEF_123_FIELDS['description'],
-                         expression if expression else ALARM_DEF_123_FIELDS['expression'],
-                         ALARM_DEF_123_FIELDS['match_by'],  # match-by can't change
-                         severity if severity else ALARM_DEF_123_FIELDS['severity'],
-                         actions_enabled if actions_enabled else ALARM_DEF_123_FIELDS['actions_enabled'],
-                         u','.join(alarm_actions) if alarm_actions else ALARM_DEF_123_FIELDS['alarm_actions'],
-                         u','.join(ok_actions) if ok_actions else ALARM_DEF_123_FIELDS['ok_actions'],
-                         (u','.join(undetermined_actions) if undetermined_actions else
-                          ALARM_DEF_123_FIELDS['undetermined_actions']))
+        alarm_def_row = \
+            (ALARM_DEF_123_FIELDS['id'],
+             name if name else ALARM_DEF_123_FIELDS['name'],
+             description if description else ALARM_DEF_123_FIELDS['description'],
+             expression if expression else ALARM_DEF_123_FIELDS['expression'],
+             ALARM_DEF_123_FIELDS['match_by'],  # match-by can't change
+             severity if severity else ALARM_DEF_123_FIELDS['severity'],
+             actions_enabled if actions_enabled else ALARM_DEF_123_FIELDS['actions_enabled'],
+             u','.join(alarm_actions) if alarm_actions else ALARM_DEF_123_FIELDS['alarm_actions'],
+             u','.join(ok_actions) if ok_actions else ALARM_DEF_123_FIELDS['ok_actions'],
+             (u','.join(undetermined_actions) if undetermined_actions else
+              ALARM_DEF_123_FIELDS['undetermined_actions']))

         sad = self.default_sads[0]
         if expression and ALARM_DEF_123_FIELDS['expression'] != expression:
@@ -818,7 +830,12 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
                      'period': sub_expr.period,
                      'is_deterministic': sub_expr.deterministic,
                      'periods': sub_expr.periods})
-            expected_sub_alarm_maps = {'changed': {u'111': sub_alarm_def}, 'new': {}, 'old': {}, 'unchanged': {}}
+            expected_sub_alarm_maps = {
+                'changed': {
+                    u'111': sub_alarm_def},
+                'new': {},
+                'old': {},
+                'unchanged': {}}
         else:
             sub_alarm_def = sub_alarm_definition.SubAlarmDefinition(
                 row={'id': sad['id'],
@@ -831,5 +848,6 @@ class TestAlarmDefinitionRepoDB(base.BaseTestCase):
                      'period': sad['period'],
                      'is_deterministic': sad['is_deterministic'],
                      'periods': sad['periods']})
-            expected_sub_alarm_maps = {'changed': {}, 'new': {}, 'old': {}, 'unchanged': {u'111': sub_alarm_def}}
+            expected_sub_alarm_maps = {'changed': {}, 'new': {},
+                                       'old': {}, 'unchanged': {u'111': sub_alarm_def}}
         self.assertEqual((alarm_def_row, expected_sub_alarm_maps), updates)
@@ -182,7 +182,8 @@ class TestAlarmsStateHistory(AlarmTestBase):
     def test_alarm_state_history(self):
         expected_elements = {u"elements": [dict(ALARM_HISTORY)]}
         del expected_elements[u"elements"][0][u"time"]
-        del expected_elements[u"elements"][0][u"sub_alarms"][0][u"sub_alarm_expression"][u"metric_definition"]
+        del (expected_elements[u"elements"][0][u"sub_alarms"][0]
+             [u"sub_alarm_expression"][u"metric_definition"])
         del expected_elements[u"elements"][0][u"tenant_id"]

         response = self.simulate_request(
@@ -412,8 +413,13 @@ class TestAlarmDefinition(AlarmTestBase):
             u'name': u'Test Alarm Definition Updated',
         }

-        self.simulate_request("/v2.0/alarm-definitions/", headers={'X-Roles': 'admin', 'X-Tenant-Id': TENANT_ID},
-                              method="PATCH", body=json.dumps(alarm_def))
+        self.simulate_request(
+            "/v2.0/alarm-definitions/",
+            headers={
+                'X-Roles': 'admin',
+                'X-Tenant-Id': TENANT_ID},
+            method="PATCH",
+            body=json.dumps(alarm_def))

         self.assertEqual(self.srmock.status, falcon.HTTP_400)
@@ -422,15 +428,24 @@ class TestAlarmDefinition(AlarmTestBase):
             u'name': u'Test Alarm Definition Updated',
         }

-        self.simulate_request("/v2.0/alarm-definitions/", headers={'X-Roles': 'admin', 'X-Tenant-Id': TENANT_ID},
-                              method="PUT", body=json.dumps(alarm_def))
+        self.simulate_request(
+            "/v2.0/alarm-definitions/",
+            headers={
+                'X-Roles': 'admin',
+                'X-Tenant-Id': TENANT_ID},
+            method="PUT",
+            body=json.dumps(alarm_def))

         self.assertEqual(self.srmock.status, falcon.HTTP_400)

     def test_alarm_definition_delete_no_id(self):

-        self.simulate_request("/v2.0/alarm-definitions/", headers={'X-Roles': 'admin', 'X-Tenant-Id': TENANT_ID},
-                              method="DELETE")
+        self.simulate_request(
+            "/v2.0/alarm-definitions/",
+            headers={
+                'X-Roles': 'admin',
+                'X-Tenant-Id': TENANT_ID},
+            method="DELETE")

         self.assertEqual(self.srmock.status, falcon.HTTP_400)
@@ -114,10 +114,10 @@ class TestHealthChecks(test_base.BaseApiTestCase):
         for service in test_list:
             kafka_check.health_check.return_value = base.CheckResult(service['kafka']['healthy'],
                                                                      service['kafka']['message'])
-            alarms_db_check.health_check.return_value = base.CheckResult(service['alarms_db']['healthy'],
-                                                                         service['alarms_db']['message'])
-            metrics_db_check.health_check.return_value = base.CheckResult(service['netrics_db']['healthy'],
-                                                                          service['netrics_db']['message'])
+            alarms_db_check.health_check.return_value = base.CheckResult(
+                service['alarms_db']['healthy'], service['alarms_db']['message'])
+            metrics_db_check.health_check.return_value = base.CheckResult(
+                service['netrics_db']['healthy'], service['netrics_db']['message'])
             self.set_route()
             self.resources._kafka_check = kafka_check
             self.resources._alarm_db_check = alarms_db_check
@@ -27,23 +27,21 @@ class TestModelsDB(base.BaseTestCase):

         md = models.create_md_model(metadata)
         gc_columns = [md.c.name + text("'='") + md.c.value]
-        self.group_concat_md = (select([md.c.dimension_set_id,
-                                        models.group_concat(gc_columns).label('dimensions')])
-                                .select_from(md)
-                                .group_by(md.c.dimension_set_id))
+        self.group_concat_md = (
+            select([md.c.dimension_set_id,
+                    models.group_concat(gc_columns).label('dimensions')])
+            .select_from(md)
+            .group_by(md.c.dimension_set_id))

-        self.group_concat_md_order = (select([md.c.dimension_set_id,
-                                              models.group_concat(gc_columns,
-                                                                  order_by=[md.c.name.asc()]).label('dimensions')])
-                                      .select_from(md)
-                                      .group_by(md.c.dimension_set_id))
+        self.group_concat_md_order = (
+            select([md.c.dimension_set_id,
+                    models.group_concat(gc_columns,
+                                        order_by=[md.c.name.asc()]).label('dimensions')])
+            .select_from(md)
+            .group_by(md.c.dimension_set_id))

-        self.order_by_field = (select([md.c.dimension_set_id])
-                               .select_from(md)
-                               .order_by(asc(models.field_sort(md.c.dimension_set_id, map(text,
-                                                                                          ["'A'",
-                                                                                           "'B'",
-                                                                                           "'C'"])))))
+        self.order_by_field = (select([md.c.dimension_set_id]) .select_from(md) .order_by(
+            asc(models.field_sort(md.c.dimension_set_id, map(text, ["'A'", "'B'", "'C'"])))))

     def test_oracle(self):
         from sqlalchemy.dialects import oracle
@@ -66,10 +64,11 @@ FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

-        expected = ("""SELECT metric_dimension.dimension_set_id \n"""
-                    """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
-                    """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
-                    """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
+        expected = (
+            """SELECT metric_dimension.dimension_set_id \n"""
+            """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
+            """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
+            """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
         query = str(self.order_by_field.compile(dialect=dialect))
         self.assertEqual(expected, query)
@@ -78,25 +77,28 @@ FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         dialect = diale_.dialect()
         query = str(self.group_concat_md.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
-                    '''|| '=' || metric_dimension.value, ',' ) AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
+            '''|| '=' || metric_dimension.value, ',' ) AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

         query = str(self.group_concat_md_order.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
-                    '''|| '=' || metric_dimension.value, ',' ORDER BY metric_dimension.name ASC) '''
-                    '''AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
+            '''|| '=' || metric_dimension.value, ',' ORDER BY metric_dimension.name ASC) '''
+            '''AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

-        expected = ("""SELECT metric_dimension.dimension_set_id \n"""
-                    """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
-                    """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
-                    """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
+        expected = (
+            """SELECT metric_dimension.dimension_set_id \n"""
+            """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
+            """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
+            """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
         query = str(self.order_by_field.compile(dialect=dialect))
         self.assertEqual(expected, query)
@@ -105,24 +107,27 @@ FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         dialect = diale_.dialect()
         query = str(self.group_concat_md.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
-                    '''|| metric_dimension.value, ',') AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
+            '''|| metric_dimension.value, ',') AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

         query = str(self.group_concat_md_order.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
-                    '''|| metric_dimension.value, ',') AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
+            '''|| metric_dimension.value, ',') AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

-        expected = ("""SELECT metric_dimension.dimension_set_id \n"""
-                    """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
-                    """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
-                    """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
+        expected = (
+            """SELECT metric_dimension.dimension_set_id \n"""
+            """FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
+            """ THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
+            """ metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
         query = str(self.order_by_field.compile(dialect=dialect))
         self.assertEqual(expected, query)
@@ -131,23 +136,29 @@ FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         dialect = diale_.dialect()
         query = str(self.group_concat_md.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, GROUP_CONCAT(concat(concat(metric_dimension.name, '''
-                    ''''='), metric_dimension.value) SEPARATOR ',') AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, '''
+            '''GROUP_CONCAT(concat(concat(metric_dimension.name, '''
+            ''''='), metric_dimension.value) SEPARATOR ',') AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

         query = str(self.group_concat_md_order.compile(dialect=dialect))

-        expected = ('''SELECT metric_dimension.dimension_set_id, GROUP_CONCAT(concat(concat(metric_dimension.name, '''
-                    ''''='), metric_dimension.value) ORDER BY metric_dimension.name ASC '''
-                    '''SEPARATOR ',') AS dimensions '''
-                    '''
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id, '''
+            '''GROUP_CONCAT(concat(concat(metric_dimension.name, '''
+            ''''='), metric_dimension.value) ORDER BY metric_dimension.name ASC '''
+            '''SEPARATOR ',') AS dimensions '''
+            '''
 FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
         self.assertEqual(expected, query)

-        expected = ('''SELECT metric_dimension.dimension_set_id \n'''
-                    '''FROM metric_dimension ORDER BY FIELD(metric_dimension.dimension_set_id, 'A', 'B', 'C') ASC''')
+        expected = (
+            '''SELECT metric_dimension.dimension_set_id \n'''
+            '''FROM metric_dimension '''
+            '''ORDER BY FIELD(metric_dimension.dimension_set_id, 'A', 'B', 'C') ASC''')

         query = str(self.order_by_field.compile(dialect=dialect))
         self.assertEqual(expected, query)
@@ -200,7 +200,11 @@ class TestNotificationValidation(base.BaseTestCase):
         self.assertEqual("Address name@ is not of correct format", str(ex))

     def test_validation_exception_for_invalid_period_for_email(self):
-        notification = {"name": "MyEmail", "type": "EMAIL", "address": "name@domain.com", "period": "60"}
+        notification = {
+            "name": "MyEmail",
+            "type": "EMAIL",
+            "address": "name@domain.com",
+            "period": "60"}
         ex = self.assertRaises(schemas_exceptions.ValidationException,
                                schemas_notifications.parse_and_validate,
                                notification, valid_periods)
@@ -91,5 +91,6 @@ def _parse_and_validate_period(period, valid_periods):
     except Exception:
         raise exceptions.ValidationException("Period {} must be a valid integer".format(period))
     if period != 0 and period not in valid_periods:
-        raise exceptions.ValidationException("{} is not a valid period, not in {}".format(period, valid_periods))
+        raise exceptions.ValidationException(
+            "{} is not a valid period, not in {}".format(period, valid_periods))
     return period
@@ -32,9 +32,10 @@ def validate_alarm_state(state):

 def validate_alarm_definition_severity(severity):
     if severity.upper() not in VALID_ALARM_DEFINITION_SEVERITIES:
-        raise HTTPUnprocessableEntityError("Invalid Severity",
-                                           "Severity {} must be one of {}".format(severity.encode('utf8'),
-                                                                                  VALID_ALARM_DEFINITION_SEVERITIES))
+        raise HTTPUnprocessableEntityError(
+            "Invalid Severity",
+            "Severity {} must be one of {}".format(severity.encode('utf8'),
+                                                   VALID_ALARM_DEFINITION_SEVERITIES))


 def validate_severity_query(severity_str):
@@ -110,8 +110,9 @@ class AlarmDefinitions(alarm_definitions_api_v2.AlarmDefinitionsV2API,
             try:
                 offset = int(offset)
             except Exception:
-                raise HTTPUnprocessableEntityError('Unprocessable Entity',
-                                                   'Offset value {} must be an integer'.format(offset))
+                raise HTTPUnprocessableEntityError(
+                    'Unprocessable Entity',
+                    'Offset value {} must be an integer'.format(offset))
             result = self._alarm_definition_list(req.project_id, name,
                                                  dimensions, severity,
                                                  req.uri, sort_by,
@@ -243,14 +244,16 @@ class AlarmDefinitions(alarm_definitions_api_v2.AlarmDefinitionsV2API,
                                                limit=0)
         if definitions:
             if not expected_id:
-                LOG.warning("Found existing definition for {} with tenant_id {}".format(name, tenant_id))
-                raise exceptions.AlreadyExistsException("An alarm definition with the name {} already exists"
-                                                        .format(name))
+                LOG.warning(
+                    "Found existing definition for {} with tenant_id {}".format(name, tenant_id))
+                raise exceptions.AlreadyExistsException(
+                    "An alarm definition with the name {} already exists" .format(name))

             found_definition_id = definitions[0]['id']
             if found_definition_id != expected_id:
-                LOG.warning("Found existing alarm definition for {} with tenant_id {} with unexpected id {}"
-                            .format(name, tenant_id, found_definition_id))
+                LOG.warning(
+                    "Found existing alarm definition for {} with tenant_id {} with unexpected id {}"
+                    .format(name, tenant_id, found_definition_id))
                 raise exceptions.AlreadyExistsException(
                     "An alarm definition with the name {} already exists with id {}"
                     .format(name, found_definition_id))
@@ -130,12 +130,14 @@ class Alarms(alarms_api_v2.AlarmsV2API,
             if isinstance(query_parms['sort_by'], six.string_types):
                 query_parms['sort_by'] = query_parms['sort_by'].split(',')

-            allowed_sort_by = {'alarm_id', 'alarm_definition_id', 'alarm_definition_name',
-                               'state', 'severity', 'lifecycle_state', 'link',
-                               'state_updated_timestamp', 'updated_timestamp', 'created_timestamp'}
+            allowed_sort_by = {
+                'alarm_id', 'alarm_definition_id', 'alarm_definition_name',
+                'state', 'severity', 'lifecycle_state', 'link',
+                'state_updated_timestamp', 'updated_timestamp', 'created_timestamp'}
             validation.validate_sort_by(query_parms['sort_by'], allowed_sort_by)

-        query_parms['metric_dimensions'] = helpers.get_query_dimensions(req, 'metric_dimensions')
+        query_parms['metric_dimensions'] = helpers.get_query_dimensions(
+            req, 'metric_dimensions')
         helpers.validate_query_dimensions(query_parms['metric_dimensions'])

         offset = helpers.get_query_param(req, 'offset')
@@ -144,8 +146,9 @@ class Alarms(alarms_api_v2.AlarmsV2API,
                 offset = int(offset)
             except Exception as ex:
                 LOG.exception(ex)
-                raise HTTPUnprocessableEntityError("Unprocessable Entity",
-                                                   "Offset value {} must be an integer".format(offset))
+                raise HTTPUnprocessableEntityError(
+                    "Unprocessable Entity",
+                    "Offset value {} must be an integer".format(offset))

         result = self._alarm_list(req.uri, req.project_id,
                                   query_parms, offset,
@@ -393,8 +396,9 @@ class AlarmsCount(alarms_api_v2.AlarmsCountV2API, alarming.Alarming):
             try:
                 offset = int(offset)
             except Exception:
-                raise HTTPUnprocessableEntityError("Unprocessable Entity",
-                                                   "Offset must be a valid integer, was {}".format(offset))
+                raise HTTPUnprocessableEntityError(
+                    "Unprocessable Entity",
+                    "Offset must be a valid integer, was {}".format(offset))

         result = self._alarms_count(req.uri, req.project_id, query_parms, offset, req.limit)
@@ -427,8 +431,9 @@ class AlarmsCount(alarms_api_v2.AlarmsCountV2API, alarming.Alarming):
             return result

         if len(count_data) > limit:
-            result['links'].append({'rel': 'next',
-                                    'href': helpers.create_alarms_count_next_link(req_uri, offset, limit)})
+            result['links'].append({
+                'rel': 'next',
+                'href': helpers.create_alarms_count_next_link(req_uri, offset, limit)})
             count_data = count_data[:limit]

         result['columns'].extend(group_by)
@@ -449,7 +454,8 @@ class AlarmsCount(alarms_api_v2.AlarmsCountV2API, alarming.Alarming):
         if not set(group_by).issubset(allowed_values):
             raise HTTPUnprocessableEntityError(
                 "Unprocessable Entity",
-                "One or more group-by values from {} are not in {}".format(group_by, allowed_values))
+                "One or more group-by values from {} are not in {}"
+                .format(group_by, allowed_values))


 class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API,
@@ -55,7 +55,8 @@ class Notifications(notifications_api_v2.NotificationsV2API):
         :raises falcon.HTTPBadRequest
         """
         try:
-            schemas_notifications.parse_and_validate(notification, self.valid_periods, require_all=require_all)
+            schemas_notifications.parse_and_validate(
+                notification, self.valid_periods, require_all=require_all)
         except schemas_exceptions.ValidationException as ex:
             LOG.exception(ex)
             raise falcon.HTTPBadRequest('Bad Request', str(ex))
@@ -65,14 +66,18 @@ class Notifications(notifications_api_v2.NotificationsV2API):

         if notification:
             if not expected_id:
-                LOG.warning("Found existing notification method for {} with tenant_id {}".format(name, tenant_id))
+                LOG.warning(
+                    "Found existing notification method for {} with tenant_id {}"
+                    .format(name, tenant_id))
                 raise exceptions.AlreadyExistsException(
                     "A notification method with the name {} already exists".format(name))

             found_notification_id = notification['id']
             if found_notification_id != expected_id:
-                LOG.warning("Found existing notification method for {} with tenant_id {} with unexpected id {}"
-                            .format(name, tenant_id, found_notification_id))
+                LOG.warning(
+                    "Found existing notification method for {} "
+                    "with tenant_id {} with unexpected id {}"
+                    .format(name, tenant_id, found_notification_id))
                 raise exceptions.AlreadyExistsException(
                     "A notification method with name {} already exists with id {}"
                     .format(name, found_notification_id))
@@ -82,9 +87,12 @@ class Notifications(notifications_api_v2.NotificationsV2API):
             exists = nmt.upper() in notification_methods

             if not exists:
-                LOG.warning("Found no notification method type {} . Did you install/enable the plugin for that type?"
-                            .format(nmt))
-                raise falcon.HTTPBadRequest('Bad Request', "Not a valid notification method type {} ".format(nmt))
+                LOG.warning(
+                    "Found no notification method type {}."
+                    "Did you install/enable the plugin for that type?"
+                    .format(nmt))
+                raise falcon.HTTPBadRequest('Bad Request', "Not a valid notification method type {} "
+                                            .format(nmt))

     def _create_notification(self, tenant_id, notification, uri):
@@ -183,7 +191,8 @@ class Notifications(notifications_api_v2.NotificationsV2API):
                                                 notification_id)

     def _patch_get_notification(self, tenant_id, notification_id, notification):
-        original_notification = self._notifications_repo.list_notification(tenant_id, notification_id)
+        original_notification = self._notifications_repo.list_notification(
+            tenant_id, notification_id)
         if 'name' not in notification:
             notification['name'] = original_notification['name']
         if 'type' not in notification:
tox.ini
@@ -136,7 +136,7 @@ ignore = F821,H201,H302,H405
 # H205: Use assert(Greater|Less)(Equal) for comparison.
 enable-extensions=H106,H203,H204,H205
 max-complexity = 50
-max-line-length = 120
+max-line-length = 100
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools,build
 show-source = True
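This tox.ini change is what drives the rest of the commit: lowering max-line-length from 120 to 100 makes the pep8 gate reject the long lines that the hunks above re-wrap. A minimal sketch of checking the new limit with pycodestyle, the library behind the pep8 check ('monasca_api' as the target path is an assumption for illustration):

    # Sketch only: run the E501 line-length check with the new 100-column
    # limit; the target path is an assumption.
    import pycodestyle

    style = pycodestyle.StyleGuide(max_line_length=100)  # mirrors tox.ini
    report = style.check_files(['monasca_api'])
    print('long-line (E501) errors:', report.get_count('E501'))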