Eliminated ceiling function for utilization metrics

Removed the calls to the ceiling function on utilization
metrics aggregation such that they now are exact values (i.e.,
not rounded up to the next integral value).

Change-Id: I9813b94acb051f6754da2d559090318010f86e57
This commit is contained in:
Flint Calvin 2016-09-09 21:05:34 +00:00
parent 3cdb0d1687
commit 4edad0286a
4 changed files with 13 additions and 14 deletions

View File

@@ -12,7 +12,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from pyspark.sql.functions import ceil
from pyspark.sql.functions import col
from pyspark.sql.functions import when
from pyspark.sql import SQLContext
@@ -157,7 +156,7 @@ class FetchQuantityUtil(UsageComponent):
To calculate the utilized quantity this component uses following
formula:
utilized quantity = ceil((100 - idle_perc) * total_quantity / 100)
utilized quantity = (100 - idle_perc) * total_quantity / 100
"""
@@ -252,9 +251,9 @@ class FetchQuantityUtil(UsageComponent):
quant_idle_perc_calc_df = quant_idle_perc_df.select(
col("quantity_df_alias.*"),
when(col("idle_perc_df_alias.quantity") != 0.0,
ceil(((100.0 - col(
"idle_perc_df_alias.quantity"))) * col(
"quantity_df_alias.quantity") / 100.0))
(100.0 - col(
"idle_perc_df_alias.quantity")) * col(
"quantity_df_alias.quantity") / 100.0)
.otherwise(col("quantity_df_alias.quantity"))
.alias("utilized_quantity"),

View File

@@ -747,7 +747,7 @@ class SparkTest(SparkContextTest):
self.assertTrue(cpu_util_cores_agg_metric is not None)
self.assertEqual(2.0,
self.assertEqual(0.5303809523809525,
cpu_util_cores_agg_metric
.get('metric').get('value'))
self.assertEqual('useast',
@@ -789,7 +789,7 @@ class SparkTest(SparkContextTest):
self.assertTrue(cpu_util_cores_agg_metric is not None)
self.assertEqual(1.0,
self.assertEqual(0.3866666666666669,
cpu_util_cores_agg_metric
.get('metric').get('value'))
self.assertEqual('useast',
@@ -831,7 +831,7 @@ class SparkTest(SparkContextTest):
self.assertTrue(cpu_util_cores_agg_metric is not None)
self.assertEqual(1.0,
self.assertEqual(0.14371428571428566,
cpu_util_cores_agg_metric
.get('metric').get('value'))
self.assertEqual('useast',

View File

@@ -181,7 +181,7 @@ class TestFetchQuantityUtilAgg(SparkContextTest):
if value.get('metric').get(
'name') == 'cpu.utilized_logical_cores_agg'][0]
self.assertEqual(9.0,
self.assertEqual(7.7700000000000005,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',
@@ -284,7 +284,7 @@ class TestFetchQuantityUtilAgg(SparkContextTest):
if value.get('metric').get(
'name') == 'cpu.utilized_logical_cores_agg'][0]
self.assertEqual(11.0,
self.assertEqual(9.52,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',
@@ -387,7 +387,7 @@ class TestFetchQuantityUtilAgg(SparkContextTest):
if value.get('metric').get(
'name') == 'cpu.utilized_logical_cores_agg'][0]
self.assertEqual(8.0,
self.assertEqual(7.134214285714285,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',

View File

@@ -217,7 +217,7 @@ class SparkTest(SparkContextTest):
value.get('metric').get('dimensions').get('host') ==
'all'][0]
self.assertEqual(8.0,
self.assertEqual(7.134214285714285,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',
@@ -258,7 +258,7 @@ class SparkTest(SparkContextTest):
value.get('metric').get('dimensions').get('host') ==
'test-cp1-comp0333-mgmt'][0]
self.assertEqual(5.0,
self.assertEqual(4.9665,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',
@@ -299,7 +299,7 @@ class SparkTest(SparkContextTest):
value.get('metric').get('dimensions').get('host') ==
'test-cp1-comp0027-mgmt'][0]
self.assertEqual(3.0,
self.assertEqual(2.1677142857142853,
utilized_cpu_logical_agg_metric.get(
'metric').get('value'))
self.assertEqual('useast',