From 4d2daac1997fa616610e8d4b3f1135a863878107 Mon Sep 17 00:00:00 2001 From: Bhinav Sura Date: Tue, 25 Jul 2017 16:59:20 -0700 Subject: [PATCH] PERCENTILE, DOWNSAMPLE, MOVING Transform changes. --- .../transform/CullAboveValueMapping.java | 2 +- .../transform/CullBelowValueMapping.java | 2 +- .../DeviationValueReducerOrMapping.java | 2 +- .../metric/transform/DownsampleTransform.java | 22 +- .../transform/FillCalculateTransform.java | 5 +- .../metric/transform/InternalReducerType.java | 8 +- ...tricFilterWithInteralReducerTransform.java | 4 +- .../transform/MetricZipperTransform.java | 24 +- .../transform/MovingAverageTransform.java | 185 ----------- .../metric/transform/MovingValueMapping.java | 72 ++--- .../metric/transform/PercentileTransform.java | 159 --------- .../PercentileValueReducerOrMapping.java | 148 ++------- .../metric/transform/Diff_VTransformTest.java | 82 +++++ .../transform/Divide_VTransformTest.java | 80 +++++ .../transform/DownsampleTransformTest.java | 103 +++++- .../transform/FillCalculateTransformTest.java | 18 +- .../transform/MovingAverageTransformTest.java | 305 ------------------ .../metric/transform/MovingTransformTest.java | 63 +++- .../transform/PercentileTransformTest.java | 184 +++++++---- .../transform/Scale_VTransformTest.java | 80 +++++ .../metric/transform/Sum_VTransformTest.java | 81 +++++ 21 files changed, 731 insertions(+), 898 deletions(-) delete mode 100644 ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransform.java delete mode 100644 ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransform.java delete mode 100644 ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransformTest.java diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullAboveValueMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullAboveValueMapping.java index fa3c551ab..2c8c5f53e 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullAboveValueMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullAboveValueMapping.java @@ -34,7 +34,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Maps; import com.salesforce.dva.argus.system.SystemAssert; -import org.apache.commons.math.stat.descriptive.rank.Percentile; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; import java.util.Arrays; import java.util.HashMap; import java.util.List; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullBelowValueMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullBelowValueMapping.java index c9d1db3ca..851ae5b32 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullBelowValueMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CullBelowValueMapping.java @@ -34,7 +34,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Maps; import com.salesforce.dva.argus.system.SystemAssert; -import org.apache.commons.math.stat.descriptive.rank.Percentile; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; import java.util.Arrays; import java.util.HashMap; import java.util.List; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DeviationValueReducerOrMapping.java 
b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DeviationValueReducerOrMapping.java index e9e88fdcd..5bbde4eb1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DeviationValueReducerOrMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DeviationValueReducerOrMapping.java @@ -33,7 +33,7 @@ import com.salesforce.dva.argus.system.SystemAssert; import com.salesforce.dva.argus.system.SystemException; -import org.apache.commons.math.stat.descriptive.moment.StandardDeviation; +import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import java.util.ArrayList; import java.util.List; import java.util.Map; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java index 609ef6b69..81eaccaa1 100755 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java @@ -35,17 +35,15 @@ import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.metric.MetricReader; import com.salesforce.dva.argus.system.SystemAssert; -import org.apache.commons.math.stat.descriptive.moment.Mean; -import org.apache.commons.math.stat.descriptive.moment.StandardDeviation; -import org.apache.commons.math.stat.descriptive.summary.Sum; +import org.apache.commons.math3.stat.descriptive.moment.Mean; +import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; +import org.apache.commons.math3.stat.descriptive.summary.Sum; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.TreeMap; /** @@ -78,6 +76,7 @@ public static Double downsamplerReducer(List values, String reducerType) operands.add(value); } } + InternalReducerType type = InternalReducerType.fromString(reducerType); switch (type) { case AVG: @@ -93,8 +92,10 @@ public static Double downsamplerReducer(List values, String reducerType) case COUNT: values.removeAll(Collections.singleton(null)); return (double) values.size(); + case PERCENTILE: + return new Percentile().evaluate(Doubles.toArray(operands), Double.parseDouble(reducerType.substring(1))); default: - throw new UnsupportedOperationException(reducerType); + throw new UnsupportedOperationException("Illegal type: " + reducerType + ". 
Please provide a valid type."); } } @@ -126,10 +127,12 @@ public List transform(List metrics) { @Override public List transform(List metrics, List constants) { - SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics"); + SystemAssert.requireArgument(metrics != null, "Cannot transform null metrics"); + if (metrics.isEmpty()) { return metrics; } + SystemAssert.requireArgument(constants.size() == 1, "Downsampler Transform can only have exactly one constant which is downsampler expression"); SystemAssert.requireArgument(constants.get(0).contains("-"), "This downsampler expression is not valid."); @@ -142,11 +145,8 @@ public List transform(List metrics, List constants) { String windowSizeStr = expArr[0]; Long windowSize = getWindowInSeconds(windowSizeStr) * 1000; String windowUnit = windowSizeStr.substring(windowSizeStr.length() - 1); - // init downsample type - Set typeSet = new HashSet(Arrays.asList("avg", "min", "max", "sum", "dev", "count")); String downsampleType = expArr[1]; - SystemAssert.requireArgument(typeSet.contains(downsampleType), "Please input a valid type."); for (Metric metric : metrics) { metric.setDatapoints(createDownsampleDatapoints(metric.getDatapoints(), windowSize, downsampleType, windowUnit)); } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java index d8232146c..9b5e51e03 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java @@ -31,6 +31,7 @@ package com.salesforce.dva.argus.service.metric.transform; +import com.google.common.primitives.Doubles; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.system.SystemAssert; import java.util.ArrayList; @@ -40,6 +41,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; + /** * Creates a constant line based on the calculated value.
* FILL_CALCULATE(, , , ) @@ -89,7 +92,7 @@ private static Double calculateResult(Metric metric, String calculationType) { if (matcher.matches()) { Integer target = Integer.valueOf(matcher.group(1)); - result = PercentileTransform.calculateNthPercentile(valueList, target); + result = new Percentile().evaluate(Doubles.toArray(valueList), target); } else { switch (calculationType) { case "min": diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InternalReducerType.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InternalReducerType.java index 684c60ce9..88be98beb 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InternalReducerType.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InternalReducerType.java @@ -53,6 +53,7 @@ public enum InternalReducerType { DEVIATION("dev"), SUM("sum"), MEDIAN("median"), + PERCENTILE(""), COUNT("count"); //~ Instance fields ****************************************************************************************************************************** @@ -84,8 +85,13 @@ public static InternalReducerType fromString(String name) { return type; } } + + if(name.matches("^p\\d{1,2}$")) { + return InternalReducerType.PERCENTILE; + } } - throw new IllegalArgumentException(name); + + throw new IllegalArgumentException("Illegal type: " + name + ". Please provide a valid type."); } //~ Methods ************************************************************************************************************************************** diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java index 7096939b4..2127d92aa 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java @@ -34,8 +34,8 @@ import com.google.common.primitives.Doubles; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.system.SystemAssert; -import org.apache.commons.math.stat.descriptive.moment.Mean; -import org.apache.commons.math.stat.descriptive.moment.StandardDeviation; +import org.apache.commons.math3.stat.descriptive.moment.Mean; +import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java index ca50653f6..3055d44dd 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java @@ -50,6 +50,8 @@ public class MetricZipperTransform implements Transform { private final ValueZipper valueZipper; private final String defaultScope; + protected static String FULLJOIN = "UNION"; + protected Boolean fulljoinIndicator = false; //~ Constructors ********************************************************************************************************************************* @@ -72,18 +74,23 @@ public 
String getResultScopeName() { @Override public List transform(List metrics, List constants) { - SystemAssert.requireArgument(constants == null || constants.isEmpty(), "Zipper transform doesn't support constants!"); + SystemAssert.requireArgument(constants != null && constants.size() == 1 && FULLJOIN.equals(constants.get(0).toUpperCase()), + "Zipper transforms only support UNION indicator as a constant!"); + + fulljoinIndicator = true; return transform(metrics); } @Override public List transform(List metrics) { - SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics"); + SystemAssert.requireArgument(metrics != null, "Cannot transform null metrics."); if (metrics.isEmpty()) { return metrics; } + SystemAssert.requireArgument(metrics.size() >= 2 && metrics.get(metrics.size() - 1) != null, "Cannot transform without a base metric as second param!"); + return zip(metrics.subList(0, metrics.size() - 1), metrics.get(metrics.size() - 1)); } @@ -134,6 +141,19 @@ public Map zip(Map originalDatapoints, Map baseDP : baseDatapoints.entrySet()) { + Long baseDPKey = baseDP.getKey(); + + if(!zippedDP.containsKey(baseDPKey)) { + zippedDP.put(baseDPKey, this.valueZipper.zip(null, baseDP.getValue())); + } + } + } + return zippedDP; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransform.java deleted file mode 100644 index ef588309a..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransform.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ - -package com.salesforce.dva.argus.service.metric.transform; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.service.metric.MetricReader; -import com.salesforce.dva.argus.system.SystemAssert; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -/** - * Calculates the moving average. - * - * @author Bhinav Sura (bhinav.sura@salesforce.com) - */ -public class MovingAverageTransform implements Transform { - - //~ Instance fields ****************************************************************************************************************************** - - private final Logger _logger = LoggerFactory.getLogger(MovingAverageTransform.class); - - //~ Methods ************************************************************************************************************************************** - - @Override - public List transform(List metrics) { - throw new UnsupportedOperationException("Moving Average Transform needs a window size either as fixed number" + - " of past points or time interval"); - } - - @Override - public List transform(List metrics, List constants) { - SystemAssert.requireArgument(metrics != null, "Cannot transform null or empty metrics"); - if (metrics.isEmpty()) { - return metrics; - } - SystemAssert.requireArgument(constants != null && constants.size() == 1, - "Moving Average Transform " + - "must provide exactly 1 constant. windowSize -> Either fixed number of past points or time interval"); - - String window = constants.get(0); - MetricReader.TimeUnit timeunit = null; - - long windowSizeInSeconds = 0; - try { - timeunit = MetricReader.TimeUnit.fromString(window.substring(window.length() - 1)); - long timeDigits = Long.parseLong(window.substring(0, window.length() - 1)); - windowSizeInSeconds = timeDigits * timeunit.getValue() / 1000; - } catch (Exception t) { - long windowSize = Long.parseLong(window); - - for (Metric metric : metrics) { - metric.setDatapoints(_calculateMovingAverageUsingFixedNoOfPastPoints(metric.getDatapoints(), windowSize)); - } - return metrics; - } - - for (Metric metric : metrics) { - metric.setDatapoints(_calculateMovingAverageUsingTimeInterval(metric.getDatapoints(), windowSizeInSeconds)); - } - return metrics; - } - - private Map _calculateMovingAverageUsingTimeInterval(Map originalDatapoints, long windowSizeInSeconds) { - SystemAssert.requireArgument(windowSizeInSeconds != 0, "Time Interval cannot be 0 for Moving Average Transform"); - - Map transformedDatapoints = new TreeMap<>(); - Map sortedDatapoints = new TreeMap<>(originalDatapoints); - Long[] timestamps = new Long[sortedDatapoints.size()]; - - sortedDatapoints.keySet().toArray(timestamps); - - double sum = sortedDatapoints.get(timestamps[0]); - Long firstTimestamp = timestamps[0]; - int count = 1; - - for (int i = 1, j = 0; i < timestamps.length; i++) { - if (j == 0) { - while (timestamps[i] - windowSizeInSeconds * 1000 < firstTimestamp) { - try { - sum += sortedDatapoints.get(timestamps[i]); - } catch (NumberFormatException | NullPointerException e) { - _logger.warn("Failed to parse datapoint: " + sortedDatapoints.get(timestamps[i])); - } - transformedDatapoints.put(timestamps[i - 1], null); - i++; - count++; - } - transformedDatapoints.put(timestamps[i - 1], (sum / count)); - } - try { - sum += sortedDatapoints.get(timestamps[i]); - while (timestamps[j] <= timestamps[i] - windowSizeInSeconds * 1000) { - sum = _subtractWithinWindow(sum, sortedDatapoints, 
timestamps[j], timestamps[i]); - count--; - j++; - } - } catch (NumberFormatException | NullPointerException e) { - _logger.warn("Failed to parse datapoint: " + sortedDatapoints.get(timestamps[i])); - } - count++; - transformedDatapoints.put(timestamps[i], (sum / count)); - } - return transformedDatapoints; - } - - private double _subtractWithinWindow(double sum, Map sortedDatapoints, long end, long start) { - sum -= sortedDatapoints.get(end); - return sum; - } - - private Map _calculateMovingAverageUsingFixedNoOfPastPoints(Map originalDatapoints, long window) { - SystemAssert.requireArgument(window != 0, "Window cannot be 0 for Moving Average Transform"); - - Map transformedDatapoints = new TreeMap<>(); - Map sortedDatapoints = new TreeMap<>(originalDatapoints); - double sum = 0.0, firstValueInInterval = 0.0; - Long[] timestamps = new Long[sortedDatapoints.size()]; - - sortedDatapoints.keySet().toArray(timestamps); - for (int i = 0, j = 0; i < timestamps.length; i++) { - if (i + 1 < window) { - try { - sum += sortedDatapoints.get(timestamps[i]); - } catch (NumberFormatException | NullPointerException e) { - _logger.warn("Failed to parse datapoint: " + sortedDatapoints.get(timestamps[i]) + "Skipping this one."); - } - transformedDatapoints.put(timestamps[i], null); - } else { - try { - sum += sortedDatapoints.get(timestamps[i]); - sum -= firstValueInInterval; - firstValueInInterval = sortedDatapoints.get(timestamps[j]); - } catch (NumberFormatException | NullPointerException e) { - _logger.warn("Failed to parse datapoint: " + sortedDatapoints.get(timestamps[i]) + "Skipping this one."); - } - transformedDatapoints.put(timestamps[i], (sum / window)); - j++; - } - } - return transformedDatapoints; - } - - @Override - public String getResultScopeName() { - return TransformFactory.Function.MOVINGAVERAGE.name(); - } - - @Override - public List transform(List... listOfList) { - throw new UnsupportedOperationException("This class is deprecated!"); - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingValueMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingValueMapping.java index b74fd8490..4808e5eb8 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingValueMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MovingValueMapping.java @@ -35,7 +35,7 @@ import com.salesforce.dva.argus.system.SystemAssert; import com.salesforce.dva.argus.system.SystemException; import org.apache.commons.lang.ArrayUtils; -import org.apache.commons.math.stat.descriptive.rank.Percentile; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; @@ -63,32 +63,22 @@ public Map mapping(Map originalDatapoints) { @Override public Map mapping(Map originalDatapoints, List constants) { - SystemAssert.requireArgument(constants != null, "Moving Average Transform needs a window size of time interval"); - SystemAssert.requireArgument(!constants.isEmpty(), - "Moving Average Transform must provide at least 1 constant which is windowSize of time interval."); + SystemAssert.requireArgument(constants != null && !constants.isEmpty(), + "MOVING Transform must provide at least 1 constant which is windowSize of time interval."); + + //Default is to perform Moving Average. 
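+ //A second constant, when present, selects the median or sum reducer instead.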
+ InternalReducerType type = InternalReducerType.AVG; if (constants.size() > 1) { - SystemAssert.requireArgument(constants.size() == 2, "Moving Average Transform can't have more than 2 contants!"); - SystemAssert.requireArgument((InternalReducerType.AVG.getName().equals(constants.get(1)) || - InternalReducerType.MEDIAN.getName().equals(constants.get(1))), "Type can only be average or median."); + SystemAssert.requireArgument(constants.size() == 2, "MOVING Transform can't have more than 2 constants!"); + + type = InternalReducerType.fromString(constants.get(1)); + SystemAssert.requireArgument((InternalReducerType.AVG.equals(type) || + InternalReducerType.MEDIAN.equals(type) || InternalReducerType.SUM.equals(type)), "Type can be one of avg, median or sum."); } long windowSizeInSeconds = getWindowInSeconds(constants.get(0)); - SystemAssert.requireArgument(windowSizeInSeconds != 0, "Time Interval cannot be 0 for Moving Average Transform"); - - boolean isMedian = false; - String reducerType = InternalReducerType.AVG.getName(); - - if (constants.size() == 2) { - reducerType = constants.get(1); - } - - InternalReducerType type = InternalReducerType.fromString(reducerType); - - if (type.equals(InternalReducerType.MEDIAN)) { - isMedian = true; - } - + Map movingDatapoints = new TreeMap<>(); Map sortedDatapoints = new TreeMap<>(originalDatapoints); @@ -134,13 +124,7 @@ public Map mapping(Map originalDatapoints, List= timestamps.length - 1) { break; } - if (isMedian) { - double[] numbers = ArrayUtils.toPrimitive(numberArr.toArray(new Double[numberArr.size()])); - - value = new Percentile().evaluate(numbers, 50.0); - } else { - value = (sum / count); - } + value = _calculateValue(sum, numberArr, count, type); movingDatapoints.put(timestamps[head - 1], value); sum += sortedDatapoints.get(timestamps[head]); numberArr.add(sortedDatapoints.get(timestamps[head])); @@ -152,13 +136,7 @@ public Map mapping(Map originalDatapoints, List mapping(Map originalDatapoints, List numberArr, int count, InternalReducerType type) { + + if (InternalReducerType.MEDIAN.equals(type)) { + double[] numbers = ArrayUtils.toPrimitive(numberArr.toArray(new Double[numberArr.size()])); + return new Percentile().evaluate(numbers, 50.0); + } + + if(InternalReducerType.AVG.equals(type)) { + return (sum / count); + } + + return sum; + } + @Override public String name() { return TransformFactory.Function.MOVING.name(); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransform.java deleted file mode 100644 index d0b174e0d..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransform.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3.
Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.service.metric.transform; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.service.metric.MetricReader; -import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.system.SystemException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; - -/** - * Calculates a percentile transform. - * - * @author Bhinav Sura (bhinav.sura@salesforce.com) - */ -public class PercentileTransform implements Transform { - - //~ Methods ************************************************************************************************************************************** - - /** - * Calculates nth percentile of a set of values using the Nearest Neighbor Method. - * - * @param values The values to evaluate. - * @param n The n-th percentile to calculate. - * - * @return The nth-percentile value. - */ - static Double calculateNthPercentile(List values, int n) { - Double[] valuesArr = new Double[values.size()]; - - valuesArr = values.toArray(valuesArr); - Arrays.sort(valuesArr); - - int ordinalRank = (int) Math.ceil(n * values.size() / 100.0); - - return valuesArr[ordinalRank - 1]; - } - - //~ Methods ************************************************************************************************************************************** - - @Override - public List transform(List metrics) { - throw new UnsupportedOperationException("Percentile Transform needs a constant window."); - } - - @Override - public List transform(List metrics, List constants) { - SystemAssert.requireArgument(metrics != null, "Cannot transform null or empty metrics"); - if (metrics.isEmpty()) { - return metrics; - } - SystemAssert.requireArgument(constants != null && constants.size() == 2, - "Percentile Transform must provide exactly 2 constants. 
n -> The nth percentile to calculate, windowSize -> Window size in seconds"); - - long windowInSeconds = 0; - int n = 0; - - try { - n = Integer.parseInt(constants.get(0)); - SystemAssert.requireArgument(n > 0 && n < 100, "For Percentile Transform, 0 < n < 100."); - windowInSeconds = getWindowInSeconds(constants.get(1)); - } catch (NumberFormatException nfe) { - throw new SystemException("Illegal window size supplied to percentile transform", nfe); - } - for (Metric metric : metrics) { - metric.setDatapoints(_calculatePercenTileSeries(metric.getDatapoints(), n, windowInSeconds)); - } - return metrics; - } - - private long getWindowInSeconds(String window) { - MetricReader.TimeUnit timeunit = null; - - try { - timeunit = MetricReader.TimeUnit.fromString(window.substring(window.length() - 1)); - - long timeDigits = Long.parseLong(window.substring(0, window.length() - 1)); - - return timeDigits * timeunit.getValue() / 1000; - } catch (Exception t) { - return Long.parseLong(window); - } - } - - private Map _calculatePercenTileSeries(Map originalDatapoints, int n, long windowInSeconds) { - Map transformedDatapoints = new HashMap<>(); - Map sortedDatapoints = new TreeMap<>(originalDatapoints); - List values = new ArrayList<>(); - Long windowStart = 0L; - - for (Map.Entry entry : sortedDatapoints.entrySet()) { - Long timestamp = entry.getKey(); - Double value = entry.getValue(); - - if (values.isEmpty()) { - values.add(value); - windowStart = timestamp; - } else { - if (timestamp > windowStart + windowInSeconds * 1000) { - transformedDatapoints.put(windowStart, calculateNthPercentile(values, n)); - values.clear(); - windowStart = timestamp; - } - values.add(value); - } - } - - if (!values.isEmpty()) { - transformedDatapoints.put(windowStart, calculateNthPercentile(values, n)); - } - - return transformedDatapoints; - } - - @Override - public String getResultScopeName() { - return TransformFactory.Function.PERCENTILE.name(); - } - - @Override - public List transform(List... listOfList) { - throw new UnsupportedOperationException("This class is deprecated."); - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileValueReducerOrMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileValueReducerOrMapping.java index e615b135d..868b9d832 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileValueReducerOrMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PercentileValueReducerOrMapping.java @@ -31,18 +31,17 @@ package com.salesforce.dva.argus.service.metric.transform; -import com.google.common.collect.TreeMultiset; +import com.google.common.primitives.Doubles; import com.salesforce.dva.argus.service.metric.MetricReader; import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.system.SystemException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.Collections; -import java.util.Comparator; + +import java.util.Collection; import java.util.List; import java.util.Map; import java.util.TreeMap; +import org.apache.commons.math3.stat.descriptive.rank.Percentile; + /** * Calculates the Nth percentile. If a window size is specified, each metric will be evaluated individually using that window. 
Otherwise, the set of * data points across metrics at each given timestamp are evaluated resulting in a single metric result. @@ -54,11 +53,7 @@ public class PercentileValueReducerOrMapping implements ValueReducerOrMapping { //~ Static fields/initializers ******************************************************************************************************************* private static Double percentile = Double.MAX_VALUE; - private static String windowSize = ""; - - //~ Instance fields ****************************************************************************************************************************** - - private Logger _logger = LoggerFactory.getLogger(getClass()); + private static final String INDIVIDUAL = "INDIVIDUAL"; //~ Methods ************************************************************************************************************************************** @@ -78,27 +73,20 @@ private void parseConstants(List constants) { "Percentile Transform must provide at least percentile to calculate."); SystemAssert.requireArgument(Double.parseDouble(constants.get(0)) > 0.0 && Double.parseDouble(constants.get(0)) < 100.0, "For Percentile Transform, 0.0 < percentile < 100.0."); + PercentileValueReducerOrMapping.percentile = Double.parseDouble(constants.get(0)); + if (constants.size() > 1) { - try { - PercentileValueReducerOrMapping.windowSize = constants.get(1); - } catch (NumberFormatException nfe) { - throw new SystemException("Illegal window size supplied to percentile transform", nfe); - } - } - } - - private long getWindowInSeconds(String window) { - MetricReader.TimeUnit timeunit = null; - - try { - timeunit = MetricReader.TimeUnit.fromString(window.substring(window.length() - 1)); - - long timeDigits = Long.parseLong(window.substring(0, window.length() - 1)); - - return timeDigits * timeunit.getValue() / 1000; - } catch (Exception t) { - return Long.parseLong(window); + if(!INDIVIDUAL.equalsIgnoreCase(constants.get(1))) { + String window = constants.get(1); + try { + MetricReader.TimeUnit.fromString(window.substring(window.length() - 1)); + Long.parseLong(window.substring(0, window.length() - 1)); + } catch (Exception t) { + throw new IllegalArgumentException( + "Invalid timeWindow: " + window + ". Please specify a valid window (E.g. 1s, 1m, 1h, 1d) "); + } + } } } @@ -110,7 +98,7 @@ public Map mapping(Map originalDatapoints) { @Override public Map mapping(Map originalDatapoints, List constants) { parseConstants(constants); - return _calculateNthPercentileForOneMetric(originalDatapoints, percentile, getWindowInSeconds(windowSize)); + return _calculateNthPercentileForOneMetric(originalDatapoints, percentile); } @Override @@ -118,93 +106,21 @@ public String name() { return TransformFactory.Function.PERCENTILE.name(); } - private Map _calculateNthPercentileForOneMetric(Map originalDatapoints, Double percentileValue, - long windowInSeconds) { - Map percentileDatapoints = new TreeMap<>(); - - for (Map.Entry entry : originalDatapoints.entrySet()) { - if (entry.getValue() == null) { - entry.setValue(0.0); - } - } - - Long[] timestamps = new Long[originalDatapoints.size()]; - - originalDatapoints.keySet().toArray(timestamps); - - // TreeSet allowing duplicate elements. 
- TreeMultiset values = TreeMultiset.create(new Comparator() { - - @Override - public int compare(Double d1, Double d2) { - return d1.compareTo(d2); - } - }); - - long start = System.currentTimeMillis(); - - values.add(originalDatapoints.get(timestamps[0])); - if (timestamps.length == 1) { - percentileDatapoints.put(timestamps[0], _calculateNthPercentile(values, percentileValue)); - } - - Long firstTimestamp = timestamps[0]; - - for (int head = 1, tail = 0; head < timestamps.length; head++) { - // When moving window, maintain a invariant that timestamps[head] - timestamps[end] < windowSize - // if timestamps[head] - timestamps[end] == windowSize, some points need to be kicked off - // For first window, exclude timestamps[head] - // For a regular window, exclude timestamps[tail] - if (tail == 0) { - while (timestamps[head] - windowInSeconds * 1000 < firstTimestamp) { - // run out of points before hitting the end of first window - if (head >= timestamps.length - 1) { - break; - } - - // do a partial calculation if not enough points - percentileDatapoints.put(timestamps[head - 1], _calculateNthPercentile(values, percentileValue)); - values.add(originalDatapoints.get(timestamps[head])); - head++; - } - percentileDatapoints.put(timestamps[head - 1], _calculateNthPercentile(values, percentileValue)); - } - values.add(originalDatapoints.get(timestamps[head])); - while (timestamps[tail] <= timestamps[head] - windowInSeconds * 1000) { - values.remove(originalDatapoints.get(timestamps[tail])); - tail++; - } - percentileDatapoints.put(timestamps[head], _calculateNthPercentile(values, percentileValue)); - } - _logger.debug("Time to calculate percentile = " + (System.currentTimeMillis() - start) + "ms"); - return percentileDatapoints; - } - - private Double _calculateNthPercentile(List values, Double percentileValue) { - Collections.sort(values, new Comparator() { - - @Override - public int compare(Double d1, Double d2) { - return d1.compareTo(d2); - } - }); - - int ordinalRank = (int) Math.ceil(percentileValue * values.size() / 100.0); - - return values.get(ordinalRank - 1); + private Map _calculateNthPercentileForOneMetric(Map originalDatapoints, Double percentileValue) { + + Map result = new TreeMap<>(); + for(Long timestamp : originalDatapoints.keySet()) { + result.put(timestamp, _calculateNthPercentile(originalDatapoints.values(), percentileValue)); + break; + } + + return result; + } - // O(n) operation to return percentile value from a sorted list. - private Double _calculateNthPercentile(TreeMultiset values, Double percentileValue) { - int ordinalRank = (int) Math.ceil(percentileValue * values.size() / 100.0); - int index = 1; - - for (Double value : values) { - if (index++ == ordinalRank) { - return value; - } - } - throw new SystemException("This should never happen."); + private Double _calculateNthPercentile(Collection values, Double percentileValue) { + return new Percentile().evaluate(Doubles.toArray(values), percentileValue); } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
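The reducer and mapping above now delegate the rank arithmetic to commons-math3, which interpolates between order statistics instead of picking a nearest rank. A minimal sketch of the difference, reusing the 1.0 through 10.0 series from the downsample tests further down (the class name is illustrative):

    import org.apache.commons.math3.stat.descriptive.rank.Percentile;

    public class PercentileSketch {

        public static void main(String[] args) {
            double[] values = { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0 };

            // commons-math3's default estimation computes position = p/100 * (n + 1)
            // and interpolates linearly, so p90 over n = 10 values lands at
            // position 9.9 and yields 9 + 0.9 * (10 - 9) = 9.9.
            System.out.println(new Percentile().evaluate(values, 90.0)); // 9.9

            // The removed nearest-rank code used ceil(p * n / 100.0) as an ordinal
            // rank into the sorted array and would have returned 9.0 here, which is
            // why the expected values in the tests below shift slightly.
        }
    }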
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java index 4f06b7a4d..3ea6cd2af 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java @@ -34,6 +34,7 @@ import com.salesforce.dva.argus.entity.Metric; import org.junit.Test; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -494,5 +495,86 @@ public void testDiff_VTransformWithMissingPointNullPointVectorAgainstNullPointMe assertEquals(result.get(2).getDatapoints().size(), 4); assertEquals(expected_3, result.get(2).getDatapoints()); } + + @Test + public void testDiff_VTransformWithSameShorterLongerVectorAgainstMetricList_fullJoinIndicator() { + Transform sum_vTransform = new MetricZipperTransform(new DiffValueZipper()); + Map datapoints_1 = new HashMap(); + + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(1000L, 10.0); + datapoints_2.put(2000L, 100.0); + datapoints_2.put(3000L, 1000.0); + datapoints_2.put(4000L, 10000.0); + + Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + Map datapoints_3 = new HashMap(); + + datapoints_3.put(1000L, 1.0); + datapoints_3.put(2000L, 10.0); + + Metric metric_3 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_3.setDatapoints(datapoints_3); + + Map vector_datapoints = new HashMap(); + + vector_datapoints.put(1000L, 1.0); + vector_datapoints.put(2000L, 1.0); + vector_datapoints.put(3000L, 1.0); + + Metric vector = new Metric(TEST_SCOPE, TEST_METRIC); + + vector.setDatapoints(vector_datapoints); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + metrics.add(metric_3); + metrics.add(vector); + + Map expected_1 = new HashMap(); + + expected_1.put(1000L, 0.0); + expected_1.put(2000L, 1.0); + expected_1.put(3000L, 2.0); + + Map expected_2 = new HashMap(); + + expected_2.put(1000L, 9.0); + expected_2.put(2000L, 99.0); + expected_2.put(3000L, 999.0); + expected_2.put(4000L, 10000.0); + + Map expected_3 = new HashMap(); + + expected_3.put(1000L, 0.0); + expected_3.put(2000L, 9.0); + expected_3.put(3000L, -1.0); + + List result = sum_vTransform.transform(metrics, Arrays.asList("UNION")); + + assertEquals(3, result.get(0).getDatapoints().size()); + assertEquals(expected_1, result.get(0).getDatapoints()); + assertEquals(4, result.get(1).getDatapoints().size()); + assertEquals(expected_2, result.get(1).getDatapoints()); + assertEquals(3, result.get(2).getDatapoints().size()); + assertEquals(expected_3, result.get(2).getDatapoints()); + } + + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
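The fullJoinIndicator test above drives the zipper's new UNION pass. A compact sketch of that backfill, with a stand-in zip() that treats a missing operand as 0.0 the way the diff expectations imply (DiffValueZipper itself is outside this patch, so that null handling is an assumption):

    import java.util.Map;
    import java.util.TreeMap;

    public class FullJoinSketch {

        // Stand-in for DiffValueZipper.zip(original, base); null-as-zero is
        // assumed here purely for illustration.
        static Double zip(Double original, Double base) {
            return (original == null ? 0.0 : original) - (base == null ? 0.0 : base);
        }

        // Mirrors the block added to MetricZipperTransform.zip(): timestamps that
        // exist only in the base series are backfilled by zipping a null original
        // value against the base value.
        static Map<Long, Double> fullJoin(Map<Long, Double> zipped, Map<Long, Double> base) {
            Map<Long, Double> out = new TreeMap<>(zipped);
            for (Map.Entry<Long, Double> baseDP : base.entrySet()) {
                if (!out.containsKey(baseDP.getKey())) {
                    out.put(baseDP.getKey(), zip(null, baseDP.getValue()));
                }
            }
            return out;
        }
    }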
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java index d3473c2f3..c1bd919c4 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java @@ -35,6 +35,7 @@ import com.salesforce.dva.argus.system.SystemException; import org.junit.Test; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -521,5 +522,84 @@ public void testDivide_VTransformWithMissingPointNullPointVectorAgainstNullPoint assertEquals(result.get(2).getDatapoints().size(), 4); assertEquals(expected_3, result.get(2).getDatapoints()); } + + @Test + public void testDivide_VTransformWithSameShorterLongerVectorAgainstMetricList_fullJoinIndicator() { + Transform sum_vTransform = new MetricZipperTransform(new DivideValueZipper()); + Map datapoints_1 = new HashMap(); + + datapoints_1.put(1000L, 2.0); + datapoints_1.put(2000L, 4.0); + datapoints_1.put(3000L, 6.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(1000L, 10.0); + datapoints_2.put(2000L, 100.0); + datapoints_2.put(3000L, 1000.0); + datapoints_2.put(4000L, 10000.0); + + Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + Map datapoints_3 = new HashMap(); + + datapoints_3.put(1000L, 10.0); + datapoints_3.put(2000L, 100.0); + + Metric metric_3 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_3.setDatapoints(datapoints_3); + + Map vector_datapoints = new HashMap(); + + vector_datapoints.put(1000L, 2.0); + vector_datapoints.put(2000L, 2.0); + vector_datapoints.put(3000L, 2.0); + + Metric vector = new Metric(TEST_SCOPE, TEST_METRIC); + + vector.setDatapoints(vector_datapoints); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + metrics.add(metric_3); + metrics.add(vector); + + Map expected_1 = new HashMap(); + + expected_1.put(1000L, 1.0); + expected_1.put(2000L, 2.0); + expected_1.put(3000L, 3.0); + + Map expected_2 = new HashMap(); + + expected_2.put(1000L, 5.0); + expected_2.put(2000L, 50.0); + expected_2.put(3000L, 500.0); + expected_2.put(4000L, 10000.0); + + Map expected_3 = new HashMap(); + + expected_3.put(1000L, 5.0); + expected_3.put(2000L, 50.0); + expected_3.put(3000L, 0.0); + + List result = sum_vTransform.transform(metrics, Arrays.asList("UNION")); + + assertEquals(3, result.get(0).getDatapoints().size()); + assertEquals(expected_1, result.get(0).getDatapoints()); + assertEquals(4, result.get(1).getDatapoints().size()); + assertEquals(expected_2, result.get(1).getDatapoints()); + assertEquals(3, result.get(2).getDatapoints().size()); + assertEquals(expected_3, result.get(2).getDatapoints()); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
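The Divide variant above exercises the same UNION constant, and its expectations show that each ValueZipper defines its own missing-operand semantics: a base gap at 4000L passes the original 10000.0 through unchanged, while an original gap at 3000L divides down to 0.0.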
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java index 8dfceebf3..518b21ac8 100755 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java @@ -406,7 +406,7 @@ public void testDownsampleTransformDevOneMetric() { assertEquals(result.size(), 1); assertEquals(expected_1, result.get(0).getDatapoints()); } - + @Test public void testDownsampleTransformAvgMultipleMetrics() { Transform downsampleTransform = new DownsampleTransform(); @@ -716,5 +716,106 @@ public void testDownsampleTransformMetricIsAllNull() { assertEquals(expected, result.get(0).getDatapoints()); } + @Test + public void testDownsampleTransformPercentileOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); + + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); + + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("2s-p90"); + + Map expected_1 = new HashMap(); + + expected_1.put(0L, 1.0); + expected_1.put(2000L, 3.0); + expected_1.put(4000L, 5.0); + expected_1.put(6000L, 7.0); + expected_1.put(8000L, 9.0); + + List result = downsampleTransform.transform(metrics, constants); + + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } + + @Test + public void testDownsampleTransformPercentileMultipleMetrics() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); + + datapoints_1.put(000L, 10.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); + + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(0L, 1.0); + datapoints_2.put(1000L, 20.0); + datapoints_2.put(2000L, 30.0); + datapoints_2.put(3000L, 40.0); + datapoints_2.put(4000L, 50.0); + datapoints_2.put(5000L, 60.0); + datapoints_2.put(6000L, 70.0); + datapoints_2.put(7000L, 80.0); + datapoints_2.put(8000L, 90.0); + datapoints_2.put(9000L, 100.0); + + + Metric metric_2 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + + List constants = new ArrayList(); + constants.add("10s-p90"); + + Map expected_1 = new HashMap(); + + expected_1.put(0L, 9.9); + + Map expected_2 = new HashMap(); + + expected_2.put(0L, 99.0); + + List result = downsampleTransform.transform(metrics, constants); + + assertEquals(2, result.size()); + assertEquals(expected_1, result.get(0).getDatapoints()); + assertEquals(expected_2, result.get(1).getDatapoints()); + } } /* Copyright 
(c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransformTest.java index 9f534ff18..6b311b8ed 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransformTest.java @@ -124,15 +124,15 @@ public void testMetricListIntervalOffsetWithPercentile() { Map expected_1 = new HashMap<>(); - expected_1.put(1000L, 9.0); - expected_1.put(3000L, 9.0); - expected_1.put(4000L, 9.0); - expected_1.put(5000L, 9.0); - expected_1.put(6000L, 9.0); - expected_1.put(7000L, 9.0); - expected_1.put(8000L, 9.0); - expected_1.put(9000L, 9.0); - expected_1.put(10000L, 9.0); + expected_1.put(1000L, 9.2); + expected_1.put(3000L, 9.2); + expected_1.put(4000L, 9.2); + expected_1.put(5000L, 9.2); + expected_1.put(6000L, 9.2); + expected_1.put(7000L, 9.2); + expected_1.put(8000L, 9.2); + expected_1.put(9000L, 9.2); + expected_1.put(10000L, 9.2); List result = fillCalculateTransform.transform(metrics, constants); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransformTest.java deleted file mode 100644 index fd2cedc2e..000000000 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingAverageTransformTest.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ - -package com.salesforce.dva.argus.service.metric.transform; - -import com.salesforce.dva.argus.entity.Metric; -import org.junit.Test; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class MovingAverageTransformTest { - - @Test - public void testMovingAvgTransformWithFixedNoOfPastPoints() { - Transform movingAvgTransform = new MovingAverageTransform(); - Map datapoints = new HashMap(); - - datapoints.put(1000L, 1.0); - datapoints.put(2000L, 2.0); - datapoints.put(3000L, 3.0); - datapoints.put(5000L, 10.0); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, 1.5); - actual.put(3000L, 2.5); - actual.put(5000L, 6.5); - actual.put(6000L, 6.0); - actual.put(7000L, 2.5); - actual.put(10000L, 9.0); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("2"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test - public void testMovingAvgTransformWithFixedNoOfPastPoints1() { - Transform movingAvgTransform = new MovingAverageTransform(); - Map datapoints = new HashMap(); - - datapoints.put(1000L, null); - datapoints.put(2000L, null); - datapoints.put(3000L, 3.0); - datapoints.put(5000L, 10.0); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, null); - actual.put(3000L, null); - actual.put(5000L, 3.25); - actual.put(6000L, 3.75); - actual.put(7000L, 4.5); - actual.put(10000L, 7.5); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("4"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test - public void testMovingAvgTransformWithFixedNoOfPastPoints2() { - Transform movingAvgTransform = new MovingAverageTransform(); - Map datapoints = new HashMap(); - - datapoints.put(1000L, null); - datapoints.put(2000L, null); - datapoints.put(3000L, null); - datapoints.put(5000L, null); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, 0.0); - actual.put(3000L, 0.0); - actual.put(5000L, 0.0); - actual.put(6000L, 1.0); - actual.put(7000L, 2.5); - actual.put(10000L, 9.0); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("2"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test - public void testMovingAvgTransformWithTimeInterval() { - Transform movingAvgTransform = new MovingAverageTransform(); - 
Map datapoints = new HashMap(); - - datapoints.put(1000L, 1.0); - datapoints.put(2000L, 2.0); - datapoints.put(3000L, 3.0); - datapoints.put(5000L, 10.0); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, 1.5); - actual.put(3000L, 2.5); - actual.put(5000L, 10.0); - actual.put(6000L, 6.0); - actual.put(7000L, 2.5); - actual.put(10000L, 15.0); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("2s"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test - public void testMovingAvgTransformWithTimeInterval1() { - Transform movingAvgTransform = new MovingAverageTransform(); - Map datapoints = new HashMap(); - - datapoints.put(1000L, 1.0); - datapoints.put(2000L, 20.0); - datapoints.put(3000L, 20.0); - datapoints.put(5000L, 10.0); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, 10.5); - actual.put(3000L, 20.0); - actual.put(5000L, 10.0); - actual.put(6000L, 6.0); - actual.put(7000L, 2.5); - actual.put(10000L, 15.0); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("2s"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test - public void testMovingAvgTransformWithTimeInterval2() { - Transform movingAvgTransform = new MovingAverageTransform(); - Map datapoints = new HashMap(); - - datapoints.put(1000L, 2.0); - datapoints.put(2000L, 2.0); - datapoints.put(3000L, 3.0); - datapoints.put(5000L, 10.0); - datapoints.put(6000L, 2.0); - datapoints.put(7000L, 3.0); - datapoints.put(10000L, 15.0); - - Map actual = new HashMap(); - - actual.put(1000L, null); - actual.put(2000L, 2.0); - actual.put(3000L, 2.5); - actual.put(5000L, 10.0); - actual.put(6000L, 6.0); - actual.put(7000L, 2.5); - actual.put(10000L, 15.0); - - Metric metric = new Metric("test-scope", "test-metric"); - - metric.setDatapoints(datapoints); - - List metrics = new ArrayList(); - - metrics.add(metric); - - List constants = new ArrayList(1); - - constants.add("2s"); - - List result = movingAvgTransform.transform(metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), actual.size()); - assertEquals(result.get(0).getDatapoints(), actual); - } - - @Test(expected = UnsupportedOperationException.class) - public void transform_ShouldThrowUnsupportedOperationExceptionWhenNoWindowSizeSpecified() { - List metrics = new ArrayList(); - - new MovingAverageTransform().transform(metrics); - } - - @Test - public void testMovingAvgTransformScopeName() { - Transform movingAvgTransform = new MovingAverageTransform(); - - assertEquals("MOVINGAVERAGE", movingAvgTransform.getResultScopeName()); - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
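The moving-sum test added to MovingTransformTest below hard-codes its expected values; they can be reproduced with a plain sliding window over the same datapoints. A back-of-the-envelope check (not the transform's actual implementation) that sums every point in the window (t - 2s, t]:

    import java.util.Map;
    import java.util.TreeMap;

    public class MovingSumSketch {

        public static void main(String[] args) {
            TreeMap<Long, Double> dps = new TreeMap<>();
            dps.put(1000L, 1.0);
            dps.put(2000L, 2.0);
            dps.put(3000L, 3.0);
            dps.put(5000L, 10.0);
            dps.put(6000L, 2.0);
            dps.put(7000L, 3.0);
            dps.put(10000L, 15.0);

            long windowMs = 2000L;
            Map<Long, Double> out = new TreeMap<>();

            for (Long t : dps.keySet()) {
                double sum = 0.0;
                // Sum every datapoint whose timestamp lies in (t - windowMs, t].
                for (double v : dps.subMap(t - windowMs, false, t, true).values()) {
                    sum += v;
                }
                out.put(t, sum);
            }

            // Prints {1000=1.0, 2000=3.0, 3000=5.0, 5000=10.0, 6000=12.0, 7000=5.0, 10000=15.0},
            // matching the expected map in the test.
            System.out.println(out);
        }
    }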
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingTransformTest.java
index 2b0fe4381..23cc50794 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MovingTransformTest.java
@@ -127,6 +127,48 @@ public void testMovingMedianTransformWithTimeInterval() {
         assertEquals(result.get(0).getDatapoints().size(), actual.size());
         assertEquals(result.get(0).getDatapoints(), actual);
     }
+
+    @Test
+    public void testMovingSumTransformWithTimeInterval() {
+        Transform movingTransform = new MetricMappingTransform(new MovingValueMapping());
+        Map<Long, Double> datapoints = new HashMap<Long, Double>();
+
+        datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(5000L, 10.0);
+        datapoints.put(6000L, 2.0);
+        datapoints.put(7000L, 3.0);
+        datapoints.put(10000L, 15.0);
+
+        Map<Long, Double> actual = new HashMap<Long, Double>();
+
+        actual.put(1000L, 1.0);
+        actual.put(2000L, 3.0);
+        actual.put(3000L, 5.0);
+        actual.put(5000L, 10.0);
+        actual.put(6000L, 12.0);
+        actual.put(7000L, 5.0);
+        actual.put(10000L, 15.0);
+
+        Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric.setDatapoints(datapoints);
+
+        List<Metric> metrics = new ArrayList<Metric>();
+
+        metrics.add(metric);
+
+        List<String> constants = new ArrayList<String>(1);
+
+        constants.add("2s");
+        constants.add("sum");
+
+        List<Metric> result = movingTransform.transform(metrics, constants);
+
+        assertEquals(result.get(0).getDatapoints().size(), actual.size());
+        assertEquals(result.get(0).getDatapoints(), actual);
+    }
 
     @Test
     public void testMovingAvgTransformWithTimeIntervalHasNullValue() {
@@ -277,7 +319,7 @@ public void movingWithOnlyOnePoint() {
     }
 
     @Test(expected = UnsupportedOperationException.class)
-    public void transform_ShouldThrowUnsupportedOperationExceptionWhenNoWindowSizeSpecified() {
+    public void transform_ShouldThrowUnsupportedOperationExceptionWhenNoConstantsAreSpecified() {
         List<Metric> metrics = new ArrayList<Metric>();
 
         metrics.add(new Metric(TEST_SCOPE, TEST_METRIC));
@@ -286,9 +328,22 @@ public void transform_ShouldThrowUnsupportedOperationExceptionWhenNoWindowSizeSp
         movingTransform.transform(metrics);
     }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void transform_ShouldThrowIllegalArgumentExceptionWhenNoWindowSizeIsSpecified() {
+        List<Metric> metrics = new ArrayList<Metric>();
 
-    @Test(expected = UnsupportedOperationException.class)
-    public void transform_ShouldThrowUnsupportedOperationExceptionWhenTypeIsInvalid() {
+        metrics.add(new Metric(TEST_SCOPE, TEST_METRIC));
+
+        Transform movingTransform = new MetricMappingTransform(new MovingValueMapping());
+
+        List<String> constants = new ArrayList<String>(1);
+
+        movingTransform.transform(metrics, constants);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void transform_ShouldThrowIllegalArgumentExceptionWhenTypeIsInvalid() {
         List<Metric> metrics = new ArrayList<Metric>();
 
         metrics.add(new Metric(TEST_SCOPE, TEST_METRIC));
@@ -298,7 +353,7 @@ public void transform_ShouldThrowUnsupportedOperationExceptionWhenTypeIsInvalid(
         constants.add("2");
         constants.add("foobar");
 
-        movingTransform.transform(metrics);
+        movingTransform.transform(metrics, constants);
     }
 
     @Test
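The expected values in testMovingSumTransformWithTimeInterval follow a trailing half-open window: the value at time t aggregates every point in (t - w, t], so with w = 2s the point at 7000 sums 6000 and 7000 (2.0 + 3.0 = 5.0) but excludes 5000. A sketch of that window arithmetic, as an illustration of the semantics the expectations imply rather than the actual MovingValueMapping implementation:

    // Trailing-window sum over time-ordered datapoints.
    Map<Long, Double> movingSum(TreeMap<Long, Double> points, long windowMillis) {
        Map<Long, Double> out = new TreeMap<Long, Double>();
        for (Long t : points.keySet()) {
            double sum = 0.0;
            // Half-open window (t - w, t]: the left edge is excluded.
            for (Double v : points.subMap(t - windowMillis, false, t, true).values()) {
                if (v != null) {
                    sum += v;
                }
            }
            out.put(t, sum);
        }
        return out;
    }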
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransformTest.java
index 8d2b1029f..f0ed2dc0d 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PercentileTransformTest.java
@@ -32,11 +32,9 @@
 package com.salesforce.dva.argus.service.metric.transform;
 
 import com.salesforce.dva.argus.entity.Metric;
-import com.salesforce.dva.argus.system.SystemException;
 import org.junit.Test;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,37 +46,6 @@ public class PercentileTransformTest {
     private static final String TEST_SCOPE = "test-scope";
     private static final String TEST_METRIC = "test-metric";
 
-    @Test
-    public void testPercentileCalculation() {
-        List<Double> values = new ArrayList<>();
-
-        values.add(20.0);
-        values.add(15.0);
-        values.add(50.0);
-        values.add(35.0);
-        values.add(40.0);
-
-        PercentileTransform transform = new PercentileTransform();
-        Method method;
-
-        try {
-            method = transform.getClass().getDeclaredMethod("calculateNthPercentile", List.class, int.class);
-            method.setAccessible(true);
-
-            Double p30 = Double.class.cast(method.invoke(transform, values, 30));
-            Double p40 = Double.class.cast(method.invoke(transform, values, 40));
-            Double p50 = Double.class.cast(method.invoke(transform, values, 50));
-            Double p100 = Double.class.cast(method.invoke(transform, values, 100));
-
-            assertEquals(p30, new Double(20.0));
-            assertEquals(p40, new Double(20.0));
-            assertEquals(p50, new Double(35.0));
-            assertEquals(p100, new Double(50.0));
-        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
-            throw new SystemException("Exception occurred while trying to invoke method via reflection.", e);
-        }
-    }
-
     @Test
     public void testPercentileTransformWithOneConstantShareCommonDPs() {
         Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
@@ -136,7 +103,7 @@ public void testPercentileTransformWithOneConstantShareCommonDPs() {
 
         Map<Long, Double> expected = new HashMap<Long, Double>();
 
-        expected.put(1000L, 20.0);
+        expected.put(1000L, 19.0);
 
         List<Metric> result = percentileTransform.transform(metrics, constants);
 
@@ -206,7 +173,7 @@ public void testPercentileTransformWithOneConstantShareSomeCommonDPs() {
 
         Map<Long, Double> expected = new HashMap<Long, Double>();
 
-        expected.put(1000L, 20.0);
+        expected.put(1000L, 19.0);
 
         List<Metric> result = percentileTransform.transform(metrics, constants);
 
@@ -278,7 +245,7 @@
 
     @Test
-    public void testPercentileTransformWithOneConstantAndUNIONShareNoCommonDPs() {
+    public void testPercentileTransformWithOneConstantShareNoCommonDPs_fullJoinIndicator() {
         Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
         Map<Long, Double> datapoints_1 = new HashMap<Long, Double>();
         Map<Long, Double> expected = new HashMap<Long, Double>();
 
@@ -359,7 +326,7 @@ public void testPercentileTransformWithoutConstants() {
         percentileTransform.transform(metrics, constants);
     }
 
-    @Test(expected = NumberFormatException.class)
+    @Test(expected = IllegalArgumentException.class)
     public void testPercentileTransformWithIllegalTimeunit() {
         Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
         Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
@@ -401,11 +368,20 @@ public void testPercentileTransformWithoutMetrics() {
     }
 
     @Test
-    public void testPercentile95TransformWithAbsoluteWindowSizeInSeconds() {
+    public void testPercentile95TransformSingleMetric_LegacySupport() {
         Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
         Map<Long, Double> datapoints = new HashMap<Long, Double>();
 
         datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
 
         Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
 
@@ -417,28 +393,81 @@ public void testPercentile95TransformWithAbsoluteWindowSizeInSeconds() {
 
         List<String> constants = new ArrayList<String>(1);
 
-        constants.add("95");
+        constants.add("90");
         constants.add("3s");
 
         List<Metric> result = percentileTransform.transform(metrics, constants);
 
         Map<Long, Double> expected = new HashMap<Long, Double>();
 
-        expected.put(1000L, 1.0);
+        expected.put(1000L, 9.9);
 
         assertEquals(expected, result.get(0).getDatapoints());
     }
+
+    @Test
+    public void testPercentile95TransformMultipleMetrics_LegacySupport() {
+        Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
+        Map<Long, Double> datapoints = new HashMap<Long, Double>();
+
+        datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
+
+        Metric metric1 = new Metric(TEST_SCOPE, TEST_METRIC + "1");
+
+        metric1.setDatapoints(datapoints);
+
+        datapoints = new HashMap<Long, Double>();
+
+        datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
+
+        Metric metric2 = new Metric(TEST_SCOPE, TEST_METRIC + "2");
+
+        metric2.setDatapoints(datapoints);
+
+        List<Metric> metrics = Arrays.asList(metric1, metric2);
+        List<String> constants = Arrays.asList("90", "3s");
+
+        List<Metric> result = percentileTransform.transform(metrics, constants);
+
+        Map<Long, Double> expected = new HashMap<Long, Double>();
+        expected.put(1000L, 9.9);
+
+        assertEquals(2, result.size());
+        assertEquals(expected, result.get(0).getDatapoints());
+        assertEquals(expected, result.get(1).getDatapoints());
+    }
 
     @Test
-    public void testPercentile95TransformWithLastWindowOnlyHaveOnePoint() {
+    public void testPercentile95TransformSingleMetric_individualEvaluation() {
         Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
         Map<Long, Double> datapoints = new HashMap<Long, Double>();
 
         datapoints.put(1000L, 1.0);
         datapoints.put(2000L, 2.0);
         datapoints.put(3000L, 3.0);
-        datapoints.put(5000L, 10.0);
-        datapoints.put(6000L, 2.0);
-        datapoints.put(7000L, 3.0);
-        datapoints.put(10000L, 15.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
 
         Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
 
@@ -450,21 +479,64 @@ public void testPercentile95TransformWithLastWindowOnlyHaveOnePoint() {
 
         List<String> constants = new ArrayList<String>(1);
 
-        constants.add("95");
-        constants.add("3s");
+        constants.add("90");
+        constants.add("individual");
 
         List<Metric> result = percentileTransform.transform(metrics, constants);
 
         Map<Long, Double> expected = new HashMap<Long, Double>();
 
-        expected.put(1000L, 1.0);
-        expected.put(2000L, 2.0);
-        expected.put(3000L, 3.0);
-        expected.put(5000L, 10.0);
-        expected.put(6000L, 10.0);
-        expected.put(7000L, 10.0);
-        expected.put(10000L, 15.0);
-        assertEquals(result.get(0).getDatapoints().size(), 7);
+        expected.put(1000L, 9.9);
+        assertEquals(expected, result.get(0).getDatapoints());
+    }
+
+    @Test
+    public void testPercentile95TransformMultipleMetrics_individualEvaluation() {
+        Transform percentileTransform = new MetricReducerOrMappingWithConstantTransform(new PercentileValueReducerOrMapping());
+        Map<Long, Double> datapoints = new HashMap<Long, Double>();
+
+        datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
+
+        Metric metric1 = new Metric(TEST_SCOPE, TEST_METRIC + "1");
+
+        metric1.setDatapoints(datapoints);
+
+        datapoints = new HashMap<Long, Double>();
+
+        datapoints.put(1000L, 1.0);
+        datapoints.put(2000L, 2.0);
+        datapoints.put(3000L, 3.0);
+        datapoints.put(4000L, 4.0);
+        datapoints.put(5000L, 5.0);
+        datapoints.put(6000L, 6.0);
+        datapoints.put(7000L, 7.0);
+        datapoints.put(8000L, 8.0);
+        datapoints.put(9000L, 9.0);
+        datapoints.put(10000L, 10.0);
+
+        Metric metric2 = new Metric(TEST_SCOPE, TEST_METRIC + "2");
+
+        metric2.setDatapoints(datapoints);
+
+        List<Metric> metrics = Arrays.asList(metric1, metric2);
+        List<String> constants = Arrays.asList("90", "individual");
+
+        List<Metric> result = percentileTransform.transform(metrics, constants);
+
+        Map<Long, Double> expected = new HashMap<Long, Double>();
+        expected.put(1000L, 9.9);
+
+        assertEquals(2, result.size());
         assertEquals(expected, result.get(0).getDatapoints());
+        assertEquals(expected, result.get(1).getDatapoints());
     }
 }
 /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
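The new 9.9 expectations match the commons-math3 Percentile estimator that this patch switches to: its default estimation interpolates at position p/100 * (n + 1), so for p = 90 over the values 1..10 the position is 9.9, giving 9 + 0.9 * (10 - 9) = 9.9. A quick check:

    import org.apache.commons.math3.stat.descriptive.rank.Percentile;

    double[] values = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
    // Default (legacy) estimation type: position = 0.90 * (10 + 1) = 9.9.
    double p90 = new Percentile().evaluate(values, 90.0); // 9.9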
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
index 19787f882..cb614093d 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
@@ -34,6 +34,7 @@
 import com.salesforce.dva.argus.entity.Metric;
 import org.junit.Test;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -494,5 +495,84 @@ public void testScale_VTransformWithMissingPointNullPointVectorAgainstNullPointM
         assertEquals(result.get(2).getDatapoints().size(), 4);
         assertEquals(expected_3, result.get(2).getDatapoints());
     }
+
+    @Test
+    public void testScale_VTransformWithSameShorterLongerVectorAgainstMetricList_fullJoinIndicator() {
+        Transform scale_vTransform = new MetricZipperTransform(new ScaleValueZipper());
+        Map<Long, Double> datapoints_1 = new HashMap<Long, Double>();
+
+        datapoints_1.put(1000L, 1.0);
+        datapoints_1.put(2000L, 2.0);
+        datapoints_1.put(3000L, 3.0);
+
+        Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_1.setDatapoints(datapoints_1);
+
+        Map<Long, Double> datapoints_2 = new HashMap<Long, Double>();
+
+        datapoints_2.put(1000L, 10.0);
+        datapoints_2.put(2000L, 100.0);
+        datapoints_2.put(3000L, 1000.0);
+        datapoints_2.put(4000L, 10000.0);
+
+        Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_2.setDatapoints(datapoints_2);
+
+        Map<Long, Double> datapoints_3 = new HashMap<Long, Double>();
+
+        datapoints_3.put(1000L, 1.0);
+        datapoints_3.put(2000L, 10.0);
+
+        Metric metric_3 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_3.setDatapoints(datapoints_3);
+
+        Map<Long, Double> vector_datapoints = new HashMap<Long, Double>();
+
+        vector_datapoints.put(1000L, 1.0);
+        vector_datapoints.put(2000L, 1.0);
+        vector_datapoints.put(3000L, 1.0);
+
+        Metric vector = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        vector.setDatapoints(vector_datapoints);
+
+        List<Metric> metrics = new ArrayList<Metric>();
+
+        metrics.add(metric_1);
+        metrics.add(metric_2);
+        metrics.add(metric_3);
+        metrics.add(vector);
+
+        Map<Long, Double> expected_1 = new HashMap<Long, Double>();
+
+        expected_1.put(1000L, 1.0);
+        expected_1.put(2000L, 2.0);
+        expected_1.put(3000L, 3.0);
+
+        Map<Long, Double> expected_2 = new HashMap<Long, Double>();
+
+        expected_2.put(1000L, 10.0);
+        expected_2.put(2000L, 100.0);
+        expected_2.put(3000L, 1000.0);
+        expected_2.put(4000L, 10000.0);
+
+        Map<Long, Double> expected_3 = new HashMap<Long, Double>();
+
+        expected_3.put(1000L, 1.0);
+        expected_3.put(2000L, 10.0);
+        expected_3.put(3000L, 1.0);
+
+        List<Metric> result = scale_vTransform.transform(metrics, Arrays.asList("UNION"));
+
+        assertEquals(3, result.get(0).getDatapoints().size());
+        assertEquals(expected_1, result.get(0).getDatapoints());
+        assertEquals(4, result.get(1).getDatapoints().size());
+        assertEquals(expected_2, result.get(1).getDatapoints());
+        assertEquals(3, result.get(2).getDatapoints().size());
+        assertEquals(expected_3, result.get(2).getDatapoints());
+    }
 }
 /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
index 295bc6065..eb3c46711 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
@@ -34,6 +34,7 @@
 import com.salesforce.dva.argus.entity.Metric;
 import org.junit.Test;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -494,5 +495,85 @@ public void testSum_VTransformWithMissingPointNullPointVectorAgainstNullPointMet
         assertEquals(result.get(2).getDatapoints().size(), 4);
         assertEquals(expected_3, result.get(2).getDatapoints());
     }
+
+    @Test
+    public void testSum_VTransformWithSameShorterLongerVectorAgainstMetricList_fullJoinIndicator() {
+        Transform sum_vTransform = new MetricZipperTransform(new SumValueZipper());
+        Map<Long, Double> datapoints_1 = new HashMap<Long, Double>();
+
+        datapoints_1.put(1000L, 1.0);
+        datapoints_1.put(2000L, 2.0);
+        datapoints_1.put(3000L, 3.0);
+
+        Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_1.setDatapoints(datapoints_1);
+
+        Map<Long, Double> datapoints_2 = new HashMap<Long, Double>();
+
+        datapoints_2.put(1000L, 10.0);
+        datapoints_2.put(2000L, 100.0);
+        datapoints_2.put(3000L, 1000.0);
+        datapoints_2.put(4000L, 10000.0);
+
+        Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_2.setDatapoints(datapoints_2);
+
+        Map<Long, Double> datapoints_3 = new HashMap<Long, Double>();
+
+        datapoints_3.put(1000L, 0.1);
+        datapoints_3.put(2000L, 0.01);
+
+        Metric metric_3 = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        metric_3.setDatapoints(datapoints_3);
+
+        Map<Long, Double> vector_datapoints = new HashMap<Long, Double>();
+
+        vector_datapoints.put(1000L, 1.0);
+        vector_datapoints.put(2000L, 1.0);
+        vector_datapoints.put(3000L, 1.0);
+
+        Metric vector = new Metric(TEST_SCOPE, TEST_METRIC);
+
+        vector.setDatapoints(vector_datapoints);
+
+        List<Metric> metrics = new ArrayList<Metric>();
+
+        metrics.add(metric_1);
+        metrics.add(metric_2);
+        metrics.add(metric_3);
+        metrics.add(vector);
+
+        Map<Long, Double> expected_1 = new HashMap<Long, Double>();
+
+        expected_1.put(1000L, 2.0);
+        expected_1.put(2000L, 3.0);
+        expected_1.put(3000L, 4.0);
+
+        Map<Long, Double> expected_2 = new HashMap<Long, Double>();
+
+        expected_2.put(1000L, 11.0);
+        expected_2.put(2000L, 101.0);
+        expected_2.put(3000L, 1001.0);
+        expected_2.put(4000L, 10000.0);
+
+        Map<Long, Double> expected_3 = new HashMap<Long, Double>();
+
+        expected_3.put(1000L, 1.1);
+        expected_3.put(2000L, 1.01);
+        expected_3.put(3000L, 1.0);
+
+        List<Metric> result = sum_vTransform.transform(metrics, Arrays.asList("UNION"));
+
+        assertEquals(3, result.get(0).getDatapoints().size());
+        assertEquals(expected_1, result.get(0).getDatapoints());
+        assertEquals(4, result.get(1).getDatapoints().size());
+        assertEquals(expected_2, result.get(1).getDatapoints());
+        assertEquals(3, result.get(2).getDatapoints().size());
+        assertEquals(expected_3, result.get(2).getDatapoints());
+    }
+
 }
 /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
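The UNION ("full join") expectations in the Scale_V and Sum_V tests above imply that timestamps from either operand survive the zip and a value with no counterpart passes through unchanged: 10000.0 stays at 4000L where the vector has no point, and the vector's 1.0 fills 3000L for the two-point metric. A sketch of that join for the sum case, as an illustration of the implied semantics rather than the MetricZipperTransform internals:

    // Full-join sum of a metric's datapoints with a vector's datapoints.
    Map<Long, Double> fullJoinSum(Map<Long, Double> metric, Map<Long, Double> vector) {
        // Start from the metric side so its unmatched points survive.
        Map<Long, Double> out = new TreeMap<Long, Double>(metric);
        for (Map.Entry<Long, Double> e : vector.entrySet()) {
            Double existing = out.get(e.getKey());
            // Unmatched vector points pass through; matched points are summed.
            out.put(e.getKey(), existing == null ? e.getValue() : existing + e.getValue());
        }
        return out;
    }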