diff --git a/.gitignore b/.gitignore index 5a22d86f8..4fd80ed64 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,7 @@ argus-build.properties .settings/ .DS_Store /config - +ArgusKafkaConsumer/dependency-reduced-pom.xml ArgusCore/bloomstate/* # Eclipse ignore @@ -43,6 +43,9 @@ local.properties *.ipr *.iws +#vscode +.vscode/ + Makefile property diff --git a/.travis.yml b/.travis.yml index a25ec8670..ccc1b43d4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,6 @@ language: java jdk: oraclejdk8 +dist: trusty sudo: required group: deprecated-2017Q2 env: diff --git a/ArgusClient/pom.xml b/ArgusClient/pom.xml index 8acfaebd4..3b5b86da3 100644 --- a/ArgusClient/pom.xml +++ b/ArgusClient/pom.xml @@ -4,16 +4,20 @@ argus com.salesforce.argus - 2.22.3 + 4-SNAPSHOT .. + argus-client + 4.75-SNAPSHOT jar ArgusClient Command line client for the Argus Production Monitoring Tool. ${project.basedir}/${project.parent.relativePath} + ** + @@ -81,6 +85,28 @@ test-resources + + copy + package + + copy + + + + + io.prometheus.jmx + jmx_prometheus_javaagent + ${jmx.prometheus.version} + jar + sources + true + ${project.build.directory} + jmx_prometheus_javaagent-${jmx.prometheus.version}.jar + + + + + @@ -142,30 +168,30 @@ - - ${project.groupId} - argus - ${project.version} - resources - zip - provided - - - ${project.groupId} - argus - ${project.version} - test-resources - zip - provided - - - junit - junit - - - ${project.groupId} - argus-core - ${project.version} - + + ${project.groupId} + argus + ${project.parent.version} + resources + zip + provided + + + ${project.groupId} + argus + ${project.parent.version} + test-resources + zip + provided + + + junit + junit + + + com.salesforce.argus + argus-core + 4.75-SNAPSHOT + diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Alerter.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Alerter.java index 3c61080d6..4eb2cd938 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Alerter.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Alerter.java @@ -78,8 +78,12 @@ class Alerter implements Runnable { public void run() { while (!Thread.currentThread().isInterrupted()) { try { - jobCounter.addAndGet(service.executeScheduledAlerts(50, timeout).size()); - LOGGER.info("alerts evaluated so far: {}", jobCounter.get()); + int currentAlertCount = jobCounter.get(); + jobCounter.addAndGet(service.executeScheduledAlerts(50, timeout)); + + if(jobCounter.get() != currentAlertCount) { + LOGGER.info("alerts evaluated so far: {}", jobCounter.get()); + } Thread.sleep(POLL_INTERVAL_MS); } catch (InterruptedException ex) { LOGGER.info("Execution was interrupted."); @@ -89,7 +93,7 @@ public void run() { LOGGER.error("Exception in alerter: {}", ExceptionUtils.getFullStackTrace(ex)); } } - LOGGER.warn(MessageFormat.format("Alerter thread interrupted. {} alerts evaluated by this thread.", jobCounter.get())); + LOGGER.warn("Alerter thread interrupted. 
{} alerts evaluated by this thread.", jobCounter.get()); service.dispose(); } } diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/AnnotationCommitter.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/AnnotationCommitter.java index bf294a730..9b60cf64c 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/AnnotationCommitter.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/AnnotationCommitter.java @@ -70,7 +70,7 @@ public void run() { int count = collectionService.commitAnnotations(ANNOTATION_CHUNK_SIZE, TIMEOUT); if (count > 0) { - LOGGER.info(MessageFormat.format("Committed {0} annotations.", count)); + LOGGER.info("Committed {} annotations.", count); jobCounter.addAndGet(count); } Thread.sleep(POLL_INTERVAL_MS); @@ -82,7 +82,7 @@ public void run() { LOGGER.info("Error occurred while committing annotations. Reason {}", ex.toString()); } } - LOGGER.warn(MessageFormat.format("Annotation committer thread interrupted. {} annotations committed by this thread.", jobCounter.get())); + LOGGER.warn("Annotation committer thread interrupted. {} annotations committed by this thread.", jobCounter.get()); collectionService.dispose(); monitorService.dispose(); } diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientServiceFactory.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientServiceFactory.java index 02e45716e..31fe883ed 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientServiceFactory.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientServiceFactory.java @@ -48,23 +48,34 @@ class ClientServiceFactory { //~ Methods ************************************************************************************************************************************** - static ExecutorService startClientService(SystemMain system, ClientType clientType, AtomicInteger jobCounter) { + static ExecutorService[] startClientService(SystemMain system, ClientType clientType, AtomicInteger jobCounter) { switch (clientType) { case ALERT: - return startAlertClientService(system, jobCounter); + return collect(startAlertClientService(system, jobCounter), + startRefocusClientService(system)); case COMMIT_SCHEMA: /* Alpha feature, not currently supported. */ - return startCommitSchemaClientService(system, jobCounter); + return collect(startCommitSchemaClientService(system, jobCounter)); case COMMIT_ANNOTATIONS: - return startCommitAnnotationsClientService(system, jobCounter); + return collect(startCommitAnnotationsClientService(system, jobCounter)); + case COMMIT_HISTOGRAMS: + return collect(startCommitHistogramsClientService(system, jobCounter)); case PROCESS_QUERIES: - return startProcessMetricsClientService(system, jobCounter); + return collect(startProcessMetricsClientService(system, jobCounter)); default: - return startCommitMetricsClientService(system, jobCounter); + return collect(startCommitMetricsClientService(system, jobCounter)); } } + + private static ExecutorService[] collect(ExecutorService ... 
services) + { + ExecutorService[] rv = new ExecutorService[services.length]; + System.arraycopy(services, 0, rv, 0, services.length); + return rv; + } + private static ExecutorService startAlertClientService(SystemMain system, AtomicInteger jobCounter) { int configuredCount = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.CLIENT_THREADS)); int configuredTimeout = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.CLIENT_CONNECT_TIMEOUT)); @@ -86,6 +97,28 @@ public Thread newThread(Runnable r) { return service; } + private static ExecutorService startRefocusClientService(SystemMain system) { + int configuredCount = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.REFOCUS_CLIENT_THREADS)); + int configuredTimeout = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.REFOCUS_CLIENT_CONNECT_TIMEOUT)); + int threadPoolCount = Math.max(configuredCount, 1); // TODO - why any other value than 1? + // todo - No need for tpc>1 thread until threads added for executing the HTTP requests. + int timeout = Math.max(10000, configuredTimeout); + ExecutorService service = Executors.newFixedThreadPool(threadPoolCount, new ThreadFactory() { + + AtomicInteger id = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, MessageFormat.format("refocusclient-{0}", id.getAndIncrement())); + } + }); + system.getServiceFactory().getMonitorService().startRecordingCounters(); + for (int i = 0; i < threadPoolCount; i++) { + service.submit(new Refocuser(system.getServiceFactory().getRefocusService(), timeout)); + } + return service; + } + private static ExecutorService startCommitAnnotationsClientService(SystemMain system, AtomicInteger jobCounter) { int configuredCount = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.CLIENT_THREADS)); int threadPoolCount = Math.max(configuredCount, 2); @@ -104,7 +137,26 @@ public Thread newThread(Runnable r) { } return service; } + + private static ExecutorService startCommitHistogramsClientService(SystemMain system, AtomicInteger jobCounter) { + int configuredCount = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.CLIENT_THREADS)); + int threadPoolCount = Math.max(configuredCount, 2); + ExecutorService service = Executors.newFixedThreadPool(threadPoolCount, new ThreadFactory() { + + AtomicInteger id = new AtomicInteger(0); + @Override + public Thread newThread(Runnable r) { + return new Thread(r, MessageFormat.format("histogramcommitclient-{0}", id.getAndIncrement())); + } + }); + system.getServiceFactory().getMonitorService().startRecordingCounters(); + for (int i = 0; i < threadPoolCount; i++) { + service.submit(new HistogramCommitter(system.getServiceFactory().getCollectionService(),system.getServiceFactory().getMonitorService(), jobCounter)); + } + return service; + } + private static ExecutorService startCommitMetricsClientService(SystemMain system, AtomicInteger jobCounter) { int configuredCount = Integer.valueOf(system.getConfiguration().getValue(SystemConfiguration.Property.CLIENT_THREADS)); int threadPoolCount = Math.max(configuredCount, 2); @@ -136,7 +188,7 @@ public Thread newThread(Runnable r) { return new Thread(r, MessageFormat.format("schemacommitclient-{0}", id.getAndIncrement())); } }); - system.getServiceFactory().getMonitorService().startRecordingCounters(); + system.getServiceFactory().getMonitorService().startRecordingCounters(); for (int i 
= 0; i < threadPoolCount; i++) { service.submit(new SchemaCommitter(system.getServiceFactory().getCollectionService(),system.getServiceFactory().getMonitorService(), jobCounter)); } diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientType.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientType.java index d7a645632..97ea1d5ec 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientType.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/ClientType.java @@ -42,6 +42,7 @@ enum ClientType { COMMIT_METRICS, COMMIT_ANNOTATIONS, + COMMIT_HISTOGRAMS, ALERT, /* Alpha feature, not currently supported. */ COMMIT_SCHEMA, diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/HistogramCommitter.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/HistogramCommitter.java new file mode 100644 index 000000000..4f2c03855 --- /dev/null +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/HistogramCommitter.java @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2019, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.client; + +import java.util.concurrent.atomic.AtomicInteger; + +import com.salesforce.dva.argus.service.CollectionService; +import com.salesforce.dva.argus.service.MonitorService; + +/** + * Commits histograms from the submit queue into persistent storage. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public class HistogramCommitter extends AbstractCommitter { + + //~ Static fields/initializers ******************************************************************************************************************* + + private static final int HISTOGRAM_MESSAGES_CHUNK_SIZE = 100; + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new HistogramCommitter object. + * + * @param collectionService The collection service to use. 
Cannot be null.
+     * @param  monitorService     The monitoring service to use. Cannot be null.
+     * @param  jobCounter         The global job counter used to track the number of histograms.
+     */
+    HistogramCommitter(CollectionService collectionService, MonitorService monitorService, AtomicInteger jobCounter) {
+        super(collectionService, monitorService, jobCounter);
+    }
+
+    //~ Methods **************************************************************************************************************************************
+
+    @Override
+    public void run() {
+        while (!Thread.currentThread().isInterrupted()) {
+            try {
+                int count = collectionService.commitHistograms(HISTOGRAM_MESSAGES_CHUNK_SIZE, TIMEOUT);
+
+                if (count > 0) {
+                    LOGGER.info("Committed {} histograms.", count);
+                    jobCounter.addAndGet(count);
+                }
+                Thread.sleep(POLL_INTERVAL_MS);
+            } catch (InterruptedException ie) {
+                LOGGER.info("Execution was interrupted.");
+                Thread.currentThread().interrupt();
+                break;
+            } catch (Throwable ex) {
+                LOGGER.info("Error occurred while committing histograms. Reason {}", ex.toString());
+            }
+        }
+        LOGGER.warn("Histogram committer thread interrupted. {} histograms committed by this thread.", jobCounter.get());
+        collectionService.dispose();
+        monitorService.dispose();
+    }
+}
+/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Main.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Main.java
index 2db44171e..6aee062ed 100644
--- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Main.java
+++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Main.java
@@ -48,7 +48,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import static com.salesforce.dva.argus.client.ClientType.COMMIT_METRICS;
 import static com.salesforce.dva.argus.util.Option.findOption;
 
 /**
@@ -72,7 +71,7 @@ public class Main {
     static {
         HELP_OPTION = Option.createFlag("-h", "Display the usage and available collector types.");
         TYPE_OPTION = Option.createOption("-t",
-            "[ COMMIT_METRICS | COMMIT_ANNOTATIONS | ALERT | COMMIT_SCHEMA ] Specifies the type of client to create. Default is COMMIT_METRICS");
+            "[ COMMIT_METRICS | COMMIT_ANNOTATIONS | ALERT | COMMIT_SCHEMA | COMMIT_HISTOGRAMS ] Specifies the type of client to create. This option is required.");
         INSTALL_OPTION = Option.createOption("-i", " Specifies a file location to store a configuration created interactively.");
         CONFIG_OPTION = Option.createOption("-f", " Specifies the configuration file to use.");
         TEMPLATES = new Option[] { HELP_OPTION, TYPE_OPTION, INSTALL_OPTION, CONFIG_OPTION };
@@ -162,9 +161,14 @@ static void main(String[] args, PrintStream out) throws IOException {
         ClientType type;
 
         try {
-            type = typeOption == null ? 
COMMIT_METRICS : ClientType.valueOf(typeOption.getValue()); + if (typeOption == null) { + throw new IllegalArgumentException("clientType option '" + TYPE_OPTION.getName() + "' cannot be null"); + } + type = ClientType.valueOf(typeOption.getValue()); } catch (Exception ex) { - type = COMMIT_METRICS; + LOGGER.error("Exception while reading clientType argument '{}', process will now exit: ", TYPE_OPTION.getName(), ex); + System.exit(2); + return; } final Thread mainThread = Thread.currentThread(); @@ -227,7 +231,7 @@ void invoke(ClientType clientType) { try { LOGGER.info("Starting service."); - ExecutorService service = ClientServiceFactory.startClientService(_system, clientType, _jobCounter); + ExecutorService[] services = ClientServiceFactory.startClientService(_system, clientType, _jobCounter); LOGGER.info("Service started."); @@ -242,13 +246,21 @@ void invoke(ClientType clientType) { } } LOGGER.info("Stopping service."); - service.shutdownNow(); - try { - if (!service.awaitTermination(60000, TimeUnit.MILLISECONDS)) { - LOGGER.warn("Shutdown timed out after 60 seconds. Exiting."); + + for (ExecutorService s: services) + { + s.shutdownNow(); + try + { + if (!s.awaitTermination(60000, TimeUnit.MILLISECONDS)) + { + LOGGER.warn("Shutdown timed out after 60 seconds. Exiting."); + } + } catch (InterruptedException iex) + { + LOGGER.warn("Forcing shutdown."); + break; } - } catch (InterruptedException iex) { - LOGGER.warn("Forcing shutdown."); } LOGGER.info("Service stopped."); } catch (Exception ex) { diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/MetricCommitter.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/MetricCommitter.java index d3519c3d9..294aa2802 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/MetricCommitter.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/MetricCommitter.java @@ -60,8 +60,8 @@ public class MetricCommitter extends AbstractCommitter { * @param monitorService The monitoring service to use. Cannot be null. * @param jobCounter The global job counter used to track the number of annotations. */ - MetricCommitter(CollectionService colletionService, MonitorService monitorService, AtomicInteger jobCounter) { - super(colletionService,monitorService, jobCounter); + MetricCommitter(CollectionService collectionService, MonitorService monitorService, AtomicInteger jobCounter) { + super(collectionService,monitorService, jobCounter); } //~ Methods ************************************************************************************************************************************** @@ -95,7 +95,7 @@ public void run() { LOGGER.info("Error occurred while committing metrics. Reason {}", ex.toString()); } } - LOGGER.warn(MessageFormat.format("Metric committer thread interrupted. {} datapoints committed by this thread.", jobCounter.get())); + LOGGER.warn("Metric committer thread interrupted. {} datapoints committed by this thread.", jobCounter.get()); collectionService.dispose(); monitorService.dispose(); } diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Refocuser.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Refocuser.java new file mode 100644 index 000000000..fa8ba90c1 --- /dev/null +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/Refocuser.java @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. 
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.salesforce.dva.argus.client;
+
+import com.salesforce.dva.argus.service.RefocusService;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Forwards batches of refocus notifications.
+ *
+ * @author  Ian Keck (ikeck@salesforce.com)
+ */
+class Refocuser implements Runnable {
+
+    //~ Static fields/initializers *******************************************************************************************************************
+
+    private static final long POLL_INTERVAL_MS = 10;
+    private static final Logger LOGGER = LoggerFactory.getLogger(Refocuser.class);
+
+    //~ Instance fields ******************************************************************************************************************************
+
+    private final RefocusService service;
+    private final int timeout;
+
+    //~ Constructors *********************************************************************************************************************************
+
+    /**
+     * Creates a new Refocuser object.
+     *
+     * @param  service  The Refocus service to use.
+     * @param  timeout  The timeout in milliseconds for a single notification-forwarding pass. Must be a positive number.
+     */
+    Refocuser(RefocusService service, int timeout) {
+        this.service = service;
+        this.timeout = timeout;
+    }
+
+    //~ Methods **************************************************************************************************************************************
+
+    @Override
+    public void run() {
+
+        while (!Thread.currentThread().isInterrupted()) {
+            try {
+                service.forwardNotifications();
+                Thread.sleep(POLL_INTERVAL_MS); // TODO - needed? 
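+                // Hedged note, not a guarantee from RefocusService's contract: forwardNotifications()
+                // is assumed to drain whatever batch of notifications is currently queued, so the
+                // 10 ms POLL_INTERVAL_MS sleep above only bounds how quickly an idle loop re-polls.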
+ + } catch (InterruptedException ex) { + LOGGER.info("Execution was interrupted."); + Thread.currentThread().interrupt(); + break; + } catch (Throwable ex) { + LOGGER.error("Exception in RefocusForwarder: {}", ExceptionUtils.getFullStackTrace(ex)); + } + } + service.dispose(); + } +} +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/SchemaCommitter.java b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/SchemaCommitter.java index 96c720401..ab4d6bc7a 100644 --- a/ArgusClient/src/main/java/com/salesforce/dva/argus/client/SchemaCommitter.java +++ b/ArgusClient/src/main/java/com/salesforce/dva/argus/client/SchemaCommitter.java @@ -87,7 +87,7 @@ public void run() { LOGGER.warn("Error occurred while committing metrics for schema records creation.", ex); } } - LOGGER.warn(MessageFormat.format("Schema committer thread interrupted. {} metrics committed by this thread.", jobCounter.get())); + LOGGER.warn("Schema committer thread interrupted. {} metrics committed by this thread.", jobCounter.get()); collectionService.dispose(); monitorService.dispose(); } diff --git a/ArgusCore/pom.xml b/ArgusCore/pom.xml index 852a46877..45dd3172e 100644 --- a/ArgusCore/pom.xml +++ b/ArgusCore/pom.xml @@ -5,15 +5,18 @@ argus com.salesforce.argus - 2.22.3 + 4-SNAPSHOT .. + argus-core jar + 4.75-SNAPSHOT ArgusCore - Argus core services. Business layer for the Argus Production Monitoring Tool. + Argus core services. ${project.basedir}/${project.parent.relativePath} + 2.1.1 @@ -141,23 +144,24 @@ + - - ${project.groupId} - argus - ${project.version} - resources - zip - provided - - - ${project.groupId} - argus - ${project.version} - test-resources - zip - provided - + + ${project.groupId} + argus + ${project.parent.version} + resources + zip + provided + + + ${project.groupId} + argus + ${project.parent.version} + test-resources + zip + provided + ch.qos.logback logback-classic @@ -169,7 +173,7 @@ org.mockito mockito-core - 1.10.19 + 2.27.0 test @@ -178,6 +182,12 @@ 1.3 test + + com.github.tomakehurst + wiremock + 2.18.0 + test + junit junit @@ -249,6 +259,11 @@ mail 1.4.7 + + javax.ws.rs + javax.ws.rs-api + 2.0 + org.apache.commons commons-lang3 @@ -256,34 +271,33 @@ org.apache.kafka - kafka_2.10 - 0.8.2.1 + kafka_2.11 + ${kafka.version} + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + org.apache.kafka kafka-clients - 0.8.2.1 - - - org.scala-lang - scala-library - 2.10.5 - - - com.101tec - zkclient - 0.3 - - - org.apache.zookeeper - zookeeper + ${kafka.version} org.slf4j slf4j-log4j12 + + log4j + log4j + - 3.4.10 net.sf.jopt-simple @@ -436,8 +450,8 @@ org.elasticsearch.client - rest - 5.5.1 + elasticsearch-rest-client + 6.6.2 net.openhft @@ -445,9 +459,36 @@ 0.8 - org.freemarker - freemarker - 2.3.28 + org.freemarker + freemarker + 2.3.28 + + + io.prometheus.jmx + jmx_prometheus_javaagent + ${jmx.prometheus.version} + + + org.jfree + jfreechart + 1.5.0 + + + io.prometheus.jmx + jmx_prometheus_javaagent + ${jmx.prometheus.version} + + + org.powermock + powermock-module-junit4 + 2.0.0 + test + + + org.powermock + powermock-api-mockito2 + 2.0.0 + test diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/AbstractSchemaRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/AbstractSchemaRecord.java new file mode 100644 index 000000000..84a0f4789 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/AbstractSchemaRecord.java @@ -0,0 +1,66 @@ +package 
com.salesforce.dva.argus.entity; + +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.InetAddress; + +public abstract class AbstractSchemaRecord { + private static Logger _logger = LoggerFactory.getLogger(AbstractSchemaRecord.class); + + private final static String randomBloomAppend; + static { + String appendValue; + try { + appendValue = Integer.toString(Math.abs(InetAddress.getLocalHost().getHostName().hashCode())); + } catch (IOException io) { + appendValue = "12345"; + _logger.error("Failed to create randomBloomAppend, using {}. {}", appendValue, io); + } + randomBloomAppend = appendValue; + } + + public abstract String toBloomFilterKey(); + + public static String getBloomAppend() { + return randomBloomAppend; + } + + public static String constructKey(String scope, String metric, String tagk, String tagv, String namespace, String retention) { + StringBuilder sb = new StringBuilder(scope); + if(!StringUtils.isEmpty(metric)) { + sb.append('\0').append(metric); + } + if(!StringUtils.isEmpty(namespace)) { + sb.append('\0').append(namespace); + } + if(!StringUtils.isEmpty(tagk)) { + sb.append('\0').append(tagk); + } + if(!StringUtils.isEmpty(tagv)) { + sb.append('\0').append(tagv); + } + //there is use case where users simply want to update the retention without touching rest of a metric + if(!StringUtils.isEmpty(retention)) { + sb.append('\0').append(retention); + } + sb.append('\0').append(randomBloomAppend); + + return sb.toString(); + } + + public static String constructKey(Metric metric, String tagk, String tagv) { + return constructKey(metric.getScope(), + metric.getMetric(), + tagk, + tagv, + metric.getNamespace(), + metric.getMetatagsRecord() == null ? null : metric.getMetatagsRecord().getMetatagValue(MetricSchemaRecord.RETENTION_DISCOVERY)); + } + + public static String constructKey(String scope) { + return constructKey(scope, null, null, null, null, null); + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Alert.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Alert.java index 6ea2129d4..b18bca9ae 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Alert.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Alert.java @@ -32,6 +32,7 @@ package com.salesforce.dva.argus.entity; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgumentP; import java.io.IOException; import java.io.Serializable; @@ -44,6 +45,8 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Arrays; +import java.util.stream.Collectors; import javax.persistence.Basic; import javax.persistence.CascadeType; @@ -69,6 +72,8 @@ import javax.persistence.criteria.Root; import javax.persistence.criteria.Selection; +import com.salesforce.dva.argus.util.CommonUtils; +import com.salesforce.dva.argus.util.Cron; import org.apache.commons.lang3.reflect.FieldUtils; import org.eclipse.persistence.config.HintValues; import org.eclipse.persistence.config.QueryHints; @@ -85,6 +90,9 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.salesforce.dva.argus.service.metric.MetricReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * The entity which encapsulates information about a Dashboard. 
* @@ -115,35 +123,36 @@ @NamedQuery( name = "Alert.findByNameAndOwner", query = - "SELECT a FROM Alert a WHERE a.name = :name AND a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + "SELECT a FROM Alert a WHERE a.name = :name AND a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( name = "Alert.findByOwner", - query = "SELECT a FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + query = "SELECT a FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( - name = "Alert.findAll", query = "SELECT a FROM Alert a WHERE a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + name = "Alert.findAll", query = "SELECT a FROM Alert a WHERE a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( name = "Alert.findByStatus", - query = "SELECT a FROM Alert a where a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert) order by a.id asc" + query = "SELECT a FROM Alert a where a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) order by a.id asc" ), @NamedQuery( name = "Alert.findByRangeAndStatus", - query = "SELECT a FROM Alert a where a.id BETWEEN :fromId and :toId AND a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert) order by a.id asc" + query = "SELECT a FROM Alert a where a.id BETWEEN :fromId and :toId AND a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) order by a.id asc" ), @NamedQuery( name = "Alert.findIDsByStatus", - query = "SELECT a.id FROM Alert a where a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert) order by a.id asc" + query = "SELECT a.id FROM Alert a where a.enabled= :enabled AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false and TYPE(jpa)= Alert AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) order by a.id asc" ), @NamedQuery( name = "Alert.findAlertsModifiedAfterDate", - query = "SELECT a FROM Alert a where a.id in (SELECT jpa.id from JPAEntity jpa where TYPE(jpa)= Alert and jpa.modifiedDate >= :modifiedDate) order by a.id asc" + query = "SELECT a FROM Alert a where a.id in (SELECT jpa.id from JPAEntity jpa where TYPE(jpa)= Alert and jpa.modifiedDate >= :modifiedDate AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) order by a.id asc" ), @NamedQuery( name = "Alert.findByPrefix", - query = "SELECT a FROM Alert a where a.name LIKE :name AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" - ), @NamedQuery(name = "Alert.setEnabled", query = "UPDATE Alert a SET a.enabled=true WHERE a = :alert"), + query = "SELECT a FROM Alert a where a.name LIKE :name AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" + ), + @NamedQuery(name = "Alert.setEnabled", 
query = "UPDATE Alert a SET a.enabled=true WHERE a = :alert"), @NamedQuery(name = "Alert.setDisabled", query = "UPDATE Alert a SET a.enabled=false WHERE a = :alert"), @NamedQuery(name = "Alert.countByStatus", query = "SELECT count(a) from Alert a where a.enabled= :enabled"), @NamedQuery( @@ -157,29 +166,29 @@ // Count alert queries @NamedQuery( name = "Alert.countByOwner", - query = "SELECT count(a) FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + query = "SELECT count(a) FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( name = "Alert.countByOwnerWithSearchText", - query = "SELECT count(a) FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false) " + query = "SELECT count(a) FROM Alert a WHERE a.owner = :owner AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) " + "AND (FUNCTION('LOWER', a.name) LIKE :searchtext OR FUNCTION('LOWER', a.owner.userName) LIKE :searchtext)" ), @NamedQuery( name = "Alert.countSharedAlerts", - query = "SELECT count(a) from Alert a where a.shared = true AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + query = "SELECT count(a) from Alert a where a.shared = true AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( name = "Alert.countSharedAlertsWithSearchText", - query = "SELECT count(a) FROM Alert a WHERE a.shared = true AND a.id IN (SELECT jpa.id FROM JPAEntity jpa WHERE jpa.deleted = false) " + query = "SELECT count(a) FROM Alert a WHERE a.shared = true AND a.id IN (SELECT jpa.id FROM JPAEntity jpa WHERE jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) " + "AND (FUNCTION('LOWER', a.name) LIKE :searchtext OR FUNCTION('LOWER', a.owner.userName) LIKE :searchtext)" ), @NamedQuery( name = "Alert.countPrivateAlertsForPrivilegedUser", - query = "SELECT count(a) from Alert a where a.shared = false AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false)" + query = "SELECT count(a) from Alert a where a.shared = false AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL))" ), @NamedQuery( name = "Alert.countPrivateAlertsForPrivilegedUserWithSearchText", - query = "SELECT count(a) from Alert a where a.shared = false AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false) " + query = "SELECT count(a) from Alert a where a.shared = false AND a.id in (SELECT jpa.id from JPAEntity jpa where jpa.deleted = false AND (jpa.createdBy IS NOT NULL AND jpa.modifiedBy IS NOT NULL)) " + "AND (FUNCTION('LOWER', a.name) LIKE :searchtext OR FUNCTION('LOWER', a.owner.userName) LIKE :searchtext)" ), } @@ -205,6 +214,7 @@ public class Alert extends JPAEntity implements Serializable, CronJob { @Metadata private boolean enabled = false; + @Metadata private boolean missingDataNotificationEnabled; @OneToMany(mappedBy = "alert", cascade = CascadeType.ALL, orphanRemoval = true) @@ -221,6 +231,9 @@ public class Alert extends JPAEntity implements Serializable, CronJob { @Metadata private boolean shared; + //~ Static Instance fields 
*********************************************************************************************************************** + private static final Logger LOGGER = LoggerFactory.getLogger(Alert.class); + // Default values for page limit and page offset private static int DEFAULT_PAGE_LIMIT = 10; private static int DEFAULT_PAGE_OFFSET = 0; @@ -249,9 +262,100 @@ public Alert(PrincipalUser creator, PrincipalUser owner, String name, String exp setName(name); setExpression(expression); setCronEntry(cronEntry); - setEnabled(false); setMissingDataNotificationEnabled(false); - setShared(false); + setShared(false); + setEnabled(false); + } + + /** + * Creates an alert object by creating a deep copy of another alert + * + * @param other The alert to be copied + * @param user The owner of the new alert. Cannot be null. + * @param alertName The name of the new alert. Cannot be null or empty. + * + * @throws Exception Throws exception if a problem is encountered while copying props of the original alert. + */ + public Alert(Alert other, String alertName, PrincipalUser user) throws Exception { + this(user, user, alertName, other.getExpression(), other.getCronEntry()); + + List clonedTriggers = new ArrayList<>(); + List clonedNotifications = new ArrayList<>(); + Map triggersCreatedMapByHash = new HashMap<>(); + + /*For each existing notification, create new cloned notification. + * For each existing trigger in the current notification, create new cloned trigger and add it to cloned notification. + **/ + for (Notification currentNotification : other.getNotifications()) { + Notification currentNotificationCloned = new Notification( + currentNotification.getName(), + this, + currentNotification.getNotifierName(), + currentNotification.getSubscriptions(), + currentNotification.getCooldownPeriod() + ); + + clonedNotifications.add(currentNotificationCloned); + + CommonUtils.copyProperties(currentNotificationCloned, currentNotification); + currentNotificationCloned.setAlert(this); + currentNotificationCloned.setCreatedBy(user); + currentNotificationCloned.setModifiedBy(user); + + List triggersInCurrentNotification = new ArrayList<>(); + for (Trigger currentTrigger : currentNotification.getTriggers()) { + int currentTriggerHash = currentTrigger.hashCode(); + if (!triggersCreatedMapByHash.containsKey(currentTriggerHash)) { + Trigger currentTriggerCloned = new Trigger( + this, + currentTrigger.getType(), + currentTrigger.getName(), + currentTrigger.getThreshold(), + currentTrigger.getSecondaryThreshold(), + currentTrigger.getInertia() + ); + clonedTriggers.add(currentTriggerCloned); + CommonUtils.copyProperties(currentTriggerCloned, currentTrigger); + currentTriggerCloned.setCreatedBy(user); + currentTriggerCloned.setModifiedBy(user); + currentTriggerCloned.setAlert(this); + triggersCreatedMapByHash.put(currentTriggerHash, currentTriggerCloned); + } + triggersInCurrentNotification.add(triggersCreatedMapByHash.get(currentTriggerHash)); + } + currentNotificationCloned.setTriggers(triggersInCurrentNotification); + } + + /* + * Triggers with no notifications attached + * */ + for (Trigger currentTrigger : other.getTriggers()) { + int currentTriggerHash = currentTrigger.hashCode(); + if (!triggersCreatedMapByHash.containsKey(currentTriggerHash)) { + Trigger currentTriggerCloned = new Trigger( + this, + currentTrigger.getType(), + currentTrigger.getName(), + currentTrigger.getThreshold(), + currentTrigger.getSecondaryThreshold(), + currentTrigger.getInertia() + ); + clonedTriggers.add(currentTriggerCloned); + 
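+                    // Copy the remaining properties from the source trigger, then bind the clone to this alert.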
CommonUtils.copyProperties(currentTriggerCloned, currentTrigger); + currentTriggerCloned.setCreatedBy(user); + currentTriggerCloned.setModifiedBy(user); + currentTriggerCloned.setAlert(this); + triggersCreatedMapByHash.put(currentTriggerHash, currentTriggerCloned); + } + } + + // NOTE: whenever a new field gets added to an Alert object make sure to update this clone + this.setMissingDataNotificationEnabled(other.isMissingDataNotificationEnabled()); + this.setShared(other.isShared()); + this.setTriggers(clonedTriggers); + this.setNotifications(clonedNotifications); + this.setModifiedBy(user); + this.setEnabled(other.isEnabled()); // This should be last } /** Creates a new Alert object. */ @@ -386,7 +490,7 @@ public static List findByOwnerMeta(EntityManager em, PrincipalUser owner) whereParams.put(OWNER_KEY, owner); // Get alerts meta - return getAlertsMetaPaged(em, null, null, whereParams, null); + return getAlertsMetaPaged(em, null, null, whereParams, null, null, null); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -405,11 +509,15 @@ public static List findByOwnerMeta(EntityManager em, PrincipalUser owner) * The starting offset of the result. * @param searchText * The text to filter the search results. - * + * @param sortField + * The field of the alert that is used for sorting. + * @param sortOrder + * The order for sorting. + * * @return The list of alerts for the owner. */ public static List findByOwnerMetaPaged(EntityManager em, PrincipalUser owner, Integer limit, - Integer offset, String searchText) { + Integer offset, String searchText, String sortField, String sortOrder) { requireArgument(em != null, "Entity manager can not be null."); requireArgument(owner != null, "Owner cannot be null"); @@ -430,7 +538,7 @@ public static List findByOwnerMetaPaged(EntityManager em, PrincipalUser o whereParams.put(OWNER_KEY, owner); // Get alerts meta - return getAlertsMetaPaged(em, limit, offset, whereParams, searchText); + return getAlertsMetaPaged(em, limit, offset, whereParams, searchText, sortField, sortOrder); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -471,7 +579,7 @@ public static List findAllMeta(EntityManager em) { whereParams.put(DELETED_KEY, false); // Get alerts meta - return getAlertsMetaPaged(em, null, null, whereParams, null); + return getAlertsMetaPaged(em, null, null, whereParams, null, null, null); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -719,7 +827,7 @@ public static List findSharedAlertsMeta(EntityManager em, PrincipalUser o } // Get alerts meta - return getAlertsMetaPaged(em, limit, null, whereParams, null); + return getAlertsMetaPaged(em, limit, null, whereParams, null, null, null); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -736,10 +844,15 @@ public static List findSharedAlertsMeta(EntityManager em, PrincipalUser o * The starting offset of the result. * @param searchText * The text to filter the search results. + * @param sortField + * The field of the alert that is used for sorting. + * @param sortOrder + * The order for sorting. * * @return The list of shared alerts with given limit and offset. 
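+     *
+     * <p>Illustrative call (argument values are hypothetical; sortField and sortOrder take the
+     * SortFieldType and SortOrderType names defined below):
+     * <pre>{@code
+     * List<Alert> shared = Alert.findSharedAlertsMetaPaged(em, 20, 0, "cpu*", "modifiedDate", "DESC");
+     * }</pre>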
*/ - public static List findSharedAlertsMetaPaged(EntityManager em, Integer limit, Integer offset, String searchText) { + public static List findSharedAlertsMetaPaged(EntityManager em, Integer limit, Integer offset, String searchText, + String sortField, String sortOrder) { requireArgument(em != null, "Entity manager can not be null."); if (searchText != null) { @@ -760,7 +873,7 @@ public static List findSharedAlertsMetaPaged(EntityManager em, Integer li whereParams.put(SHARED_KEY, true); // Get alerts meta - return getAlertsMetaPaged(em, limit, offset, whereParams, searchText); + return getAlertsMetaPaged(em, limit, offset, whereParams, searchText, sortField, sortOrder); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -775,10 +888,15 @@ public static List findSharedAlertsMetaPaged(EntityManager em, Integer li * @param offset The starting offset of the result. * @param searchText * The text to filter the search results. - * + * @param sortField + * The field of the alert that is used for sorting. + * @param sortOrder + * The order for sorting. + * * @return The list of private alerts' meta with given limit and offset. */ - public static List findPrivateAlertsForPrivilegedUserMetaPaged(EntityManager em, PrincipalUser owner, Integer limit, Integer offset, String searchText) { + public static List findPrivateAlertsForPrivilegedUserMetaPaged(EntityManager em, PrincipalUser owner, Integer limit, Integer offset, String searchText, + String sortField, String sortOrder) { requireArgument(em != null, "Entity manager can not be null."); if (searchText != null) { @@ -804,7 +922,7 @@ public static List findPrivateAlertsForPrivilegedUserMetaPaged(EntityMana whereParams.put(SHARED_KEY, false); // Get alerts meta - return getAlertsMetaPaged(em, limit, offset, whereParams, searchText); + return getAlertsMetaPaged(em, limit, offset, whereParams, searchText, sortField, sortOrder); } catch (NoResultException ex) { return new ArrayList<>(0); } @@ -884,7 +1002,7 @@ public static List findByPrefix(EntityManager em, String prefix) { * limit and offset. 
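+     *
+     * <p>Sorting is applied only when both sortField and sortOrder are non-null; otherwise the
+     * result order is unspecified. A sketch of how callers in this class populate whereParams
+     * (declared type and values shown are illustrative):
+     * <pre>{@code
+     * Map<String, Object> whereParams = new HashMap<>();
+     * whereParams.put(SHARED_KEY, true);
+     * whereParams.put(DELETED_KEY, false);
+     * }</pre>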
*/ private static List getAlertsMetaPaged(EntityManager em, Integer limit, Integer offset, - Map whereParams, String searchText) { + Map whereParams, String searchText, String sortField, String sortOrder) { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery cq = cb.createTupleQuery(); Root e = cq.from(Alert.class); @@ -928,9 +1046,32 @@ private static List getAlertsMetaPaged(EntityManager em, Integer limit, I cq.where(predicates.toArray(new Predicate[predicates.size()])); } - // Sort result by alert id - cq.orderBy(cb.asc(e.get("id"))); + // By default, do not sort + // Sort based or sortField and sortOrder if both are not null + if (sortField != null && sortOrder != null){ + Expression sortColumn; + + switch(SortFieldType.fromName(sortField)) { + case OWNER_NAME : + sortColumn = e.join("owner").get("userName"); + break; + default : + sortColumn = e.get(sortField); + break; + } + switch(SortOrderType.fromName(sortOrder)){ + case ASC : + cq.orderBy(cb.asc(sortColumn)); + break; + case DESC : + cq.orderBy(cb.desc(sortColumn)); + break; + default : + break; + } + } + TypedQuery query = em.createQuery(cq); query.setHint("javax.persistence.cache.storeMode", "REFRESH"); query.setHint(QueryHints.REFRESH, HintValues.TRUE); @@ -949,19 +1090,24 @@ private static List getAlertsMetaPaged(EntityManager em, Integer limit, I List alerts = new ArrayList<>(); for (Tuple tuple : result) { - - Alert a = new Alert(PrincipalUser.class.cast(tuple.get("createdBy")), - PrincipalUser.class.cast(tuple.get("owner")), String.class.cast(tuple.get("name")), - String.class.cast(tuple.get("expression")), String.class.cast(tuple.get("cronEntry"))); - - a.id = BigInteger.class.cast(tuple.get("id")); - a.enabled = Boolean.class.cast(tuple.get("enabled")); - a.createdDate = Date.class.cast(tuple.get("createdDate")); - a.modifiedDate = Date.class.cast(tuple.get("modifiedDate")); - a.shared = Boolean.class.cast(tuple.get("shared")); - a.modifiedBy = PrincipalUser.class.cast(tuple.get("modifiedBy")); - - alerts.add(a); + try { + Alert a = new Alert(PrincipalUser.class.cast(tuple.get("createdBy")), + PrincipalUser.class.cast(tuple.get("owner")), String.class.cast(tuple.get("name")), + String.class.cast(tuple.get("expression")), String.class.cast(tuple.get("cronEntry"))); + + a.id = BigInteger.class.cast(tuple.get("id")); + a.enabled = Boolean.class.cast(tuple.get("enabled")); + a.createdDate = Date.class.cast(tuple.get("createdDate")); + a.modifiedDate = Date.class.cast(tuple.get("modifiedDate")); + a.shared = Boolean.class.cast(tuple.get("shared")); + a.modifiedBy = PrincipalUser.class.cast(tuple.get("modifiedBy")); + a.missingDataNotificationEnabled = Boolean.class.cast(tuple.get("missingDataNotificationEnabled")); + + alerts.add(a); + } catch (RuntimeException r) { + // TODO: Add logging? + continue; + } } // Trim excessive items more then limit in the end @@ -975,7 +1121,7 @@ private static List getAlertsMetaPaged(EntityManager em, Integer limit, I private static String _convertSearchTextWildCardForQuery(String searchText) { return "%" + searchText.toLowerCase().replace("*", "%").replace("?","_") + "%"; } - + /** * Returns the CRON entry for the alert. * @@ -992,6 +1138,7 @@ public String getCronEntry() { * @param cronEntry The new CRON entry. Cannot be null and must be valid CRON entry syntax. 
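+     *
+     * <p>For illustration only, a conventional five-field entry such as {@code 0 12 * * 1}
+     * (noon every Monday) is the sort of value expected here; validity is ultimately decided by
+     * {@code Cron.isCronEntryValid(String)}, which {@code validateAlert()} also applies.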
*/ public void setCronEntry(String cronEntry) { + requireArgument(cronEntry != null && !cronEntry.isEmpty(), "CronEntry cannot be null or empty."); this.cronEntry = cronEntry; } @@ -1047,8 +1194,8 @@ public String getExpression() { * * @param expression The alert expression. Cannot be null and must be valid metric expression syntax as defined in the MetricService */ - public void setExpression(String expression) { - requireArgument(MetricReader.isValid(expression), "Invalid metric expression " + expression); + public void setExpression(String expression) throws RuntimeException { + requireArgument(expression != null && !expression.isEmpty(), "Expression cannot be null or empty."); this.expression = expression; } @@ -1159,12 +1306,60 @@ public void setShared(boolean shared) { this.shared = shared; } + + /** + * Validates all fields of an alert. + * @throws RuntimeException + */ + public void validateAlert() throws RuntimeException { + requireArgumentP(this.expression, x -> MetricReader.validateExpression(x), "Invalid alert expression: " + this.expression, true); + requireArgument(Cron.isCronEntryValid(this.cronEntry), "Invalid cron entry: " + this.cronEntry); + requireArgument(this.owner != null, "Owner cannot be null."); + requireArgument(this.name != null && !this.name.isEmpty(), "Name cannot be null or empty."); + } + + /** + * Returns whether the alert is valid + */ + public boolean isValid() { + try + { + this.validateAlert(); + } + catch (RuntimeException e) + { + return false; + } + return true; + } + + /** + * Returns null if the alert is valid or the error message if it is not. + */ + public String validationMessage() { + try + { + this.validateAlert(); + } + catch (RuntimeException e) + { + return e.getMessage(); + } + return null; + } + @Override public int hashCode() { int hash = 5; hash = 31 * hash + Objects.hashCode(this.name); hash = 31 * hash + Objects.hashCode(this.owner); + hash = 31 * hash + Objects.hashCode(this.cronEntry); + hash = 31 * hash + Objects.hashCode(this.enabled); + hash = 31 * hash + Objects.hashCode(this.expression); + hash = 31 * hash + Objects.hashCode(this.missingDataNotificationEnabled); + hash = 31 * hash + Objects.hashCode(this.shared); + return hash; } @@ -1179,12 +1374,18 @@ public boolean equals(Object obj) { final Alert other = (Alert) obj; - if (!Objects.equals(this.name, other.name)) { + if (this.hashCode() != other.hashCode()) { return false; } - if (!Objects.equals(this.owner, other.owner)) { + + if (!CommonUtils.listsAreEquivelent(this.getTriggers(), other.getTriggers())) { + return false; + } + + if (!CommonUtils.listsAreEquivelent(this.getNotifications(), other.getNotifications())) { return false; } + return true; } @@ -1209,6 +1410,9 @@ public void serialize(Alert alert, JsonGenerator jgen, SerializerProvider provid jgen.writeBooleanField("enabled", alert.isEnabled()); jgen.writeBooleanField("missingDataNotificationEnabled", alert.isMissingDataNotificationEnabled()); jgen.writeObjectField("owner", alert.getOwner()); + if(alert.getModifiedDate() != null) { + jgen.writeObjectField("modifiedDate", alert.getModifiedDate().getTime()); + } jgen.writeArrayFieldStart("triggers"); for(Trigger trigger : alert.getTriggers()) { @@ -1226,6 +1430,8 @@ public void serialize(Alert alert, JsonGenerator jgen, SerializerProvider provid } } + + // NOTE - ian - TODO - create a separate deserializer for testing OR handle missing info here? 
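+    // Sketch of the JSON shape consumed by the deserializer below (field names are the ones it
+    // reads; values are hypothetical):
+    //   { "name": "...", "expression": "...", "cronEntry": "...", "enabled": true,
+    //     "missingDataNotificationEnabled": false, "triggers": [ ... ], "notifications": [ ... ] }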
public static class Deserializer extends JsonDeserializer { @Override @@ -1249,13 +1455,15 @@ public Alert deserialize(JsonParser jp, DeserializationContext ctxt) throws IOEx alert.setName(name); String expression = rootNode.get("expression").asText(); + alert.setExpression(expression); String cronEntry = rootNode.get("cronEntry").asText(); alert.setCronEntry(cronEntry); - boolean enabled = rootNode.get("enabled").asBoolean(); - alert.setEnabled(enabled); +// if(rootNode.get("modifiedDate") != null) { +// alert.setModifiedDate(Date.from(Instant.ofEpochMilli(rootNode.get("modifiedDate").asLong()))); +// } boolean missingDataNotificationEnabled = rootNode.get("missingDataNotificationEnabled").asBoolean(); alert.setMissingDataNotificationEnabled(missingDataNotificationEnabled); @@ -1287,6 +1495,10 @@ public Alert deserialize(JsonParser jp, DeserializationContext ctxt) throws IOEx } alert.setNotifications(notifications); + // This needs to be last because alerts won't validate unless they are complete. + boolean enabled = rootNode.get("enabled").asBoolean(); + alert.setEnabled(enabled); + return alert; } @@ -1348,8 +1560,91 @@ public PrincipalUser deserialize(JsonParser jp, DeserializationContext ctxt) thr return user; } - } + //~ Enums ************************************************************************************************************************************** + public enum SortFieldType { + + OWNER_NAME("ownerName"), + NAME("name"), + MODIFIED_DATE("modifiedDate"), + CREATED_DATE("createdDate"); + + private String name_; + private SortFieldType(String name){ + name_ = name; + } + + /** + * Returns a given sort field type corresponding to the given name. + * + * @param name The sort field type name + * + * @return The sort field type + */ + public static SortFieldType fromName(String name) { + for (SortFieldType t: SortFieldType.values()) { + if (t.getName().equalsIgnoreCase(name)) { + return t; + } + } + String errorMessage = "SortFieldType " + name + + " does not exist or is not supported. Allowed values are: " + + Arrays.asList(SortFieldType.values()).stream().map(t -> t.getName()).collect(Collectors.toList()); + + LOGGER.error(errorMessage + "\n"); + throw new IllegalArgumentException(errorMessage); + } + + /** + * Return sort field type name. + * + * @return The sort field type name\ + * + */ + public String getName(){ + return name_; + } + } + + + + public enum SortOrderType { + + ASC, + DESC; + + /** + * Returns a given sort order type corresponding to the given name. + * + * @param name The sort order type name + * + * @return The sort order type + */ + public static SortOrderType fromName(String name) { + for (SortOrderType t : SortOrderType.values()) { + if (t.getName().equalsIgnoreCase(name)) { + return t; + } + } + String errorMessage = "SortOrderType " + name + + " does not exist or is not supported. Allowed values are: " + + Arrays.asList(SortOrderType.values()); + + LOGGER.error(errorMessage + "\n"); + throw new IllegalArgumentException(errorMessage); + } + + /** + * Return Sort order type name. + * + * @return The sort order type name + * + */ + public String getName(){ + return this.toString(); + } + } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Annotation.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Annotation.java index 323b2f904..3c5487d85 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Annotation.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Annotation.java @@ -36,6 +36,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.TreeMap; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; @@ -88,6 +89,34 @@ protected Annotation() { //~ Methods ************************************************************************************************************************************** + /** + * Returns the size of the annotation in bytes. + * + * @return The size in bytes. + */ + public int computeSizeBytes() { + int size = computeLength(_source); + size += computeLength(_id); + size += computeLength(_type); + size += computeLength(getScope()); + size += Long.BYTES; // size of timestamp field + for (Map.Entry e : _fields.entrySet()) { + size += e.getKey().length(); + size += e.getValue().length(); + } + for (Map.Entry e : getTags().entrySet()) { + size += e.getKey().length(); + size += e.getValue().length(); + } + size += computeLength(getUid()); + size += computeLength(getMetric()); + return size; + } + + private int computeLength(String s) { + return s != null ? s.length() : 0; + } + /** * Returns the source of the annotation. * @@ -193,7 +222,7 @@ public boolean equals(Object obj) { * @param scope The scope of the collection. Cannot be null or empty. */ @Override - protected void setScope(String scope) { + public void setScope(String scope) { requireArgument(scope != null && !scope.trim().isEmpty(), "Scope cannot be null or empty."); super.setScope(scope); } @@ -204,7 +233,7 @@ protected void setScope(String scope) { * @param metric The metric with which the annotation is associated. If not null, it cannot be empty. */ @Override - protected void setMetric(String metric) { + public void setMetric(String metric) { requireArgument(metric == null || !metric.trim().isEmpty(), "Metric can be null, but if specified, cannot be empty"); super.setMetric(metric); } @@ -224,7 +253,7 @@ private void setType(String type) { * * @param timestamp THe time stamp for the annotation. Cannot be null. */ - private void setTimestamp(Long timestamp) { + public void setTimestamp(Long timestamp) { requireArgument(timestamp != null, "Timestamp cannot be null."); _timestamp = timestamp; } @@ -269,5 +298,10 @@ public String toString() { return MessageFormat.format(format, params); } + + public static String getIdentifierFieldsAsString(Annotation annotation) { + return new StringBuilder(annotation.getScope()).append(":").append(annotation.getMetric()).append(":") + .append(annotation.getTags().toString()).append(":").append(annotation.getType()).append(":").append(annotation.getTimestamp()).toString(); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java new file mode 100644 index 000000000..28f4e2c29 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2019, Salesforce.com, Inc. + * All rights reserved. 
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java new file mode 100644 index 000000000..28f4e2c29 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Histogram.java @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2019, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.entity; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.io.Serializable; +import java.text.MessageFormat; +import java.util.Collections; +import java.util.Map; +import java.util.TreeMap; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.salesforce.dva.argus.system.SystemAssert; + +/** + * Time series histogram entity object. This entity encapsulates all the information needed to represent a time series for a histogram within a single + * scope. The following tag names are reserved. Any methods that set tags, which use these reserved tag names, will throw a runtime exception. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +@SuppressWarnings("serial") +public class Histogram extends TSDBEntity implements Serializable { + + //~ Instance fields ****************************************************************************************************************************** + private String _displayName; + private String _units; + @JsonSerialize (keyUsing = HistogramBucketSerializer.class) + @JsonDeserialize (keyUsing = HistogramBucketDeserializer.class) + private Map<HistogramBucket, Long> _buckets; + private Long _underflow = 0L; + private Long _overflow = 0L; + private Long _timestamp; + + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new Histogram object by performing a shallow copy of the given Histogram object. + * + * @param histogram The histogram object to clone. Cannot be null.
*/ + public Histogram(Histogram histogram) { + SystemAssert.requireArgument(histogram != null, "Histogram to clone cannot be null."); + setScope(histogram.getScope()); + setMetric(histogram.getMetric()); + setTags(histogram.getTags()); + _buckets = new TreeMap<>(); + setBuckets(histogram.getBuckets()); + setDisplayName(histogram.getDisplayName()); + setUnits(histogram.getUnits()); + } + + /** + * Creates a new Histogram object. + * + * @param scope The reverse dotted name of the collection scope. Cannot be null or empty. + * @param metric The name of the metric. Cannot be null or empty. + */ + public Histogram(String scope, String metric) { + this(); + setScope(scope); + setMetric(metric); + } + + /** Creates a new Histogram object. */ + protected Histogram() { + super(null, null); + _buckets = new TreeMap<>(); + } + + //~ Methods ************************************************************************************************************************************** + + @Override + public void setScope(String scope) { + requireArgument(scope != null && !scope.trim().isEmpty(), "Scope cannot be null or empty."); + super.setScope(scope); + } + + @Override + public void setMetric(String metric) { + requireArgument(metric != null && !metric.trim().isEmpty(), "Metric cannot be null or empty."); + super.setMetric(metric); + } + + /** + * Returns the optional overflow for the histogram. + * + * @return The overflow for the histogram. + */ + public Long getOverflow() { + return _overflow; + } + + /** + * Sets the overflow for the histogram. + * + * @param overflow The overflow for the histogram. + */ + public void setOverflow(Long overflow) { + _overflow = overflow; + } + + /** + * Returns the optional underflow for the histogram. + * + * @return The underflow for the histogram. + */ + public Long getUnderflow() { + return _underflow; + } + + /** + * Sets the underflow for the histogram. + * + * @param underflow The underflow for the histogram. + */ + public void setUnderflow(Long underflow) { + _underflow = underflow; + } + + /** + * Returns an unmodifiable map of histogram buckets which is backed by the entity object's internal data. + * + * @return The map of histogram buckets. Will never be null, but may be empty. + */ + public Map<HistogramBucket, Long> getBuckets() { + return Collections.unmodifiableMap(_buckets); + } + + /** + * Adds a new bucket with its count to the existing buckets. + * + * @param lowerBound lower bound of bucket + * @param upperBound upper bound of bucket + * @param count count within this bucket + */ + public void addBucket(float lowerBound, float upperBound, long count) { + _buckets.put(new HistogramBucket(lowerBound, upperBound), count); + } + + /** + * Deletes the current map of histogram buckets and replaces them with a new map. + * + * @param buckets The new map of histogram buckets. If null or empty, only the deletion of the current set of histogram buckets is performed. + */ + public void setBuckets(Map<HistogramBucket, Long> buckets) { + _buckets.clear(); + if (buckets != null) { + _buckets.putAll(buckets); + } + } + + /** + * Deletes the current map of histogram buckets. + */ + public void clearBuckets() { + _buckets.clear(); + } + + /** + * Sets the display name for the histogram. + * + * @param displayName The display name for the histogram. Can be null or empty. + */ + public void setDisplayName(String displayName) { + _displayName = displayName; + } + + /** + * Returns the display name for the histogram. + * + * @return The display name for the histogram. Can be null or empty.
*/ + public String getDisplayName() { + return _displayName; + } + + /** + * Sets the units of the histogram values. + * + * @param units The units of the histogram values. Can be null or empty. + */ + public void setUnits(String units) { + _units = units; + } + + /** + * Returns the units of the histogram values. + * + * @return The units of the histogram values. Can be null or empty. + */ + public String getUnits() { + return _units; + } + + /** + * Sets the time stamp of the histogram. + * + * @param timestamp The time stamp for the histogram. Cannot be null. + */ + public void setTimestamp(Long timestamp) { + requireArgument(timestamp != null, "Timestamp cannot be null."); + _timestamp = timestamp; + } + + /** + * Returns the time stamp of the histogram. + * + * @return The time stamp of the histogram. Will never be null. + */ + public Long getTimestamp() { + return _timestamp; + } + + @Override + public String toString() { + Object[] params = {getTimestamp(), getScope(), getMetric(), getTags(), getBuckets(), getUnderflow(), getOverflow() }; + String format = "timestamp=>{0,number,#}, scope=>{1}, metric=>{2}, tags=>{3}, buckets=>{4}, underflow=>{5}, overflow=>{6}"; + + return MessageFormat.format(format, params); + } + + /** + * Returns an identifier string for the histogram; the format is <scope>:<metric>{<tags>}. + * + * @return Returns a metric identifier for the histogram. Will never return null. + */ + @JsonIgnore + public String getIdentifier() { + + String tags = ""; + Map<String, String> sortedTags = getTags(); + if(!sortedTags.isEmpty()) { + StringBuilder tagListBuffer = new StringBuilder("{"); + for (String tagKey : sortedTags.keySet()) { + tagListBuffer.append(tagKey).append('=').append(sortedTags.get(tagKey)).append(','); + } + + tags = tagListBuffer.substring(0, tagListBuffer.length() - 1).concat("}"); + } + + Object[] params = { getScope(), getMetric(), tags }; + String format = "{0}:{1}" + "{2}"; + + return MessageFormat.format(format, params); + } + +} +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file
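A minimal sketch exercising the new Histogram entity; scope, metric, and bucket values are illustrative:

    Histogram h = new Histogram("argus.core", "request.latency");
    h.setTimestamp(System.currentTimeMillis());
    h.setUnits("ms");
    // Buckets are keyed by (lowerBound, upperBound); the backing TreeMap keeps them sorted.
    h.addBucket(0f, 10f, 120L);
    h.addBucket(10f, 100f, 42L);
    h.setUnderflow(3L); // samples below the lowest bucket
    h.setOverflow(1L);  // samples above the highest bucket
    h.getIdentifier();  // "argus.core:request.latency" (no tags set)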
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucket.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucket.java new file mode 100644 index 000000000..858102b3f --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucket.java @@ -0,0 +1,57 @@ +package com.salesforce.dva.argus.entity; + +import java.io.Serializable; + +import com.fasterxml.jackson.annotation.JsonValue; + +/** + * HistogramBucket object that encompasses the lower and upper bounds of a histogram bucket. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public class HistogramBucket implements Serializable, Comparable<HistogramBucket> { + private static final long serialVersionUID = 1L; + private float lowerBound; + private float upperBound; + + public HistogramBucket(int lowerBound, int upperBound) { + this.lowerBound = lowerBound; + this.upperBound = upperBound; + } + + public HistogramBucket(float lowerBound, float upperBound) { + this.lowerBound = lowerBound; + this.upperBound = upperBound; + } + + public HistogramBucket(String value) { + String[] bounds = value.split(","); + this.lowerBound = Float.parseFloat(bounds[0].trim()); + this.upperBound = Float.parseFloat(bounds[1].trim()); + } + + public float getLowerBound() { + return lowerBound; + } + + public float getUpperBound() { + return upperBound; + } + + @Override + public int compareTo(HistogramBucket that) { + if(this.equals(that)){ + return 0; + } else { + int lowerBoundCompare = Float.compare(this.lowerBound, that.lowerBound); + if(lowerBoundCompare != 0) return lowerBoundCompare; + else return Float.compare(this.upperBound, that.upperBound); + } + } + + @Override + @JsonValue + public String toString() { + return lowerBound + "," + upperBound; + } +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketDeserializer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketDeserializer.java new file mode 100644 index 000000000..69e58ec0a --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketDeserializer.java @@ -0,0 +1,21 @@ +package com.salesforce.dva.argus.entity; + +import java.io.IOException; + +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.KeyDeserializer; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * HistogramBucket Deserializer + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public class HistogramBucketDeserializer extends KeyDeserializer { + private static final ObjectMapper mapper = new ObjectMapper(); + + @Override + public HistogramBucket deserializeKey(String key, DeserializationContext ctxt) throws IOException { + return mapper.readValue(key, HistogramBucket.class); + } +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketSerializer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketSerializer.java new file mode 100644 index 000000000..b217e15c9 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/HistogramBucketSerializer.java @@ -0,0 +1,25 @@ +package com.salesforce.dva.argus.entity; + +import java.io.IOException; +import java.io.StringWriter; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; + +/** + * HistogramBucket Serializer + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public class HistogramBucketSerializer extends JsonSerializer<HistogramBucket> { + private static final ObjectMapper mapper = new ObjectMapper(); + + @Override + public void serialize(HistogramBucket value, JsonGenerator jgen, SerializerProvider serializers) throws IOException { + StringWriter writer = new StringWriter(); + mapper.writeValue(writer, value); + jgen.writeFieldName(writer.toString()); + } +}
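Because HistogramBucket serves as a map key, the serializer/deserializer pair above round-trips it through its "lower,upper" text form. A small sketch, assuming Jackson databind on the classpath:

    ObjectMapper mapper = new ObjectMapper();
    HistogramBucket bucket = new HistogramBucket(0f, 10f);
    mapper.writeValueAsString(bucket); // "0.0,10.0" via the @JsonValue toString()
    // As a map key, HistogramBucketSerializer emits that same text with
    // jgen.writeFieldName(...); HistogramBucketDeserializer parses it back via
    // mapper.readValue(key, HistogramBucket.class), which resolves to the
    // HistogramBucket(String) constructor splitting on the comma.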
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImagePoints.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImagePoints.java new file mode 100644 index 000000000..6f5852141 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImagePoints.java @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.entity; + + +import org.apache.commons.lang.StringUtils; +import java.io.Serializable; + +/** + * This class represents a point in an XY graph, along with the display properties (label and color) associated with it. + */ +public class ImagePoints implements Cloneable, Serializable { + + /** The first value. */ + private double firstPoint; + + /** The second value.
*/ + private double secondPoint; + + /** The label */ + private String label = StringUtils.EMPTY; + + /** The Color */ + private ImageProperties.ImageColors color = ImageProperties.DEFAULT_COLOR; + + /** + * + * @param firstPoint Represents first point in the XY Axis + * @param secondPoint Represents second point in the XY Axis + * @param label Represents label associated with the points + */ + public ImagePoints(double firstPoint, double secondPoint, String label) { + this.firstPoint = firstPoint; + this.secondPoint = secondPoint; + this.label = label; + } + + /** + * + * @param firstPoint Represents first point in the XY Axis + * @param secondPoint Represents second point in the XY Axis + * @param color Represents color associated with the points + */ + public ImagePoints(double firstPoint, double secondPoint, ImageProperties.ImageColors color) { + this.firstPoint = firstPoint; + this.secondPoint = secondPoint; + this.color = color; + } + + /** + * + * @param firstPoint Represents first point in the XY Axis + * @param secondPoint Represents second point in the XY Axis + * @param label Represents label associated with the points + * @param color Represents color associated with the points + */ + public ImagePoints(double firstPoint, double secondPoint, String label, ImageProperties.ImageColors color) { + this.firstPoint = firstPoint; + this.secondPoint = secondPoint; + this.label = label; + this.color = color; + } + + /** + * Gets first point in the XY Axis + * @return first point in the XY Axis + */ + public double getFirstPoint() { + return firstPoint; + } + + /** + * Sets first point in the XY Axis + * @param firstPoint The first point to set + */ + public void setFirstPoint(double firstPoint) { + this.firstPoint = firstPoint; + } + + /** + * Gets second point in the XY Axis + * @return second point in the XY Axis + */ + public double getSecondPoint() { + return secondPoint; + } + + /** + * Sets second point in the XY Axis + * @param secondPoint The second point to set + */ + public void setSecondPoint(double secondPoint) { + this.secondPoint = secondPoint; + } + + /** + * Gets the label associated with the points + * @return Label is returned + */ + public String getLabel() { + return label; + } + + /** + * Sets the label associated with the points + * @param label The label to set + */ + public void setLabel(String label) { + this.label = label; + } + + /** + * Gets the color associated with the points + * @return Color associated + */ + public ImageProperties.ImageColors getColor() { + return color; + } + + /** + * Sets the color associated with the points + * @param color Color associated + */ + public void setColor(ImageProperties.ImageColors color) { + this.color = color; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImageProperties.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImageProperties.java new file mode 100644 index 000000000..453ad96b0 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ImageProperties.java @@ -0,0 +1,231 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2.
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.entity; + +import org.apache.commons.lang.StringUtils; +import java.awt.Color; +import java.util.List; + +public class ImageProperties { + + + private static final String DEFAULT_IMAGE_XAXIS_LABEL="Time"; + private static final String DEFAULT_IMAGE_YAXIS_LABEL="Value"; + private static final String DEFAULT_IMAGE_CHART_NAME = StringUtils.EMPTY; + private static final int DEFAULT_IMAGE_WIDTH = 1100; + private static final int DEFAULT_IMAGE_HEIGHT = 550; + public static final ImageColors DEFAULT_COLOR = ImageColors.VERY_LIGHT_BLUE; + + private int imageWidth = DEFAULT_IMAGE_WIDTH; + private int imageHeight = DEFAULT_IMAGE_HEIGHT; + private String chartName = DEFAULT_IMAGE_CHART_NAME; + private String xAxisName = DEFAULT_IMAGE_XAXIS_LABEL; + private String yAxisName = DEFAULT_IMAGE_YAXIS_LABEL; + private List<ImagePoints> shadeXAxisArea; + private List<ImagePoints> shadeYAxisArea; + private List<ImagePoints> labelPoints; + + + /** + * Gets the Image Width. Default value is 1100 + * @return Returns the Image width + */ + public int getImageWidth() { + return imageWidth; + } + + + /** + * Sets the Image Width of the JPG Image + * @param imageWidth imageWidth value + */ + public void setImageWidth(int imageWidth) { + this.imageWidth = imageWidth; + } + + + /** + * Gets the Image Height. Default value is 550 + * @return Returns the Image Height + */ + public int getImageHeight() { + return imageHeight; + } + + /** + * Sets the Image Height of the JPG Image + * @param imageHeight imageHeight Value + */ + public void setImageHeight(int imageHeight) { + this.imageHeight = imageHeight; + } + + /** + * Gets the Chart Name of the Image. By default, the chart name is empty + * @return Returns the chart name + */ + public String getChartName() { + return chartName; + } + + /** + * Sets the Chart Name of the Image. + * @param chartName Chart Name value + */ + public void setChartName(String chartName) { + this.chartName = chartName; + } + + /** + * Gets the X-Axis Name of the Image.
By default the value is "Time" + * @return Returns the X-Axis Name + */ + public String getxAxisName() { + return xAxisName; + } + + /** + * Sets the X-Axis Name of the Image + * @param xAxisName X-Axis Name + */ + public void setxAxisName(String xAxisName) { + this.xAxisName = xAxisName; + } + + /** + * Gets the Y-Axis Name of the Image. By default the value is "Value" + * @return Returns the Y-Axis Name + */ + public String getyAxisName() { + return yAxisName; + } + + /** + * Sets the Y-Axis Name of the Image + * @param yAxisName Y-Axis Name + */ + public void setyAxisName(String yAxisName) { + this.yAxisName = yAxisName; + } + + /** + * Gets the list of ImagePoints used to shade the area parallel to the X-Axis + * @return List of ImagePoints to shade the X-Axis + */ + public List<ImagePoints> getShadeXAxisArea() { + return shadeXAxisArea; + } + + /** + * Sets the information required to shade the area parallel to the X-Axis. If y1 == y2, a straight line is drawn + * @param shadeXAxisArea List of ImagePoints where each ImagePoints represents y1,y2,label,Color + */ + public void setShadeXAxisArea(List<ImagePoints> shadeXAxisArea) { + this.shadeXAxisArea = shadeXAxisArea; + } + + /** + * Gets the list of ImagePoints used to shade the area parallel to the Y-Axis + * @return List of ImagePoints to shade the Y-Axis + */ + public List<ImagePoints> getShadeYAxisArea() { + return shadeYAxisArea; + } + + /** + * Sets the information required to shade the area parallel to the Y-Axis. If x1 == x2, a straight line is drawn + * @param shadeYAxisArea List of ImagePoints where each ImagePoints represents x1,x2,label,Color + */ + public void setShadeYAxisArea(List<ImagePoints> shadeYAxisArea) { + this.shadeYAxisArea = shadeYAxisArea; + } + + /** + * Gets the list of ImagePoints representing labels in the XY axis + * @return List of ImagePoints to plot labels + */ + public List<ImagePoints> getLabelPoints() { + return labelPoints; + } + + /** + * Sets the information required to label points in the XY axis + * @param labelPoints List of ImagePoints where each ImagePoints represents x1,y1,label,Color + */ + public void setLabelPoints(List<ImagePoints> labelPoints) { + this.labelPoints = labelPoints; + } + + public enum ImageColors { + + VERY_DARK_RED(new Color(0x80, 0x00, 0x00)), + DARK_RED(new Color(0xc0, 0x00, 0x00)), + LIGHT_RED(new Color(0xFF, 0x40, 0x40)), + VERY_LIGHT_RED(new Color(0xFF, 0x80, 0x80)), + VERY_DARK_YELLOW(new Color(0x80, 0x80, 0x00)), + DARK_YELLOW(new Color(0xC0, 0xC0, 0x00)), + LIGHT_YELLOW(new Color(0xFF, 0xFF, 0x40)), + VERY_LIGHT_YELLOW(new Color(0xFF, 0xFF, 0x80)), + VERY_DARK_GREEN(new Color(0x00, 0x80, 0x00)), + DARK_GREEN(new Color(0x00, 0xC0, 0x00)), + LIGHT_GREEN(new Color(0x40, 0xFF, 0x40)), + VERY_LIGHT_GREEN(new Color(0x80, 0xFF, 0x80)), + VERY_DARK_CYAN(new Color(0x00, 0x80, 0x80)), + DARK_CYAN(new Color(0x00, 0xC0, 0xC0)), + LIGHT_CYAN(new Color(0x40, 0xFF, 0xFF)), + VERY_LIGHT_CYAN(new Color(0x80, 0xFF, 0xFF)), + VERY_DARK_BLUE(new Color(0x00, 0x00, 0x80)), + DARK_BLUE(new Color(0x00, 0x00, 0xC0)), + LIGHT_BLUE(new Color(0x40, 0x40, 0xFF)), + VERY_LIGHT_BLUE(new Color(0x80, 0x80, 0xFF)), + VERY_DARK_MAGENTA(new Color(0x80, 0x00, 0x80)), + DARK_MAGENTA(new Color(0xC0, 0x00, 0xC0)), + LIGHT_MAGENTA(new Color(0xFF, 0x40, 0xFF)), + VERY_LIGHT_MAGENTA(new Color(0xFF, 0x80, 0xFF)), + VERY_LIGHT_PINK(new Color(255, 230, 230)); + + + private Color color; + + ImageColors(Color color) { + this.setColor(color); + } + + public Color getColor() { + return color; + } + + public void setColor(Color color) { + this.color = color; + } + } +} \ No newline at end of file
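A short sketch of wiring the two new entities together when configuring a chart; the values and the java.util.Arrays import are illustrative, and nothing here prescribes how the rendering service consumes the properties:

    ImageProperties props = new ImageProperties(); // 1100x550 with "Time"/"Value" axes by default
    props.setChartName("p95 request latency");
    // Shade a horizontal band between y=200 and y=300; if the two values were
    // equal, a straight line would be drawn instead.
    props.setShadeXAxisArea(Arrays.asList(
            new ImagePoints(200d, 300d, "warning band", ImageProperties.ImageColors.VERY_LIGHT_YELLOW)));
    // Label a single (x, y) point, e.g. a deploy marker.
    props.setLabelPoints(Arrays.asList(new ImagePoints(1554000000000d, 250d, "deploy")));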
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/JPAEntity.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/JPAEntity.java index 929cddcc7..082244838 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/JPAEntity.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/JPAEntity.java @@ -186,6 +186,7 @@ public static E findByPrimaryKey(EntityManager em, BigI TypedQuery query = em.createNamedQuery("JPAEntity.findByPrimaryKey", type); query.setHint("javax.persistence.cache.storeMode", "REFRESH"); + try { query.setParameter("id", id); query.setParameter("deleted", false); @@ -213,6 +214,7 @@ public static List findByPrimaryKeys(EntityManager e TypedQuery query = em.createNamedQuery("JPAEntity.findByPrimaryKeys", type); query.setHint("javax.persistence.cache.storeMode", "REFRESH"); + try { query.setParameter("ids", ids); query.setParameter("deleted", false); @@ -239,6 +241,7 @@ public static List findEntitiesMarkedForDeletion(Ent TypedQuery query = em.createNamedQuery("JPAEntity.findByDeleteMarker", type); query.setHint("javax.persistence.cache.storeMode", "REFRESH"); + try { query.setParameter("deleted", true); if(limit > 0) { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetatagsRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetatagsRecord.java index ebd6ffbe9..44aa14aa3 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetatagsRecord.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetatagsRecord.java @@ -31,12 +31,12 @@ package com.salesforce.dva.argus.entity; +import com.salesforce.dva.argus.entity.TSDBEntity.ReservedField; + import java.util.Collections; -import java.util.Map; import java.util.HashMap; -import java.util.Map.Entry; -import java.util.TreeMap; -import com.salesforce.dva.argus.entity.TSDBEntity.ReservedField; +import java.util.Map; +import java.util.Objects; /** @@ -52,7 +52,7 @@ * @author Kunal Nawale (knawale@salesforce.com) */ -public class MetatagsRecord { +public class MetatagsRecord extends AbstractSchemaRecord { private Map<String, String> _metatags = new HashMap<>(0); private String _key = null; @@ -140,4 +140,29 @@ public String getMetatagValue(String metatagKey) { public String removeMetatag(String metatagKey) { return _metatags.remove(metatagKey); } + + @Override + public String toBloomFilterKey() { + return getKey(); + } + + @Override + public int hashCode() { + return _key.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + MetatagsRecord other = (MetatagsRecord) obj; + return Objects.equals(_key, other._key) && Objects.equals(_metatags, other._metatags); + } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Metric.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Metric.java index a5783be6a..43b1e0a0b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Metric.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Metric.java @@ -32,15 +32,16 @@ package com.salesforce.dva.argus.entity; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.google.common.base.Objects; import com.salesforce.dva.argus.service.tsdb.MetricQuery; import com.salesforce.dva.argus.system.SystemAssert; import java.io.Serializable; import java.text.MessageFormat; import
java.util.Collections; +import java.util.Comparator; import java.util.Map; import java.util.Map.Entry; +import java.util.SortedMap; import java.util.TreeMap; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; @@ -58,16 +59,21 @@ * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com) */ @SuppressWarnings("serial") -public class Metric extends TSDBEntity implements Serializable { +public class Metric extends TSDBEntity implements Serializable, Comparable<Metric> { + + private static final Comparator<Metric> METRIC_COMPARATOR = Comparator + .comparing((Metric m) -> m.getScope().toLowerCase()) + .thenComparing(m -> m.getMetric().toLowerCase()) + .thenComparing(m -> m.getTags().toString().toLowerCase()); //~ Instance fields ****************************************************************************************************************************** private String _namespace; private String _displayName; private String _units; - private final Map<Long, Double> _datapoints; + private final SortedMap<Long, Double> _datapoints; private MetricQuery _query; - private MetatagsRecord _metatagsRecord = null; + private MetatagsRecord _metatagsRecord = null; //~ Constructors ********************************************************************************************************************************* @@ -146,7 +152,7 @@ public void setNamespace(String namespace) { * @return The map of time series data points. Will never be null, but may be empty. */ public Map<Long, Double> getDatapoints() { - return Collections.unmodifiableMap(_datapoints); + return Collections.unmodifiableSortedMap(_datapoints); } /** @@ -176,6 +182,17 @@ public void addDatapoints(Map<Long, Double> datapoints) { } } + /** + * Adds a single data point to the current map. + * + * @param time The timestamp of the point. If null, the point is ignored. + * @param value The value of the point. If null, the point is ignored. + */ + public void addDatapoint(Long time, Double value) { + if (time != null && value != null) { + _datapoints.put(time, value); + } + } + /** * If current set already has a value at that timestamp then sums up the datapoint value for that timestamp at coinciding cutoff boundary, @@ -345,8 +362,7 @@ public String getIdentifier() { String tags = ""; - Map<String, String> sortedTags = new TreeMap<>(); - sortedTags.putAll(getTags()); + Map<String, String> sortedTags = getTags(); if(!sortedTags.isEmpty()) { StringBuilder tagListBuffer = new StringBuilder("{"); for (String tagKey : sortedTags.keySet()) { @@ -375,10 +391,25 @@ public MetatagsRecord getMetatagsRecord() { /** * Replaces the metatags for a metric. MetatagsRecord cannot use any of the reserved tag names. * - * @param metatags The new metatags for the metric. + * @param metatagsRec The new metatags for the metric. */ public void setMetatagsRecord(MetatagsRecord metatagsRec) { _metatagsRecord = metatagsRec; } + + @Override + public int compareTo(Metric m) { + return METRIC_COMPARATOR.compare(this, m); + } + + /** + * Returns the number of data points present. + * + * @return The number of data points present + */ + + @JsonIgnore + public int getNumOfDatapoints() { + return (_datapoints == null) ? 0 : _datapoints.size(); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
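A sketch of the new ordering and single-point APIs; the (scope, metric) constructor is assumed to be Metric's existing one, and the values are illustrative:

    Metric a = new Metric("Argus.core", "CPU.usage");
    Metric b = new Metric("argus.core", "alert.eval");
    a.addDatapoint(1554000000000L, 42.0); // new single-point setter
    a.addDatapoint(1554000060000L, null); // ignored: a null time or value is skipped
    a.getNumOfDatapoints();               // 1
    // getDatapoints() is now an unmodifiable *sorted* view, so iteration is by timestamp.

    // METRIC_COMPARATOR orders case-insensitively by scope, then metric, then tags,
    // making sorts of mixed-case series deterministic: b sorts before a here.
    List<Metric> series = new ArrayList<>(Arrays.asList(a, b));
    Collections.sort(series);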
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecord.java index c4b82f894..374af242a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecord.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecord.java @@ -44,11 +44,11 @@ * * @author Tom Valine (tvaline@salesforce.com) */ -public class MetricSchemaRecord { +public class MetricSchemaRecord extends AbstractSchemaRecord { public static final String RETENTION_DISCOVERY = "_retention_discovery_"; public static final String EXPIRATION_TS = "ets"; //expiration timestamp - public static final int DEFAULT_RETENTION_DISCOVERY_DAYS = 45; + public static final int DEFAULT_RETENTION_DISCOVERY_DAYS = 52; public static final int MAX_RETENTION_DISCOVERY_DAYS = 120; //~ Instance fields ****************************************************************************************************************************** @@ -366,5 +366,9 @@ public String getStringValueForType(SchemaService.RecordType type) { } } + @Override + public String toBloomFilterKey() { + return constructKey(scope, metric, tagKey, tagValue, namespace, retentionDiscovery == null ? null : retentionDiscovery.toString()); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecordQuery.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecordQuery.java index f3e584a29..6c4e6dab0 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecordQuery.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MetricSchemaRecordQuery.java @@ -301,7 +301,10 @@ public MetricSchemaRecordQueryBuilder tagValue(String tagValue) { this.tagValue = tagValue; return this; } - + + /** + * @param limit The maximum number of hits to return. Set to 0 for unlimited results.
+ */ public MetricSchemaRecordQueryBuilder limit(int limit) { this.limit = limit; return this; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Notification.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Notification.java index 41247e159..6beb495fc 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Notification.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Notification.java @@ -43,34 +43,25 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.regex.Matcher; import java.util.regex.Pattern; -import javax.persistence.Basic; -import javax.persistence.Column; -import javax.persistence.ElementCollection; -import javax.persistence.Entity; -import javax.persistence.EntityManager; -import javax.persistence.FetchType; -import javax.persistence.JoinColumn; -import javax.persistence.JoinTable; -import javax.persistence.Lob; -import javax.persistence.ManyToMany; -import javax.persistence.ManyToOne; -import javax.persistence.NoResultException; -import javax.persistence.Query; -import javax.persistence.Table; -import javax.persistence.UniqueConstraint; +import javax.persistence.*; import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.SerializerProvider; import com.salesforce.dva.argus.service.AlertService; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; @@ -78,164 +69,176 @@ * Encapsulates information about an alert notification. When a condition is triggered, it sends one or more notifications. The interval over which * the trigger conditions are evaluated is the entire interval specified by the alert expression. 
* - * @author Tom Valine (tvaline@salesforce.com), Raj Sarkapally(rsarkapally@salesforce.com) + * @author Tom Valine (tvaline@salesforce.com), Raj Sarkapally(rsarkapally@salesforce.com) */ @SuppressWarnings("serial") @Entity -@Table(name = "NOTIFICATION", uniqueConstraints = @UniqueConstraint(columnNames = { "name", "alert_id" })) +@Table(name = "NOTIFICATION", uniqueConstraints = @UniqueConstraint(columnNames = {"name", "alert_id"})) public class Notification extends JPAEntity implements Serializable { - - public static class Serializer extends JsonSerializer { - - @Override - public void serialize(Notification notification, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException { - - jgen.writeStartObject(); - - jgen.writeStringField("id", notification.getId().toString()); - if(notification.getCreatedDate()!=null) { - jgen.writeNumberField("createdDate", notification.getCreatedDate().getTime()); - } - - if(notification.getModifiedDate()!=null) { - jgen.writeNumberField("modifiedDate", notification.getModifiedDate().getTime()); - } - - jgen.writeStringField("name", notification.getName()); - jgen.writeStringField("notifier", notification.getNotifierName()); - jgen.writeNumberField("cooldownPeriod", notification.getCooldownPeriod()); - jgen.writeBooleanField("srActionable", notification.getSRActionable()); - jgen.writeNumberField("severityLevel", notification.getSeverityLevel()); - - if(notification.getCustomText() != null) { - jgen.writeStringField("customText", notification.getCustomText()); - } - - jgen.writeArrayFieldStart("subscriptions"); - for(String subscription : notification.getSubscriptions()) { - jgen.writeString(subscription); - } - jgen.writeEndArray(); - - jgen.writeArrayFieldStart("metricsToAnnotate"); - for(String metricToAnnotate : notification.getMetricsToAnnotate()) { - jgen.writeString(metricToAnnotate); - } - jgen.writeEndArray(); - - jgen.writeArrayFieldStart("triggers"); - for(Trigger trigger : notification.getTriggers()) { - jgen.writeString(trigger.getId().toString()); - } - jgen.writeEndArray(); - - // Getting these values requires a lot of queries to rdbms at runtime, and so these are excluded for now - // as the current usecases do not need these values to be serialized - //jgen.writeObjectField("cooldownExpirationByTriggerAndMetric", notification.getCooldownExpirationMap()); - //jgen.writeObjectField("activeStatusByTriggerAndMetric", notification.getActiveStatusMap()); - - jgen.writeEndObject(); - - } - - } - - public static class Deserializer extends JsonDeserializer { - - @Override - public Notification deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { - - Notification notification = new Notification(); - - JsonNode rootNode = jp.getCodec().readTree(jp); - - BigInteger id = new BigInteger(rootNode.get("id").asText()); - notification.id = id; - - if(rootNode.get("modifiedDate")!=null) { - notification.setModifiedDate(Date.from(Instant.ofEpochMilli(rootNode.get("modifiedDate").asLong()))); - } - - if(rootNode.get("createdDate")!=null) { - notification.createdDate = Date.from(Instant.ofEpochMilli(rootNode.get("createdDate").asLong())); - } - - String name = rootNode.get("name").asText(); - notification.setName(name); - - String notifierName = rootNode.get("notifier").asText(); - notification.setNotifierName(notifierName); - - long cooldownPeriod = rootNode.get("cooldownPeriod").asLong(); - notification.setCooldownPeriod(cooldownPeriod); - - boolean srActionable = 
rootNode.get("srActionable").asBoolean(); - notification.setSRActionable(srActionable); - - int severity = rootNode.get("severityLevel").asInt(); - notification.setSeverityLevel(severity); - - if(rootNode.get("customText") != null) { - notification.setCustomText(rootNode.get("customText").asText()); - } - - List subscriptions = new ArrayList<>(); - JsonNode subscriptionsArrayNode = rootNode.get("subscriptions"); - if(subscriptionsArrayNode.isArray()) { - for(JsonNode subscriptionNode : subscriptionsArrayNode) { - subscriptions.add(subscriptionNode.asText()); - } - } - notification.setSubscriptions(subscriptions); - - List metricsToAnnotate = new ArrayList<>(); - JsonNode metricsToAnnotateArrayNode = rootNode.get("metricsToAnnotate"); - if(metricsToAnnotateArrayNode.isArray()) { - for(JsonNode metricToAnnotateNode : metricsToAnnotateArrayNode) { - metricsToAnnotate.add(metricToAnnotateNode.asText()); - } - } - notification.setMetricsToAnnotate(metricsToAnnotate); - - List triggers = new ArrayList<>(); - JsonNode triggersArrayNode = rootNode.get("triggers"); - if(triggersArrayNode.isArray()) { - for(JsonNode triggerNode : triggersArrayNode) { - BigInteger triggerId = new BigInteger(triggerNode.asText()); - Trigger trigger = new Trigger(); - trigger.id = triggerId; - triggers.add(trigger); - } - } - notification.setTriggers(triggers); - - // Commenting this part out as these fields are not currently serialized - /*Map activeStatusByTriggerAndMetric = new HashMap<>(); - JsonNode activeStatusByTriggerAndMetricNode = rootNode.get("activeStatusByTriggerAndMetric"); - if(activeStatusByTriggerAndMetricNode.isObject()) { - Iterator> fieldsIter = activeStatusByTriggerAndMetricNode.fields(); - while(fieldsIter.hasNext()) { - Entry field = fieldsIter.next(); - activeStatusByTriggerAndMetric.put(field.getKey(), field.getValue().asBoolean()); - } - } - notification.activeStatusByTriggerAndMetric = activeStatusByTriggerAndMetric; - - Map cooldownExpirationByTriggerAndMetric = new HashMap<>(); - JsonNode cooldownExpirationByTriggerAndMetricNode = rootNode.get("cooldownExpirationByTriggerAndMetric"); - if(cooldownExpirationByTriggerAndMetricNode.isObject()) { - Iterator> fieldsIter = cooldownExpirationByTriggerAndMetricNode.fields(); - while(fieldsIter.hasNext()) { - Entry field = fieldsIter.next(); - cooldownExpirationByTriggerAndMetric.put(field.getKey(), field.getValue().asLong()); - } - } - notification.cooldownExpirationByTriggerAndMetric = cooldownExpirationByTriggerAndMetric;*/ - - return notification; - } - - } + + public static class Serializer extends JsonSerializer { + + @Override + public void serialize(Notification notification, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException { + + jgen.writeStartObject(); + + jgen.writeStringField("id", notification.getId().toString()); + if (notification.getCreatedDate() != null) { + jgen.writeNumberField("createdDate", notification.getCreatedDate().getTime()); + } + + if (notification.getModifiedDate() != null) { + jgen.writeNumberField("modifiedDate", notification.getModifiedDate().getTime()); + } + + jgen.writeStringField("name", notification.getName()); + jgen.writeStringField("notifier", notification.getNotifierName()); + jgen.writeNumberField("cooldownPeriod", notification.getCooldownPeriod()); + jgen.writeBooleanField("srActionable", notification.getSRActionable()); + jgen.writeNumberField("severityLevel", notification.getSeverityLevel()); + + String customText = notification.getCustomText(); + String 
notificationCustomData = notification.getNotificationCustomData(); + + jgen.writeArrayFieldStart("subscriptions"); + for (String subscription : notification.getSubscriptions()) { + jgen.writeString(subscription.trim()); + } + jgen.writeEndArray(); + + jgen.writeArrayFieldStart("metricsToAnnotate"); + for (String metricToAnnotate : notification.getMetricsToAnnotate()) { + jgen.writeString(metricToAnnotate); + } + jgen.writeEndArray(); + + jgen.writeArrayFieldStart("triggers"); + for (Trigger trigger : notification.getTriggers()) { + jgen.writeString(trigger.getId().toString()); + } + jgen.writeEndArray(); + + if (customText != null) { + jgen.writeStringField("customText", customText); + } + + if(notificationCustomData != null) { + jgen.writeStringField("notificationCustomData", notificationCustomData); + } + + // Getting these values requires a lot of queries to rdbms at runtime, and so these are excluded for now + // as the current usecases do not need these values to be serialized + //jgen.writeObjectField("cooldownExpirationByTriggerAndMetric", notification.getCooldownExpirationMap()); + //jgen.writeObjectField("activeStatusByTriggerAndMetric", notification.getActiveStatusMap()); + + jgen.writeEndObject(); + + } + + } + + public static class Deserializer extends JsonDeserializer { + + @Override + public Notification deserialize(com.fasterxml.jackson.core.JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException { + + Notification notification = new Notification(); + + JsonNode rootNode = jp.getCodec().readTree(jp); + + BigInteger id = new BigInteger(rootNode.get("id").asText()); + notification.id = id; + + if (rootNode.get("modifiedDate") != null) { + notification.setModifiedDate(Date.from(Instant.ofEpochMilli(rootNode.get("modifiedDate").asLong()))); + } + + if (rootNode.get("createdDate") != null) { + notification.createdDate = Date.from(Instant.ofEpochMilli(rootNode.get("createdDate").asLong())); + } + + String name = rootNode.get("name").asText(); + notification.setName(name); + + String notifierName = rootNode.get("notifier").asText(); + notification.setNotifierName(notifierName); + + long cooldownPeriod = rootNode.get("cooldownPeriod").asLong(); + notification.setCooldownPeriod(cooldownPeriod); + + boolean srActionable = rootNode.get("srActionable").asBoolean(); + notification.setSRActionable(srActionable); + + + int severity = rootNode.get("severityLevel").asInt(); + notification.setSeverityLevel(severity); + + if (rootNode.get("customText") != null) { + notification.setCustomText(rootNode.get("customText").asText()); + } + + if (rootNode.get("notificationCustomData") != null) { + notification.setNotificationCustomData(rootNode.get("notificationCustomData").asText()); + } + + List subscriptions = new ArrayList<>(); + JsonNode subscriptionsArrayNode = rootNode.get("subscriptions"); + if (subscriptionsArrayNode.isArray()) { + for (JsonNode subscriptionNode : subscriptionsArrayNode) { + subscriptions.add(subscriptionNode.asText().trim()); + } + } + notification.setSubscriptions(subscriptions); + + List metricsToAnnotate = new ArrayList<>(); + JsonNode metricsToAnnotateArrayNode = rootNode.get("metricsToAnnotate"); + if (metricsToAnnotateArrayNode.isArray()) { + for (JsonNode metricToAnnotateNode : metricsToAnnotateArrayNode) { + metricsToAnnotate.add(metricToAnnotateNode.asText()); + } + } + notification.setMetricsToAnnotate(metricsToAnnotate); + + List triggers = new ArrayList<>(); + JsonNode triggersArrayNode = rootNode.get("triggers"); + if 
(triggersArrayNode.isArray()) { + for (JsonNode triggerNode : triggersArrayNode) { + BigInteger triggerId = new BigInteger(triggerNode.asText()); + Trigger trigger = new Trigger(); + trigger.id = triggerId; + triggers.add(trigger); + } + } + notification.setTriggers(triggers); + + // Commenting this part out as these fields are not currently serialized + /*Map activeStatusByTriggerAndMetric = new HashMap<>(); + JsonNode activeStatusByTriggerAndMetricNode = rootNode.get("activeStatusByTriggerAndMetric"); + if(activeStatusByTriggerAndMetricNode.isObject()) { + Iterator> fieldsIter = activeStatusByTriggerAndMetricNode.fields(); + while(fieldsIter.hasNext()) { + Entry field = fieldsIter.next(); + activeStatusByTriggerAndMetric.put(field.getKey(), field.getValue().asBoolean()); + } + } + notification.activeStatusByTriggerAndMetric = activeStatusByTriggerAndMetric; + + Map cooldownExpirationByTriggerAndMetric = new HashMap<>(); + JsonNode cooldownExpirationByTriggerAndMetricNode = rootNode.get("cooldownExpirationByTriggerAndMetric"); + if(cooldownExpirationByTriggerAndMetricNode.isObject()) { + Iterator> fieldsIter = cooldownExpirationByTriggerAndMetricNode.fields(); + while(fieldsIter.hasNext()) { + Entry field = fieldsIter.next(); + cooldownExpirationByTriggerAndMetric.put(field.getKey(), field.getValue().asLong()); + } + } + notification.cooldownExpirationByTriggerAndMetric = cooldownExpirationByTriggerAndMetric;*/ + + return notification; + } + + } //~ Instance fields ****************************************************************************************************************************** @@ -246,47 +249,60 @@ public Notification deserialize(JsonParser jp, DeserializationContext ctxt) thro String notifierName; @ElementCollection - @Column(length = 2048) + @Column(length = 2048) List subscriptions = new ArrayList<>(0); @ElementCollection - List metricsToAnnotate = new ArrayList<>(0); + List metricsToAnnotate = new ArrayList<>(0); long cooldownPeriod; - @ManyToOne(optional = false, fetch=FetchType.LAZY) + @ManyToOne(optional = false, fetch = FetchType.LAZY) @JoinColumn(name = "alert_id") private Alert alert; - @ManyToMany + @ManyToMany @JoinTable( - name = "NOTIFICATION_TRIGGER", joinColumns = @JoinColumn(name = "TRIGGER_ID"), inverseJoinColumns = @JoinColumn(name = "NOTIFICATION_ID") + name = "NOTIFICATION_TRIGGER", joinColumns = @JoinColumn(name = "TRIGGER_ID"), inverseJoinColumns = @JoinColumn(name = "NOTIFICATION_ID") ) List triggers = new ArrayList<>(0); - boolean isSRActionable = false; + boolean isSRActionable = false; - int severityLevel = 5; + int severityLevel = 5; @Lob private String customText; + @Lob + private String notificationCustomData; + + private static String EVENT_NAME_KEY = "__eventName__"; + private static String ELEMENT_NAME_KEY = "__elementName__"; + private static String PRODUCT_TAG_KEY = "__productTag__"; + private static String ARTICLE_NUMBER_KEY = "__articleNumber__"; + private static String ENABLE_CLEAR_NOTIFICATION_KEY = "__enableClearNotification__"; // used by EmailNotifier and GusNotifier + private static String EMAIL_SUBJECT_KEY = "__emailSubject__"; // used by EmailNotifier + + @ElementCollection private Map cooldownExpirationByTriggerAndMetric = new HashMap<>(); @ElementCollection private Map activeStatusByTriggerAndMetric = new HashMap<>(); + @Transient + private final Logger _logger = LoggerFactory.getLogger(Notification.class); //~ Constructors 
********************************************************************************************************************************* /** * Creates a new Notification object with a cool down of one hour and having specified no metrics on which to create annotations. * - * @param name The notification name. Cannot be null or empty. - * @param alert The alert with which the notification is associated. - * @param notifierName The notifier implementation class name. - * @param subscriptions The notifier specific list of subscriptions to which notification shall be sent. - * @param cooldownPeriod The cool down period of the notification + * @param name The notification name. Cannot be null or empty. + * @param alert The alert with which the notification is associated. + * @param notifierName The notifier implementation class name. + * @param subscriptions The notifier specific list of subscriptions to which notification shall be sent. + * @param cooldownPeriod The cool down period of the notification */ public Notification(String name, Alert alert, String notifierName, List subscriptions, long cooldownPeriod) { super(alert.getOwner()); @@ -295,9 +311,12 @@ public Notification(String name, Alert alert, String notifierName, List setNotifierName(notifierName); setSubscriptions(subscriptions); setCooldownPeriod(cooldownPeriod); + initializeNotificationData(); } - /** Creates a new Notification object. */ + /** + * Creates a new Notification object. + */ protected Notification() { super(null); } @@ -305,53 +324,53 @@ protected Notification() { //~ Static Methods ******************************************************************************************************************************* @SuppressWarnings("unchecked") - public static void updateActiveStatusAndCooldown(EntityManager em, List notifications) { - requireArgument(em != null, "Entity manager can not be null."); - - if(notifications.isEmpty()) return; + public static void updateActiveStatusAndCooldown(EntityManager em, List notifications) { + requireArgument(em != null, "Entity manager can not be null."); - Map notificationsByIds = new HashMap<>(notifications.size()); + if (notifications.isEmpty()) return; - StringBuilder sb = new StringBuilder(); - for(Notification n : notifications) { - notificationsByIds.put(n.getId(), n); - n.activeStatusByTriggerAndMetric.clear(); - n.cooldownExpirationByTriggerAndMetric.clear(); - sb.append(n.getId()).append(","); - } + Map notificationsByIds = new HashMap<>(notifications.size()); - String ids = sb.substring(0, sb.length()-1); - try { - Query q = em.createNativeQuery("select * from notification_cooldownexpirationbytriggerandmetric where notification_id IN (" + ids + ")"); - List objects = q.getResultList(); + StringBuilder sb = new StringBuilder(); + for (Notification n : notifications) { + notificationsByIds.put(n.getId(), n); + n.activeStatusByTriggerAndMetric.clear(); + n.cooldownExpirationByTriggerAndMetric.clear(); + sb.append(n.getId()).append(","); + } - for(Object[] object : objects) { - BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0]))); - Long cooldownExpiration = Long.class.cast(object[1]); - String key = String.class.cast(object[2]); - notificationsByIds.get(notificationId).cooldownExpirationByTriggerAndMetric.put(key, cooldownExpiration); - } + String ids = sb.substring(0, sb.length() - 1); + try { + Query q = em.createNativeQuery("select * from notification_cooldownexpirationbytriggerandmetric where notification_id IN (" + ids + ")"); + List objects = 
q.getResultList(); - q = em.createNativeQuery("select * from notification_activestatusbytriggerandmetric where notification_id IN (" + ids + ")"); - objects = q.getResultList(); + for (Object[] object : objects) { + BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0]))); + Long cooldownExpiration = Long.class.cast(object[1]); + String key = String.class.cast(object[2]); + notificationsByIds.get(notificationId).cooldownExpirationByTriggerAndMetric.put(key, cooldownExpiration); + } - for(Object[] object : objects) { - BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0]))); - Boolean isActive; - try { - isActive = Boolean.class.cast(object[1]); - } catch (ClassCastException e) { - // This is because Embedded Derby stores booleans as 0, 1. - isActive = Integer.class.cast(object[1]) == 0 ? Boolean.FALSE : Boolean.TRUE; - } + q = em.createNativeQuery("select * from notification_activestatusbytriggerandmetric where notification_id IN (" + ids + ")"); + objects = q.getResultList(); + + for (Object[] object : objects) { + BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0]))); + Boolean isActive; + try { + isActive = Boolean.class.cast(object[1]); + } catch (ClassCastException e) { + // This is because Embedded Derby stores booleans as 0, 1. + isActive = Integer.class.cast(object[1]) == 0 ? Boolean.FALSE : Boolean.TRUE; + } - String key = String.class.cast(object[2]); - notificationsByIds.get(notificationId).activeStatusByTriggerAndMetric.put(key, isActive); - } + String key = String.class.cast(object[2]); + notificationsByIds.get(notificationId).activeStatusByTriggerAndMetric.put(key, isActive); + } - } catch(NoResultException ex) { - return; - } + } catch (NoResultException ex) { + return; + } } @@ -360,16 +379,15 @@ public static void updateActiveStatusAndCooldown(EntityManager em, List getSubscriptions() { return Collections.unmodifiableList(subscriptions); @@ -444,12 +462,12 @@ public List getSubscriptions() { /** * Replaces the subscriptions used by the notifier to send the notifications. * - * @param subscriptions The subscription list. + * @param subscriptions The subscription list. */ public void setSubscriptions(List subscriptions) { this.subscriptions.clear(); - if(subscriptions == null) return; - for(String currentSubscription: subscriptions) { + if (subscriptions == null) return; + for (String currentSubscription : subscriptions) { if (this.getNotifierName().equals(AlertService.SupportedNotifier.GUS.getName())) { if (currentSubscription.isEmpty() || currentSubscription.length() < 10) throw new IllegalArgumentException("GUS Subscription has to contain subjectId with more than 10 characters."); @@ -472,7 +490,7 @@ public void setSubscriptions(List subscriptions) { /** * Returns the cool down period of notification. * - * @return cool down period in milliseconds + * @return cool down period in milliseconds */ public long getCooldownPeriod() { return cooldownPeriod; @@ -481,7 +499,7 @@ public long getCooldownPeriod() { /** * Sets the cool down period to notification. * - * @param cooldownPeriod cool down period in milliseconds + * @param cooldownPeriod cool down period in milliseconds */ public void setCooldownPeriod(long cooldownPeriod) { requireArgument(cooldownPeriod >= 0, "Cool down period cannot be negative."); @@ -491,41 +509,46 @@ public void setCooldownPeriod(long cooldownPeriod) { /** * Returns the cool down expiration time of the notification given a metric,trigger combination. 
* - * @param trigger The trigger - * @param metric The metric - * @return cool down expiration time in milliseconds + * @param trigger The trigger + * @param metric The metric + * @return cool down expiration time in milliseconds */ public long getCooldownExpirationByTriggerAndMetric(Trigger trigger, Metric metric) { - String key = _hashTriggerAndMetric(trigger, metric); - return this.cooldownExpirationByTriggerAndMetric.containsKey(key) ? this.cooldownExpirationByTriggerAndMetric.get(key) : 0; + + if (trigger == null) + { + return 0; + } + String key = _hashTriggerAndMetric(trigger, metric); + return this.cooldownExpirationByTriggerAndMetric.containsKey(key) ? this.cooldownExpirationByTriggerAndMetric.get(key) : 0; } /** * Sets the cool down expiration time of the notification given a metric,trigger combination. * - * @param trigger The trigger - * @param metric The metric - * @param cooldownExpiration cool down expiration time in milliseconds + * @param trigger The trigger + * @param metric The metric + * @param cooldownExpiration cool down expiration time in milliseconds */ public void setCooldownExpirationByTriggerAndMetric(Trigger trigger, Metric metric, long cooldownExpiration) { requireArgument(cooldownExpiration >= 0, "Cool down expiration time cannot be negative."); - String key = _hashTriggerAndMetric(trigger, metric); - this.cooldownExpirationByTriggerAndMetric.put(key, cooldownExpiration); + String key = _hashTriggerAndMetric(trigger, metric); + this.cooldownExpirationByTriggerAndMetric.put(key, cooldownExpiration); } public Map getCooldownExpirationMap() { - return cooldownExpirationByTriggerAndMetric; - } + return cooldownExpirationByTriggerAndMetric; + } public void setCooldownExpirationMap(Map cooldownExpirationByTriggerAndMetric) { - this.cooldownExpirationByTriggerAndMetric = cooldownExpirationByTriggerAndMetric; - } + this.cooldownExpirationByTriggerAndMetric = cooldownExpirationByTriggerAndMetric; + } - /** + /** * Returns all metrics to be annotated. * - * @return list of metrics + * @return list of metrics */ public List getMetricsToAnnotate() { return metricsToAnnotate; @@ -534,7 +557,7 @@ public List getMetricsToAnnotate() { /** * Sets metrics to be annotated. * - * @param metricsToAnnotate list of metrics. + * @param metricsToAnnotate list of metrics. */ public void setMetricsToAnnotate(List metricsToAnnotate) { this.metricsToAnnotate.clear(); @@ -553,7 +576,7 @@ public boolean onCooldown(Trigger trigger, Metric metric) { /** * returns the notification name. * - * @return notification name. + * @return notification name. */ public String getName() { return name; @@ -562,7 +585,7 @@ public String getName() { /** * Sets the notification name. * - * @param name Notification name. Cannot be null or empty. + * @param name Notification name. Cannot be null or empty. */ public void setName(String name) { this.name = name; @@ -571,7 +594,7 @@ public void setName(String name) { /** * Returns the triggers associated with the notification. * - * @return The triggers associated with the notification. + * @return The triggers associated with the notification. */ public List getTriggers() { return Collections.unmodifiableList(triggers); @@ -580,7 +603,7 @@ public List getTriggers() { /** * Replaces the triggers associated with the notification. * - * @param triggers The triggers associated with the notification. + * @param triggers The triggers associated with the notification. 
*/ public void setTriggers(List triggers) { this.triggers.clear(); @@ -592,50 +615,50 @@ public void setTriggers(List triggers) { /** * Given a metric,notification combination, indicates whether a triggering condition associated with this notification is still in a triggering state. * - * @param trigger The Trigger that caused this notification - * @param metric The metric that caused this notification - * - * @return True if the triggering condition is still in a triggering state. + * @param trigger The Trigger that caused this notification + * @param metric The metric that caused this notification + * @return True if the triggering condition is still in a triggering state. */ public boolean isActiveForTriggerAndMetric(Trigger trigger, Metric metric) { - String key = _hashTriggerAndMetric(trigger, metric); - return this.activeStatusByTriggerAndMetric.containsKey(key) ? activeStatusByTriggerAndMetric.get(key) : false; + String key = _hashTriggerAndMetric(trigger, metric); + return this.activeStatusByTriggerAndMetric.containsKey(key) ? activeStatusByTriggerAndMetric.get(key) : false; } /** * When a notification is sent out when a metric violates the trigger threshold, set this notification active for that trigger,metric combination * - * @param trigger The Trigger that caused this notification - * @param metric The metric that caused this notification - * @param active Whether to set the notification to active + * @param trigger The Trigger that caused this notification + * @param metric The metric that caused this notification + * @param active Whether to set the notification to active */ public void setActiveForTriggerAndMetric(Trigger trigger, Metric metric, boolean active) { - String key = _hashTriggerAndMetric(trigger, metric); - this.activeStatusByTriggerAndMetric.put(key, active); + String key = _hashTriggerAndMetric(trigger, metric); + this.activeStatusByTriggerAndMetric.put(key, active); } /** * Indicates whether the notification is monitored by SR * - * @return True if notification is monitored by SR + * @return True if notification is monitored by SR */ public boolean getSRActionable() { return isSRActionable; } - + /** * Specifies whether the notification should be monitored by SR (actionable by SR) * - * @param isSRActionable True if SR should monitor the notification + * @param isSRActionable True if SR should monitor the notification */ public void setSRActionable(boolean isSRActionable) { + //TODO: If SRActionable is checked, article number should be present. We should report the users regarding badly configured alerts and add check. 
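        // A hedged sketch of the validation the TODO above describes (not part of this
        // change); it assumes getArticleNumber() returns null or blank until the GOC
        // article number has been configured:
        //
        //   if (isSRActionable && (getArticleNumber() == null || getArticleNumber().trim().isEmpty())) {
        //       throw new IllegalArgumentException("SR-actionable notifications require an article number.");
        //   }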
this.isSRActionable = isSRActionable; } /** * Gets the severity level of notification * - * @return The severity level + * @return The severity level */ public int getSeverityLevel() { return severityLevel; @@ -644,7 +667,7 @@ public int getSeverityLevel() { /** * Sets the severity level of notification * - * @param severityLevel The severity level + * @param severityLevel The severity level */ public void setSeverityLevel(int severityLevel) { if (severityLevel < 1 || severityLevel > 5) { @@ -653,36 +676,149 @@ public void setSeverityLevel(int severityLevel) { this.severityLevel = severityLevel; } - public Map getActiveStatusMap() { - return activeStatusByTriggerAndMetric; - } - - public void setActiveStatusMap(Map activeStatusByTriggerAndMetric) { - this.activeStatusByTriggerAndMetric = activeStatusByTriggerAndMetric; - } + public Map getActiveStatusMap() { + return activeStatusByTriggerAndMetric; + } + + public void setActiveStatusMap(Map activeStatusByTriggerAndMetric) { + this.activeStatusByTriggerAndMetric = activeStatusByTriggerAndMetric; + } /** * Return the custom text in order to include in the notification - * @return the customText is optional - */ - public String getCustomText() { - return customText; - } - - /** - * Sets the custom text to the notification - * @param customText customText is optional - */ - public void setCustomText(String customText) { - this.customText = customText; - } - - @Override + * + * @return the customText is optional + */ + public String getCustomText() { + return this.customText; + } + + /** + * Sets the custom text to the notification + * + * @param customText customText is optional + */ + public void setCustomText(String customText) { + this.customText = customText; + } + + public String getArticleNumber() { + return getGOCField(ARTICLE_NUMBER_KEY); + } + + public void setArticleNumber(String articleNumber) { + setGOCField(ARTICLE_NUMBER_KEY, articleNumber); + } + + public String getElementName() { + return getGOCField(ELEMENT_NAME_KEY); + } + + public void setElementName(String elementName) { + setGOCField(ELEMENT_NAME_KEY, elementName); + } + + public String getEventName() { + return getGOCField(EVENT_NAME_KEY); + } + + public void setEventName(String eventName) { + setGOCField(EVENT_NAME_KEY, eventName); + } + + public String getProductTag() { + return getGOCField(PRODUCT_TAG_KEY); + } + + public void setProductTag(String productTag) { + setGOCField(PRODUCT_TAG_KEY, productTag); + } + + public boolean isEnableClearNotification() { + String enableClearNotification = getGOCField(ENABLE_CLEAR_NOTIFICATION_KEY); + boolean result = true; // default value is true if enableClearNotification is not set + if (enableClearNotification != null && enableClearNotification.equalsIgnoreCase("false")) { + result = false; + } + return result; + } + + public void setEnableClearNotification(boolean enableClearNotification) { + setGOCField(ENABLE_CLEAR_NOTIFICATION_KEY, Boolean.toString(enableClearNotification)); + } + + public String getEmailSubject() { + return getGOCField(EMAIL_SUBJECT_KEY); + } + + public void setEmailSubject(String subject) { + setGOCField(EMAIL_SUBJECT_KEY, subject); + } + + private String getNotificationCustomData() { + return notificationCustomData; + } + + private void setNotificationCustomData(String notificationCustomData) { + this.notificationCustomData = notificationCustomData; + } + + private JsonObject getJsonObject() { + JsonObject GOCFields; + try { + GOCFields = new 
JsonParser().parse(this.notificationCustomData).getAsJsonObject(); + } catch (Exception ex) { + _logger.debug("Unable to parse notification object. Creating a new object. Exception: {}", ex.getMessage()); + GOCFields = getEmptyNotificationCustomData(); + } + return GOCFields; + } + + private void setGOCField(final String fieldName, final String fieldValue) { + JsonObject GOCFields = getJsonObject(); + GOCFields.addProperty(fieldName, fieldValue); + this.notificationCustomData = GOCFields.toString(); + } + + private String getGOCField(final String fieldName) { + JsonObject GOCFields = getJsonObject(); + if (GOCFields != null && GOCFields.has(fieldName)) { + return GOCFields.get(fieldName).isJsonNull() ? null : GOCFields.get(fieldName).getAsString(); + } else { + return null; + } + } + + private JsonObject getEmptyNotificationCustomData() { + JsonObject GOCFields = new JsonObject(); + GOCFields.addProperty(ARTICLE_NUMBER_KEY, ""); + GOCFields.addProperty(EVENT_NAME_KEY, ""); + GOCFields.addProperty(ELEMENT_NAME_KEY, ""); + GOCFields.addProperty(PRODUCT_TAG_KEY, ""); + GOCFields.addProperty(ENABLE_CLEAR_NOTIFICATION_KEY, ""); + GOCFields.addProperty(EMAIL_SUBJECT_KEY, ""); + return GOCFields; + } + + private void initializeNotificationData() { + this.notificationCustomData = getEmptyNotificationCustomData().toString(); + } + + @Override public int hashCode() { int hash = 5; hash = 29 * hash + Objects.hashCode(this.name); hash = 29 * hash + Objects.hashCode(this.alert); + hash = 29 * hash + Objects.hashCode(this.metricsToAnnotate); + hash = 29 * hash + Objects.hashCode(this.subscriptions); + hash = 29 * hash + Objects.hashCode(this.cooldownPeriod); + hash = 29 * hash + Objects.hashCode(this.isSRActionable); + hash = 29 * hash + Objects.hashCode(this.severityLevel); + hash = 29 * hash + Objects.hashCode(this.notifierName); + hash = 29 * hash + Objects.hashCode(this.customText); + hash = 29 * hash + Objects.hashCode(this.notificationCustomData); + return hash; } @@ -697,32 +833,32 @@ public boolean equals(Object obj) { final Notification other = (Notification) obj; - if (!Objects.equals(this.name, other.name)) { - return false; - } - if (!Objects.equals(this.alert, other.alert)) { + if (this.hashCode() != other.hashCode()) { return false; } + return true; } @Override public String toString() { return "Notification{" + "name=" + name + ", notifierName=" + notifierName + ", subscriptions=" + subscriptions + ", metricsToAnnotate=" + - metricsToAnnotate + ", cooldownPeriod=" + cooldownPeriod + ", triggers=" + triggers + ", severity=" + severityLevel + ", srActionable=" + isSRActionable + ", customText;" + customText + '}'; + metricsToAnnotate + ", cooldownPeriod=" + cooldownPeriod + ", triggers=" + triggers + ", severity=" + severityLevel + ", srActionable=" + isSRActionable + ", customText=" + customText + + ", eventName=" + getEventName() + ", elementName=" + getElementName() + ", productTag=" + getProductTag() + ", articleNumber=" + getArticleNumber() + + ", enableClearNotification=" + Boolean.toString(isEnableClearNotification()) + ", emailSubject=" + getEmailSubject() + '}'; } - private String _hashTriggerAndMetric(Trigger trigger, Metric metric) { - requireArgument(trigger != null, "Trigger cannot be null."); - requireArgument(metric != null, "Metric cannot be null"); + private String _hashTriggerAndMetric(Trigger trigger, Metric metric) { + requireArgument(trigger != null, "Trigger cannot be null."); + requireArgument(metric != null, "Metric cannot be null"); - if(trigger.getId()!=null) { - 
return trigger.getId().toString() + "$$" + metric.getIdentifier().hashCode(); - }else { - return "0$$" + metric.getIdentifier().hashCode(); - } - } + if (trigger.getId() != null) { + return trigger.getId().toString() + "$$" + metric.getIdentifier().hashCode(); + } else { + return "0$$" + metric.getIdentifier().hashCode(); + } + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeAndMetricOnlySchemaRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/QueryStoreRecord.java similarity index 52% rename from ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeAndMetricOnlySchemaRecord.java rename to ArgusCore/src/main/java/com/salesforce/dva/argus/entity/QueryStoreRecord.java index 582efa8fb..90547a8f7 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeAndMetricOnlySchemaRecord.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/QueryStoreRecord.java @@ -29,23 +29,57 @@ * POSSIBILITY OF SUCH DAMAGE. */ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + package com.salesforce.dva.argus.entity; import com.google.common.base.Objects; +import org.apache.commons.lang.StringUtils; import java.text.MessageFormat; /** - * Represents a search result row for scope and metric name only discovery queries. 
+ * Record that is used to represent Argus User Query to store in ES * - * @author Naveen Reddy Karri (nkarri@salesforce.com) + * @author ChandraVyas Annakula (cannakula@salesforce.com) */ -public class ScopeAndMetricOnlySchemaRecord { +public class QueryStoreRecord { + + private static final String QSR_BLOOM_IDENTIFICATION_KEY = "QueryStoreRecord"; private String scope; private String metric; - public ScopeAndMetricOnlySchemaRecord(String scope, String metric) { + public QueryStoreRecord(String scope, String metric) { setScope(scope); setMetric(metric); @@ -90,17 +124,17 @@ public boolean equals(Object obj) { return false; } - ScopeAndMetricOnlySchemaRecord other = (ScopeAndMetricOnlySchemaRecord) obj; + QueryStoreRecord other = (QueryStoreRecord) obj; return Objects.equal(scope, other.scope) && Objects.equal(metric, other.metric); } @Override public String toString() { - return MessageFormat.format("ScopeAndMetricOnlySchemaRecord (Scope = {0} Metric = {1}", scope, metric); + return MessageFormat.format("QueryStoreRecord (Scope = {0} Metric = {1}", scope, metric); } - public static String getFieldsAsString(ScopeAndMetricOnlySchemaRecord msr) { + public static String getFieldsAsString(QueryStoreRecord msr) { StringBuilder sb = new StringBuilder(msr.getScope()); sb.append(":"); @@ -108,5 +142,21 @@ public static String getFieldsAsString(ScopeAndMetricOnlySchemaRecord msr) { return sb.toString(); } + + public String toBloomFilterKey() { + return constructBloomKey(scope, metric); + } + + public static String constructBloomKey(String scope, String metric) { + StringBuilder sb = new StringBuilder(QSR_BLOOM_IDENTIFICATION_KEY); + + if(!StringUtils.isEmpty(scope)) { + sb.append('\0').append(scope); + } + if(!StringUtils.isEmpty(metric)) { + sb.append('\0').append(metric); + } + + return sb.toString(); + } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/SchemaQuery.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/SchemaQuery.java index 5c789a718..b0d4da771 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/SchemaQuery.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/SchemaQuery.java @@ -3,7 +3,7 @@ import com.salesforce.dva.argus.system.SystemAssert; public class SchemaQuery { - + protected int limit; protected int page; @@ -16,8 +16,11 @@ public SchemaQuery(int limit, int page) { setPage(page); } + /** + * @param limit Maximum amount of hits to return. Set to 0 for unbounded max / unlimited results. 
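     * <p>A brief sketch of the accepted values (illustrative only, per the assertion below):
     * <pre>
     *   query.setLimit(500);  // return at most 500 hits
     *   query.setLimit(0);    // unlimited results
     *   query.setLimit(-1);   // rejected: throws IllegalArgumentException
     * </pre>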
+ */ public void setLimit(int limit) { - SystemAssert.requireArgument(limit > 0, "Limit must be a positive integer."); + SystemAssert.requireArgument(limit >= 0, "Limit must be a positive integer or zero for unlimited"); this.limit = limit; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeOnlySchemaRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeOnlySchemaRecord.java index 9ebae1164..9dd9ac1de 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeOnlySchemaRecord.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/ScopeOnlySchemaRecord.java @@ -40,7 +40,7 @@ * * @author Dilip Devaraj (ddevaraj@salesforce.com) */ -public class ScopeOnlySchemaRecord { +public class ScopeOnlySchemaRecord extends AbstractSchemaRecord { //~ Instance fields ****************************************************************************************************************************** @@ -110,5 +110,10 @@ public boolean equals(Object obj) { public String toString() { return MessageFormat.format("ScopeOnlySchemaRecord = (Scope = {0}", scope); } + + @Override + public String toBloomFilterKey() { + return constructKey(scope, null, null, null, null, null); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/TSDBEntity.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/TSDBEntity.java index a14c2298b..f278538fb 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/TSDBEntity.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/TSDBEntity.java @@ -34,7 +34,6 @@ import java.io.Serializable; import java.text.MessageFormat; import java.util.Collections; -import java.util.HashMap; import java.util.Map; import java.util.TreeMap; @@ -49,13 +48,19 @@ public abstract class TSDBEntity implements Serializable { private static final int NUM_TAGS = 50; + + /* + * Argus only supports letters, digits and ./-_ for namespace, scope, + * metric and tags. All other characters will be replaced by this. + */ + private static final String UNSUPPORTED_CHARACTER_REPLACEMENT = "__"; //~ Instance fields ****************************************************************************************************************************** private String _uid; private String _scope; private String _metric; - private final Map _tags = new HashMap<>(0); + private final Map _tags = new TreeMap<>(); //~ Constructors ********************************************************************************************************************************* @@ -130,12 +135,12 @@ protected void setUid(String uid) { } /** - * Returns an unmodifiable collection of tags associated with the metric. + * Returns an unmodifiable collection of sorted tags associated with the metric. * - * @return The tags for a metric. Will never be null but may be empty. + * @return The sorted tags for a metric. Will never be null but may be empty. */ public Map getTags() { - Map result = new HashMap<>(); + Map result = new TreeMap<>(); for (Map.Entry entry : _tags.entrySet()) { String key = entry.getKey(); @@ -146,6 +151,10 @@ public Map getTags() { } return Collections.unmodifiableMap(result); } + + public void removeTag(String tagName) { + _tags.remove(tagName); + } /** * Replaces the tags for a metric. Tags cannot use any of the reserved tag names. 
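     * <p>Because the backing map above is now a TreeMap, tags come back in key order and the
     * returned view is unmodifiable. A sketch (Metric is used here only as an illustrative
     * TSDBEntity subclass):
     * <pre>
     *   Metric m = new Metric("myScope", "myMetric");
     *   m.setTag("zone", "na1");
     *   m.setTag("device", "host1");
     *   m.getTags().keySet();            // iterates as [device, zone]
     *   m.getTags().put("more", "no");   // throws UnsupportedOperationException
     * </pre>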
@@ -176,6 +185,12 @@ public static void validateTags(Map tags) { } } } + + public static String replaceUnsupportedChars(String input) { + if(input != null){ + return input.replaceAll("[^a-zA-Z0-9\\./\\-_]+", UNSUPPORTED_CHARACTER_REPLACEMENT); + } else return input; + } /** * Sets a single tag. The tag may not use any of the reserved tag names. @@ -308,4 +323,4 @@ public String getKey() { } } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Trigger.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Trigger.java index 165bc3941..2cb39e8fe 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Trigger.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/Trigger.java @@ -398,6 +398,11 @@ public int hashCode() { hash = 97 * hash + Objects.hashCode(this.name); hash = 97 * hash + Objects.hashCode(this.alert); + hash = 97 * hash + Objects.hashCode(this.type); + hash = 97 * hash + Objects.hashCode(this.threshold); + hash = 97 * hash + Objects.hashCode(this.inertia); + hash = 97 * hash + Objects.hashCode(this.secondaryThreshold); + return hash; } @@ -412,12 +417,10 @@ public boolean equals(Object obj) { final Trigger other = (Trigger) obj; - if (!Objects.equals(this.name, other.name)) { - return false; - } - if (!Objects.equals(this.alert, other.alert)) { + if (this.hashCode() != other.hashCode()) { return false; } + return true; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AlertService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AlertService.java index 1002ccde7..ce5448a5f 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AlertService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AlertService.java @@ -37,6 +37,7 @@ import com.salesforce.dva.argus.entity.Notification; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.MonitorService.Counter; import com.salesforce.dva.argus.service.alert.AlertsCountContext; import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; import com.salesforce.dva.argus.service.alert.notifier.*; @@ -119,9 +120,9 @@ public interface AlertService extends Service { * @param alertCount The maximum number of alerts to dequeue. * @param timeout The maximum amount of time in milliseconds to attempt to dequeue alerts. * - * @return returns Job history of alerts executed. + * @return number of alerts evaluated. */ - List executeScheduledAlerts(int alertCount, int timeout); + Integer executeScheduledAlerts(int alertCount, int timeout); /** * Enqueues alerts to be executed by the next available alert client. @@ -130,6 +131,19 @@ public interface AlertService extends Service { */ void enqueueAlerts(List alerts); + /** + * Evaluates the serialized alert and delivers results to the result cache. + * Used by historical testing. + * + * @param serializedAlert The serializedAlert + * @param when The time at which to evaluate the alert. + * @param testUuid The test UUID. + * + * @return returns Job history of alerts executed. + */ + // TODO - improve architecture - test spec, and callback class for delivering results. + void testEvaluateAlert(String serializedAlert, Long when, String testUuid); + /** * Returns a list of alerts for an owner. 
 *
@@ -146,10 +160,12 @@ public interface AlertService extends Service {
      * @param limit The number of items to fetch.
      * @param offset The starting point of current page.
      * @param searchText The text to filter on the search results if not null or empty.
+     * @param sortField The field of the alert that is used for sorting.
+     * @param sortOrder The order for sorting.
      *
      * @return The list of alerts.
      */
-    List findAlertsByOwnerPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText);
+    List findAlertsByOwnerPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText, String sortField, String sortOrder);

     /**
      * Returns a list of alerts that have been marked for deletion.
@@ -288,10 +304,12 @@ public interface AlertService extends Service {
      * @param limit The number of items to fetch.
      * @param offset The starting point of current page.
      * @param searchText The text to filter on the search results if not null or empty.
-     *
+     * @param sortField The field of the alert that is used for sorting.
+     * @param sortOrder The order for sorting.
+     *
      * @return The list of shared alerts.
      */
-    List findSharedAlertsPaged(Integer limit, Integer offset, String searchText);
+    List findSharedAlertsPaged(Integer limit, Integer offset, String searchText, String sortField, String sortOrder);

     /**
      * Returns the list of supported notifiers.
@@ -323,10 +341,12 @@ public interface AlertService extends Service {
      * @param limit The number of items to fetch.
      * @param offset The starting point of current page.
      * @param searchText The text to filter on the search results if not null or empty.
-     *
+     * @param sortField The field of the alert that is used for sorting.
+     * @param sortOrder The order for sorting.
+     *
      * @return The list of private alerts if the user is privileged.
      */
-    List findPrivateAlertsForPrivilegedUserPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText);
+    List findPrivateAlertsForPrivilegedUserPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText, String sortField, String sortOrder);

     /**
      * Count alerts with the given AlertsCountContext.
@@ -337,6 +357,14 @@ public interface AlertService extends Service {
      */
     int countAlerts(AlertsCountContext context);

+    /**
+     * This is a helper function to post a counter value to TSDB and the JMX exporter.
+     *
+     * @param counter the counter to export
+     * @param value the value of the counter
+     */
+    void updateCounter(Counter counter, Double value);
+
     /**
      * This is a helper function so that we can export metrics to the JMX metric exporter from anywhere in the
      * system
@@ -362,7 +390,13 @@ enum SupportedNotifier {
         WARDENPOSTING(WardenPostingNotifier.class.getName()),
         GUS(GusNotifier.class.getName()),
         CALLBACK(CallbackNotifier.class.getName()),
-        REFOCUS(RefocusNotifier.class.getName());
+        PAGERDUTY(PagerDutyNotifier.class.getName()),
+        REFOCUS(RefocusNotifier.class.getName()),
+        REFOCUS_BOOLEAN(RefocusBooleanNotifier.class.getName()),
+        REFOCUS_VALUE(RefocusValueNotifier.class.getName());
+
+        // , NOOP(NoOpNotifier.class.getName()
+
         String name;
@@ -413,15 +447,17 @@ public static interface Notifier {

        /**
         * Sends notifications for the trigger on which the alert condition occurred.
-        *
+        *
+        * @param notificationContext The context for the notification. Cannot be null.
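        * <p>A hedged caller-side sketch (the retry handling shown is assumed, not part
        * of this interface):
        * <pre>
        *   boolean delivered = notifier.sendNotification(context);
        *   if (!delivered) {
        *       LOGGER.warn("Notification delivery failed; will retry.");  // hypothetical handling
        *   }
        * </pre>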
+ * @return true for success, false for failure */ - void sendNotification(NotificationContext notificationContext); + boolean sendNotification(NotificationContext notificationContext); /** * Clears notifications for the trigger on which the alert condition occurred. - * + * @return true for success, false for failure */ - void clearNotification(NotificationContext notificationContext); + boolean clearNotification(NotificationContext notificationContext); /** * Returns the name of the notifier. @@ -457,4 +493,4 @@ public enum NotificationStatus { } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AnnotationStorageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AnnotationStorageService.java new file mode 100644 index 000000000..3c800d9df --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/AnnotationStorageService.java @@ -0,0 +1,29 @@ +package com.salesforce.dva.argus.service; + +import java.util.List; + +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; + +/** + * Provides methods for putting or retrieving annotations from storage. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public interface AnnotationStorageService extends Service{ + /** + * Writes annotation data. Any existing data is overwritten. + * + * @param annotations The list of annotations to write. Cannot be null, but may be empty. + */ + void putAnnotations(List annotations); + + /** + * Reads annotation data. + * + * @param queries The list of queries to execute. Cannot be null, but may be empty. + * + * @return The query results. Will never be null, but may be empty. 
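 * <p>Usage sketch (construction of the AnnotationQuery list is elided):
 * <pre>
 *   List&lt;Annotation&gt; results = annotationStorageService.getAnnotations(queries);
 *   // never null; an empty list means no annotation matched the queries
 * </pre>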
+ */ + List getAnnotations(List queries); +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ArgusTransport.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ArgusTransport.java new file mode 100644 index 000000000..2e734f147 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ArgusTransport.java @@ -0,0 +1,152 @@ +package com.salesforce.dva.argus.service; + +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.DefaultProxyRoutePlanner; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.SSLContext; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.util.Optional; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +public class ArgusTransport { + private static final Logger LOGGER = LoggerFactory.getLogger(ArgusTransport.class); + private static final int CONNECTION_TIMEOUT_MILLIS = 10000; + private static final int READ_TIMEOUT_MILLIS = 10000; + + protected final CloseableHttpClient httpClient; + + public ArgusTransport(Optional proxyHost, + Optional proxyPort, + Optional proxyUsername, + Optional proxyPassword, + int connectionPoolMaxSize, + int connectionPoolMaxPerRoute) { + this.httpClient = buildHttpClient(proxyHost, proxyPort, proxyUsername, proxyPassword, connectionPoolMaxSize, connectionPoolMaxPerRoute); + } + + public ArgusTransport(String proxyHost, String proxyPort, String proxyUsername, String proxyPassword, int connectionPoolMaxSize, int connectionPoolMaxPerRoute) { + this(validateProxyHostAndPortStrings(proxyHost, proxyPort) ? Optional.of(proxyHost) : Optional.empty(), + validateProxyHostAndPortStrings(proxyHost, proxyPort) ? Optional.of(Integer.parseInt(proxyPort)) : Optional.empty(), + validateProxyUsernameAndPassword(proxyUsername, proxyPort) ? Optional.of(proxyUsername) : Optional.empty(), + validateProxyUsernameAndPassword(proxyPassword, proxyPort) ? 
Optional.of(proxyPassword) : Optional.empty(), + connectionPoolMaxSize, connectionPoolMaxPerRoute); + } + + public static boolean validateProxyHostAndPortStrings(String proxyHost, String proxyPort) { + requireArgument(StringUtils.isBlank(proxyPort) || StringUtils.isNumeric(proxyPort), + "proxyPort must be numeric if present"); + return StringUtils.isNotBlank(proxyHost) && StringUtils.isNotBlank(proxyPort) && StringUtils.isNumeric(proxyPort); + } + + public static boolean validateProxyUsernameAndPassword(String proxyUsername, String proxyPassword) { + return StringUtils.isNotBlank(proxyUsername) && StringUtils.isNotBlank(proxyPassword); + } + + /** + * Get HttpClient. + * + * @return HttpClient + */ + public CloseableHttpClient getHttpClient() { + return httpClient; + } + + protected static SSLContext getSSLContext() { + SSLContext sslContext = null; + try { + sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, null, null); + } catch (NoSuchAlgorithmException | KeyManagementException e) { + LOGGER.error("Failed to init SSLContext", e); + } + return sslContext; + } + + protected static PoolingHttpClientConnectionManager buildConnectionManager(int connectionPoolMaxSize, + int connectionPoolMaxPerRoute, + SSLContext sslContext) { + requireArgument(connectionPoolMaxSize > 0, + String.format("connectionPoolMaxSize(%d) must be > 0", connectionPoolMaxSize)); + requireArgument(connectionPoolMaxPerRoute > 0, + String.format("connectionPoolMaxPerRoute(%d) must be > 0", connectionPoolMaxPerRoute)); + + RegistryBuilder rb = RegistryBuilder.create() + .register("http", PlainConnectionSocketFactory.getSocketFactory()); + if (sslContext != null) { + rb.register("https", new SSLConnectionSocketFactory(sslContext)); + } + Registry r = rb.build(); + PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(r); + cm.setMaxTotal(connectionPoolMaxSize); + cm.setDefaultMaxPerRoute(connectionPoolMaxPerRoute); + LOGGER.info(String.format("Creating connection manager with maxPoolSize=%d, maxPerRoute=%d", + connectionPoolMaxSize, + connectionPoolMaxPerRoute)); + return cm; + } + + protected static CloseableHttpClient buildHttpClient(Optional proxyHost, + Optional proxyPort, + Optional proxyUsername, + Optional proxyPassword, + int connectionPoolMaxSize, + int connectionPoolMaxPerRoute) { + requireArgument(!proxyHost.isPresent() || StringUtils.isNotBlank(proxyHost.get()), + String.format("proxyHost must not be blank if present", proxyHost.isPresent() ? proxyHost.get() : "null")); + requireArgument(!proxyPort.isPresent() || proxyPort.get() > 0, + String.format("proxyPort(%s) must > 0 if present", proxyPort.isPresent() ? 
proxyPort.get().toString() : "null")); + + SSLContext sslContext = getSSLContext(); + PoolingHttpClientConnectionManager cm = buildConnectionManager(connectionPoolMaxSize, connectionPoolMaxPerRoute, sslContext); + + RequestConfig requestConfig = RequestConfig.custom() + .setConnectTimeout(CONNECTION_TIMEOUT_MILLIS) + .setConnectionRequestTimeout(CONNECTION_TIMEOUT_MILLIS) + .setSocketTimeout(READ_TIMEOUT_MILLIS) + .build(); + + HttpClientBuilder builder = HttpClients.custom() + .setDefaultRequestConfig(requestConfig) + .setConnectionManager(cm); + if (sslContext != null) { + builder = builder + .setSSLContext(sslContext) + .setSSLHostnameVerifier(new NoopHostnameVerifier()); + } + if (proxyHost.isPresent() && proxyHost.get().length() > 0 && proxyPort.isPresent()) { + HttpHost proxy = new HttpHost(proxyHost.get(), proxyPort.get().intValue()); + DefaultProxyRoutePlanner routePlanner = new DefaultProxyRoutePlanner(proxy); + builder = builder.setRoutePlanner(routePlanner); + + if(proxyUsername.isPresent() && proxyUsername.get().length() > 0 && proxyPassword.isPresent() && proxyPassword.get().length() > 0){ + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(new AuthScope(proxy), new UsernamePasswordCredentials(proxyUsername.get(), proxyPassword.get())); + + builder.setDefaultCredentialsProvider(credentialsProvider); + } + } + + return builder.build(); + } + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CacheService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CacheService.java index 0b40dbc37..49f5714b9 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CacheService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CacheService.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; import java.util.List; @@ -83,8 +83,6 @@ public interface CacheService extends Service { */ void put(Map entries, int ttl); - /** Removes all entries from cache. */ - void clear(); /** * Check if a key exists in cache. @@ -151,7 +149,7 @@ public interface CacheService extends Service { * * @param keys The set of keys to examine. Cannot be null, but may be empty. * - * @return A map having the same size as the input set of keys, indicating the existence of each key. + * @return A map having the same size as the input set of keys, indicating the existence of each key. 
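     * <p>Sketch (key names are illustrative):
     * <pre>
     *   Map&lt;String, Boolean&gt; found = cacheService.exist(new HashSet&lt;&gt;(Arrays.asList("k1", "k2")));
     *   // found.size() == 2; found.get("k1") is TRUE only while "k1" is cached
     * </pre>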
*/ Map exist(Set keys); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CallbackService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CallbackService.java index 3c4afe4e5..c2936ee32 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CallbackService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CallbackService.java @@ -5,6 +5,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import com.salesforce.dva.argus.service.alert.notifier.CallbackNotifier; import org.apache.http.HttpResponse; /** @@ -14,9 +15,9 @@ */ public interface CallbackService { - HttpResponse sendNotification(DefaultAlertService.NotificationContext context); + HttpResponse sendNotification(DefaultAlertService.NotificationContext context, CallbackNotifier notifier); - class Request { + class CallbackRequest { @JsonProperty(required = true) private String uri; @@ -33,7 +34,7 @@ public Method getMethod() { return method; } - public Request setMethod(Method method) { + public CallbackRequest setMethod(Method method) { this.method = method; return this; } @@ -42,7 +43,7 @@ public String getUri() { return uri; } - public Request setUri(String uri) { + public CallbackRequest setUri(String uri) { this.uri = uri; return this; } @@ -51,7 +52,7 @@ public String getBody() { return body; } - public Request setBody(String body) { + public CallbackRequest setBody(String body) { this.body = body; return this; } @@ -60,7 +61,7 @@ public Map getHeader() { return header; } - public Request setHeader(Map header) { + public CallbackRequest setHeader(Map header) { this.header = header; return this; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CollectionService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CollectionService.java index 4bdd96538..b2ece1d7b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CollectionService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/CollectionService.java @@ -32,6 +32,7 @@ package com.salesforce.dva.argus.service; import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.PrincipalUser; import java.util.List; @@ -89,6 +90,19 @@ public interface CollectionService extends Service { * @return The number of metric schema records committed. */ int commitMetricSchema(int metricCount, int timeout); + + /** + * Commits histograms from the collection queue into the data store. The actual number of histograms committed will be: Summation(from i=1 to + * i=k) {ni}, where ni is the number of histograms contained in message i. The actual number of messages dequeued will be + * the maximum number that can be dequeued from the collection queue within the specified timeout period, not to exceed the maximum number + * specified. + * + * @param messageCount The maximum number of histogram messages to commit from the queue. Must be a positive non-zero number. + * @param timeout The timeout in milliseconds. Must be a positive non-zero number. + * + * @return The list of histograms committed. + */ + int commitHistograms(int messageCount, int timeout); /** * Submits a single annotation to the collection queue. User based policy checks are enforced prior to the submission of data. 
If any policy @@ -118,5 +132,23 @@ public interface CollectionService extends Service { * @return The number of annotations committed. */ int commitAnnotations(int annotationCount, int timeout); + + /** + * Submits a single histogram to the collection queue. User based policy checks are enforced prior to the submission of data. If any policy condition + * is not met, the method shall throw a runtime exception. + * + * @param submitter The user submitting the data. Cannot be null. + * @param histogram The histogram to submit. Cannot be null. + */ + void submitHistogram(PrincipalUser submitter, Histogram histogram); + + /** + * Submits histogram to the collection queue. Each message submitted to the queue contains a chunk of histograms. User based policy checks are enforced + * prior to the submission of data. If any policy condition is not met, the method shall throw a runtime exception. + * + * @param submitter The user submitting the data. Cannot be null. + * @param histograms The histogram to submit. Cannot be null. + */ + void submitHistograms(PrincipalUser submitter, List histograms); } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/DiscoveryService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/DiscoveryService.java index d781c8758..56cf1f68f 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/DiscoveryService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/DiscoveryService.java @@ -28,17 +28,25 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; import com.salesforce.dva.argus.entity.SchemaQuery; +import com.salesforce.dva.argus.entity.TSDBEntity; +import com.salesforce.dva.argus.service.MonitorService.Counter; import com.salesforce.dva.argus.service.SchemaService.RecordType; +import com.salesforce.dva.argus.service.schema.WildcardExpansionLimitExceededException; import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.util.RequestContextHolder; import java.text.MessageFormat; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; /** * Provides a means to query metric schema meta data to determine the existence of metrics. @@ -46,29 +54,13 @@ * @author Tom Valine (tvaline@salesforce.com) */ public interface DiscoveryService extends Service { - - /** This should be a configuration. For now, this is how we reached on a value of 2M. - * A datapoint in Argus is a tuple containing a Long timestamp (8 bytes with some additional Java Wrapper Class bytes) - * and a Double value (8 bytes with some additional Java Wrapper Class bytes). We would then consider a datapoint to - * take up around 40 bytes of memory. - * - * Fixing the max QPM (Queries Per Minute) for Argus to around 500, and avg. query latency to around 3 secs - * (both are conservative estimates), we would be serving 25 queries concurrently. Again these are just ball park - * estimates to fix the maximum number of datapoints that should be returned in a response. - * - * Let's assume we reserve 2GB of memory for concurrently executing these 25 queries. That would mean around 80MB per - * request. Roughly translating to around (80M bytes/40 bytes =) 2M datapoints. 
- * - * Please configure this no. according to the above calculation for your environment. - **/ - static final int MAX_DATAPOINTS_PER_RESPONSE = 2000000; - - /** We enforce a soft limit of 1 minute on the datapoint sampling frequency through WardenService and hence assume this - * to be the same. */ + + /** We enforce a soft limit of 1 minute on the datapoint sampling frequency through WardenService and hence assume this + * to be the same. */ static final long DATAPOINT_SAMPLING_FREQ_IN_MILLIS = 60 * 1000L; - - static final String EXCEPTION_MESSAGE = MessageFormat.format("Your query may return more than {0} datapoints in all. Please modify your query. " - + "You may either reduce the time window or narrow your wildcard search or use downsampling.", MAX_DATAPOINTS_PER_RESPONSE); + + static final String EXCEPTION_MESSAGE = "Your query may return more than {0} datapoints in all. Please modify your query. " + + "You may either reduce the time window or narrow your wildcard search or use downsampling."; //~ Methods ************************************************************************************************************************************** @@ -76,24 +68,24 @@ public interface DiscoveryService extends Service { * Returns a list of metric schema records which match the filtering criteria. At least one field must be filtered. * * @param query The query to filter by - * @return A list of metric schema records matching the filtering criteria. Will never return null, but may be empty. + * @return A list of metric schema records matching the filtering criteria. Will never return null, but may be empty. */ List filterRecords(SchemaQuery query); /** * @param query The query - * @param type The field to return. Cannot be null. + * @param type The field to return. Cannot be null. * @return A unique list of MetricSchemaRecords. Will never return null, but may be empty. */ List getUniqueRecords(MetricSchemaRecordQuery query, RecordType type); - /** - * @param query The query - * @param type The field to return. Cannot be null. - * @param indexLevel The index of the tokenized results to return. - * @return A unique list of tokenized results. Will never return null, but may be empty. - */ - List browseRecords(MetricSchemaRecordQuery query, RecordType type, int indexLevel); + /** + * @param query The query + * @param type The field to return. Cannot be null. + * @param indexLevel The index of the tokenized results to return. + * @return A unique list of tokenized results. Will never return null, but may be empty. + */ + List browseRecords(MetricSchemaRecordQuery query, RecordType type, int indexLevel); /** * Expands a given wildcard query into a list of distinct queries. @@ -112,43 +104,92 @@ public interface DiscoveryService extends Service { * @return True if the query is a wildcard query. 
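     * <p>Illustration (the MetricQuery construction is sketched, not exact):
     * <pre>
     *   // the scope contains '*', so this counts as a wildcard query
     *   DiscoveryService.isWildcardQuery(queryFor("system.*", "cpu.usage"));   // true, queryFor() is hypothetical
     *   // note: a tag value of exactly "*" on its own does not make a query wildcard
     * </pre>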
*/ static boolean isWildcardQuery(MetricQuery query) { - - if (SchemaService.containsWildcard(query.getScope()) - || SchemaService.containsWildcard(query.getMetric()) - || SchemaService.containsWildcard(query.getNamespace())) { + + if (SchemaService.containsWildcard(query.getScope()) + || SchemaService.containsWildcard(query.getMetric()) + || SchemaService.containsWildcard(query.getNamespace())) { return true; } if (query.getTags() != null) { for (String tagKey : query.getTags().keySet()) { if (SchemaService.containsWildcard(tagKey) || - (!"*".equals(query.getTag(tagKey)) && SchemaService.containsWildcard(query.getTag(tagKey)))) { + (!"*".equals(query.getTag(tagKey)) && SchemaService.containsWildcard(query.getTag(tagKey)))) { return true; } } } return false; } - - static int maxTimeseriesAllowed(MetricQuery query) { - - long timeWindowInMillis = query.getEndTimestamp() - query.getStartTimestamp(); - long downsamplingDivisor = (query.getDownsamplingPeriod() == null || query.getDownsamplingPeriod() <= 0) ? 1 : query.getDownsamplingPeriod(); - long numDatapointsPerTimeSeries = timeWindowInMillis / DATAPOINT_SAMPLING_FREQ_IN_MILLIS / downsamplingDivisor; - - numDatapointsPerTimeSeries = numDatapointsPerTimeSeries <= 0 ? 1 : numDatapointsPerTimeSeries; - - return (int) (MAX_DATAPOINTS_PER_RESPONSE / numDatapointsPerTimeSeries); + + static int maxTimeseriesAllowed(MetricQuery query, long maxDataPointsPerResponse) { + long timeWindowInMillis = getTimeWindowInMillis(query.getStartTimestamp(), query.getEndTimestamp()); + // return max datapoints for single second queries + if(timeWindowInMillis<=2000L) { + return (int)maxDataPointsPerResponse; + } + long downsamplingDivisor = (query.getDownsamplingPeriod() == null || query.getDownsamplingPeriod() <= 0) ? 60000l : query.getDownsamplingPeriod(); + downsamplingDivisor = (timeWindowInMillis > downsamplingDivisor) ? downsamplingDivisor : timeWindowInMillis; + long samplingPeriod = (downsamplingDivisor>DATAPOINT_SAMPLING_FREQ_IN_MILLIS) ? DATAPOINT_SAMPLING_FREQ_IN_MILLIS : downsamplingDivisor; + + double numRawDPsPerSeries = (timeWindowInMillis*1.0)/samplingPeriod; + double numDownsampledDPsPerSeries = (numRawDPsPerSeries) / (downsamplingDivisor/(samplingPeriod*1.0)); + + numDownsampledDPsPerSeries = numDownsampledDPsPerSeries <= 0 ? 
1 : numDownsampledDPsPerSeries; + + return (int) (maxDataPointsPerResponse / numDownsampledDPsPerSeries); } + + static long getTimeWindowInMillis(long startTimestamp, long endTimestamp) { + // handling case when start or end timestamp is specified in seconds + if(startTimestamp*1000TSDBService.METRICS_RETENTION_PERIOD_MILLIS) { + timeWindowInMillis = TSDBService.METRICS_RETENTION_PERIOD_MILLIS; + } + return timeWindowInMillis; + } + static int numApproxTimeseriesForQuery(MetricQuery mq) { - int count = 1; - for(String tagValue : mq.getTags().values()) { - String splits[] = tagValue.split("\\|"); - count *= splits.length; - } - - return count; - } + int count = 1; + for(String tagValue : mq.getTags().values()) { + String splits[] = tagValue.split("\\|"); + count *= splits.length; + } + + return count; + } + + static void throwMaximumDatapointsExceededException(MetricQuery query, long maxDataPointsPerQuery, boolean enforceDatapointLimit, MonitorService monitorService, Logger logger) throws WildcardExpansionLimitExceededException{ + if((query.getDownsamplingPeriod()!=null && query.getDownsamplingPeriod()!=0) || enforceDatapointLimit) { + if(monitorService!=null) { + Map tags = new HashMap<>(); + tags.put("scope", TSDBEntity.replaceUnsupportedChars(query.getScope())); + tags.put("metric", TSDBEntity.replaceUnsupportedChars(query.getMetric())); + if(RequestContextHolder.getRequestContext()!=null) { + tags.put("user", RequestContextHolder.getRequestContext().getUserName()); + }else { + tags.put("user", "unknown"); + } + monitorService.modifyCounter(Counter.QUERY_DATAPOINTS_LIMIT_EXCEEDED, 1, tags); + logger.error("Maximum datapoints limit execeeded for query - " + query.toString() + ", user - "+tags.get("user")); + } + } + + // We are throwing the exception only when the downsampler is absent, + // as we want to give users some time to adjust their queries which have downsampler in them, unless the enforceDatapointLimit flag is true + if(query.getDownsamplingPeriod()==null || query.getDownsamplingPeriod()==0 || enforceDatapointLimit) { + throw new WildcardExpansionLimitExceededException(MessageFormat.format(EXCEPTION_MESSAGE, maxDataPointsPerQuery)) ; + } + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageService.java new file mode 100644 index 000000000..cff9d34bf --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageService.java @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service; + +import com.salesforce.dva.argus.entity.ImageProperties; +import com.salesforce.dva.argus.entity.Metric; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.List; + +/** + * Provides methods to generate,store and query image. + * + * @author Chandravyas Annakula (cannakula@salesforce.com) + */ + +public interface ImageService extends Service { + + + /** + * Generates an image for the given list of metrics + * + * @param metrics List of metrics for which image is generated + * @param properties Properties of the image that need to be generated + * @return Returns Byte Array of the JPEG Image + */ + + byte[] generateImage(List metrics, ImageProperties properties); + + /** + * Saves the image byte array in an sync or async fashion + * + * @param imageBytes Byte Array of the JPEG image to be saved + * @param sync saves image synchronously or asynchronously + * @return Returns a Unique Id for this image + */ + + String storeImage(byte[] imageBytes, boolean sync); + + + /** + * Generates and Saves the image in an sync or async fashion for the given list of metrics + * + * @param metrics List of metrics for which JPEG image is generated + * @param properties Properties of the image that need to be generated + * @param sync saves image synchronously or asynchronously + * @return Pair Object with first argument contains Unique Id for image and second argument contains associated JPEG image byte array + */ + Pair generateAndStoreImage(List metrics, ImageProperties properties, boolean sync); + + + /** + * Returns an image for the given imageId + * + * @param imageId Key of the image + * @return Associated JPEG image is returned + */ + byte[] getImageById(String imageId); +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageStorageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageStorageService.java new file mode 100644 index 000000000..b02fbd3b7 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ImageStorageService.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. 
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.salesforce.dva.argus.service;
+
+import java.util.List;
+
+import com.salesforce.dva.argus.entity.Annotation;
+import com.salesforce.dva.argus.service.tsdb.AnnotationQuery;
+
+/**
+ * Provides methods for inserting or retrieving images from storage.
+ *
+ * @author Chandravyas Annakula (cannakula@salesforce.com)
+ */
+public interface ImageStorageService extends Service {
+
+ /**
+ * Writes image data. Any existing data is overwritten.
+ * @param imageId Id of the image bytes
+ * @param imageBytes The image bytes that need to be stored
+ * @param sync Saves the image synchronously or asynchronously
+ */
+ void putImage(String imageId, byte[] imageBytes, boolean sync);
+
+ /**
+ * Retrieves the image bytes given the imageId.
+ *
+ * @param imageId Key of the image
+ * @return The associated image bytes
+ */
+ byte[] getImage(String imageId);
+}
\ No newline at end of file
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MQService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MQService.java
index b488cb087..b2d075af7 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MQService.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MQService.java
@@ -156,6 +156,8 @@ public enum MQQueue {
 METRIC("argusMetricQueue"),
 /** The name of the queue into which submitted annotation data is put. */
 ANNOTATION("argusAnnotationQueue"),
+ /** The name of the queue into which submitted histogram data is put. */
+ HISTOGRAM("argusHistogramQueue"),
 /** The name of the queue into which all enabled jobs are put.
 * The scheduler then enqueues them and determines whether they are to be scheduled or not. */
 TASKQUEUE("argusTaskQueue"),
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MailService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MailService.java
index 240f38382..2228b6109 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MailService.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MailService.java
@@ -31,6 +31,8 @@
 package com.salesforce.dva.argus.service;
 
+import com.salesforce.dva.argus.service.mail.EmailContext;
+
 import java.util.Set;
 
 /**
@@ -45,13 +47,10 @@ public interface MailService extends Service {
 /**
 * Sends an email message.
 *
- * @param to The set of email recipients.
- * @param subject The email subject.
- * @param message The message body.
- * @param contentType The content type.
- * @param priority The message priority.
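The two image interfaces above split responsibilities: ImageService renders metrics to JPEG bytes, while ImageStorageService persists them under an image id. A hedged sketch of a caller (the helper method and its inputs are hypothetical; only the interface methods come from this patch):

    // Renders the metrics, stores the bytes synchronously, then reads them back by id.
    byte[] renderAndFetch(ImageService imageService, List<Metric> metrics, ImageProperties properties) {
        Pair<String, byte[]> result = imageService.generateAndStoreImage(metrics, properties, true /* sync */);
        return imageService.getImageById(result.getLeft()); // same bytes as result.getRight()
    }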
+ * @param emailContext Context of the email such as recipients, subject, body, ...
+ * @return true on success, false when there is an error
 */
- void sendMessage(Set<String> to, String subject, String message, String contentType, Priority priority);
+ boolean sendMessage(EmailContext emailContext);
 
 //~ Enums ****************************************************************************************************************************************
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricService.java
index be9c2f392..6bde2df51 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricService.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricService.java
@@ -32,6 +32,7 @@
 package com.salesforce.dva.argus.service;
 
 import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.service.metric.MetricQueryResult;
 import com.salesforce.dva.argus.service.tsdb.MetricQuery;
 
 import java.util.List;
@@ -51,10 +52,9 @@ public interface MetricService extends Service {
 *
 * @param expressions A list of query expressions
 *
- * @return A list of time series Metrics for the given expressions. A transformed Metric if specified so by the expression. The method will never
- * return null. Returns an empty list if all expressions return nothing.
+ * @return A MetricQueryResult object containing the metrics returned for the expressions, along with other details computed during query execution
 */
- List<Metric> getMetrics(List<String> expressions);
+ MetricQueryResult getMetrics(List<String> expressions);
 
 /**
 * Evaluates the given expressions and returns a list of time series Metrics.
@@ -65,10 +65,9 @@
 * For e.g. If the expression is -1h:argus.jvm:mem.heap.used:avg, 1 hour should be subtracted from
 * relativeTo
 *
- * @return A list of time series Metrics for the given expressions. A transformed Metric if specified so by the expression. The method will never
- * return null. Returns an empty list if all expressions return nothing.
+ * @return A MetricQueryResult object containing the metrics returned for the expressions, along with other details computed during query execution
 */
- List<Metric> getMetrics(List<String> expressions, long relativeTo);
+ MetricQueryResult getMetrics(List<String> expressions, long relativeTo);
 
 /**
 * Evaluates the given expression to construct MetricQuery object and returns the time series for the corresponding expression. The query
@@ -81,10 +80,9 @@
 * identity(1414799283000:00D300000062.na1:app_record.count{recordType=A, source=splunk}:avg:15m-avg) E.g.
 * identity(-15h:-10h:00D300000062.na1:app_record.count{recordType=A, source=splunk}:avg)
 *
- * @return A time series for the given expression. A transformed time series if specified so by the expression. Null if no such time series
- * found.
+ * @return A MetricQueryResult object containing the metrics returned for the expression, along with other details computed during query execution
 */
- List<Metric> getMetrics(String expression);
+ MetricQueryResult getMetrics(String expression);
 
 /**
 * Evaluates the given expression and returns a list of time series Metrics.
@@ -95,10 +93,9 @@
 * For e.g. If the expression is -1h:argus.jvm:mem.heap.used:avg, 1 hour should be subtracted from
 * relativeTo
 *
- * @return A list of time series Metrics for the given expressions. A transformed Metric if specified so by the expression. The method will never
- * return null. Returns an empty list if all expressions return nothing.
+ * @return A MetricQueryResult object containing the metrics returned for the expression, along with other details computed during query execution
 */
- List<Metric> getMetrics(String expression, long relativeTo);
+ MetricQueryResult getMetrics(String expression, long relativeTo);
 
 /**
 * Batch and enqueue the given expressions and return the batch ID.
@@ -158,45 +155,46 @@ public interface MetricService extends Service {
 * @return The corresponding list of metric query objects. Will never return null.
 */
 List getQueries(List expression, long relativeTo);
-
- /**
- * Returns the range of time series that current query has expanded to.
- *
- *
- * @return The range of time series that current query has expanded to.
- */
- String getExpandedTimeSeriesRange();
-
+ /**
- * Returns the query time window of current query
- *
+ * Returns a list of MetricQuery objects corresponding to the given expression, where the query time range is computed relative to the given relativeTo value.
 *
- * @return The query time window corresponding to current query.
- */
- String getQueryTimeWindow();
-
+ * @param expression The metric expression to evaluate. Cannot be null and must be a valid metric expression.
+ * @param relativeTo The timestamp from which the start and end times should be relative to. Only applied when using
+ * relative timestamps in expressions.
+ * For e.g. If the expression is -1h:argus.jvm:mem.heap.used:avg, 1 hour should be subtracted from
+ * relativeTo
+ * @return The corresponding list of metric query objects. Will never return null.
+ */
+ List<MetricQuery> parseToMetricQuery(String expression, long relativeTo);
+
 /**
- * Returns the number of discovery results that current query has expanded to.
- *
+ * Returns a list of MetricQuery objects corresponding to the given expressions, where the query time range is computed relative to the given relativeTo value.
 *
- * @return The number of discovery results that current query has expanded to.
- */
- Integer getNumDiscoveryResults();
-
+ * @param expression The list of metric expressions to evaluate. Cannot be null, but may be empty. All entries must be a valid metric expression.
+ * @param relativeTo The timestamp from which the start and end times should be relative to. Only applied when using
+ * relative timestamps in expressions.
+ * For e.g. If the expression is -1h:argus.jvm:mem.heap.used:avg, 1 hour should be subtracted from
+ * relativeTo
+ * @return The corresponding list of metric query objects. Will never return null.
+ */
+ List<MetricQuery> parseToMetricQuery(List<String> expression, long relativeTo);
+
 /**
- * Returns the number of discovery queries that has been made to the discovery service.
- *
+ * Returns the list of DCs from the metric query list, if present.
+ * @param mQList The list of MetricQuery objects to evaluate. Cannot be null, but may be empty.
 *
- * @return The number of discovery queries that has been made to the discovery service.
- */
- Integer getNumDiscoveryQueries();
+ * @return The DC list.
+ */
+ List<String> extractDCFromMetricQuery(List<MetricQuery> mQList);
 
 /**
- * Returns List of DCs from the expression.
- * @param expression The list of metric expressions to evaluate. Cannot be null, but may be empty. All entries must be a valid metric expression.
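The getMetrics return-type change above is the crux of this file: callers that previously consumed a List<Metric> now receive a MetricQueryResult wrapper. A hedged migration sketch (the getMetricsList accessor name is an assumption based on the javadoc, not something this diff shows):

    // Before this change: List<Metric> metrics = metricService.getMetrics(expressions);
    MetricQueryResult result = metricService.getMetrics(expressions);
    List<Metric> metrics = result.getMetricsList(); // accessor name assumed, not shown in this diff
    // Per-query metadata that used to come from getQueryTimeWindow(),
    // getNumDiscoveryResults(), etc. is now expected to ride on the result object.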
+ * Returns the DC from the metric, if present.
+ * @param m The Metric to evaluate. Cannot be null.
 *
- * @return The list of DCs from the expression.
+ * @return The DC.
 */
- List<String> getDCFromExpression(String expression);
+ String extractDCFromMetric(Metric m);
+
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricStorageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricStorageService.java
new file mode 100644
index 000000000..4eb6a0e47
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MetricStorageService.java
@@ -0,0 +1,30 @@
+package com.salesforce.dva.argus.service;
+
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.service.tsdb.MetricQuery;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Provides methods for putting or retrieving metrics from storage.
+ *
+ * @author Sudhanshu Bahety (sudhanshu.bahety@salesforce.com)
+ */
+public interface MetricStorageService extends Service {
+ /**
+ * Writes metric data. Any existing data is overwritten.
+ *
+ * @param metrics The list of metrics to write. Cannot be null, but may be empty.
+ */
+ void putMetrics(List<Metric> metrics);
+
+ /**
+ * Reads metric data.
+ *
+ * @param queries The list of queries to execute. Cannot be null, but may be empty.
+ *
+ * @return The query results as a map of query to the corresponding metrics it returns. Will never be null, but may be empty.
+ */
+ Map<MetricQuery, List<Metric>> getMetrics(List<MetricQuery> queries);
+}
\ No newline at end of file
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MonitorService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MonitorService.java
index b88cbaa2e..11f66e77d 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MonitorService.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/MonitorService.java
@@ -33,7 +33,6 @@
 import com.salesforce.dva.argus.entity.Dashboard;
 import com.salesforce.dva.argus.entity.Metric;
-import com.salesforce.dva.argus.service.monitor.GaugeExporter;
 
 import java.util.Map;
 
@@ -149,12 +148,12 @@ public interface MonitorService extends Service {
 *
 * @return isDataLagging boolean flag
 */
- boolean isDataLagging();
+ boolean isDataLagging(String dataCenter);
 
 /**
 * This is helper function so that we can export metrics to JMX metric exporter everywhere in the
 * system
- *
+ *
 * @param metric the metric to export
 * @param value the value datapoint of the metric to be export
 */
@@ -190,71 +189,146 @@ public static enum Counter {
 THREADS("argus.jvm", "thread.used"),
 PEAK_THREADS("argus.jvm", "thread.peak"),
 DAEMON_THREADS("argus.jvm", "thread.daemon"),
- METRIC_WRITES("argus.core", "metric.writes"),
- ANNOTATION_WRITES("argus.core", "annotation.writes"),
- METRIC_READS("argus.core", "metric.reads"),
- ANNOTATION_READS("argus.core", "annotation.reads"),
- WARDEN_TRIGGERS("argus.core", "triggers.warden"),
- SYSTEM_TRIGGERS("argus.core", "triggers.system"),
- USER_TRIGGERS("argus.core", "triggers.user"),
- JOBS_SCHEDULED("argus.core", "jobs.scheduled"),
+ MBEANSERVER_MBEAN_TOTAL("argus.jvm", "mbeanserver.mbean.total", MetricType.COUNTER),
+ METRIC_WRITES("argus.core", "metric.writes", MetricType.COUNTER),
+ ANNOTATION_WRITES("argus.core", "annotation.writes", MetricType.COUNTER),
+
ANNOTATION_DROPS_MAXSIZEEXCEEDED("argus.core", "annotation.drops.maxSizeExceeded", MetricType.COUNTER), + HISTOGRAM_WRITES("argus.core", "histogram.writes", MetricType.COUNTER), + HISTOGRAM_DROPPED("argus.core", "histogram.dropped", MetricType.COUNTER), + METRIC_READS("argus.core", "metric.reads", MetricType.COUNTER), + ANNOTATION_READS("argus.core", "annotation.reads", MetricType.COUNTER), + WARDEN_TRIGGERS("argus.core", "triggers.warden", MetricType.COUNTER), + SYSTEM_TRIGGERS("argus.core", "triggers.system", MetricType.COUNTER), + USER_TRIGGERS("argus.core", "triggers.user", MetricType.COUNTER), + JOBS_SCHEDULED("argus.core", "jobs.scheduled", MetricType.COUNTER), JOBS_MAX("argus.core", "jobs.max"), + ALERTS_KPI("argus.core", "alerts.kpi"), ALERTS_ENABLED("argus.core", "alerts.enabled"), - ALERTS_SCHEDULED("argus.core", "alerts.scheduled"), + ALERTS_SCHEDULED("argus.core", "alerts.scheduled", MetricType.COUNTER), + ALERTS_SCHEDULED_TOTAL("argus.core", "alerts.scheduled.total", MetricType.COUNTER), ALERTS_SCHEDULING_QUEUE_SIZE("argus.core", "alerts.scheduleQueue.size"), - ALERTS_EVALUATED("argus.core", "alerts.evaluated"), - ALERTS_FAILED("argus.core", "alerts.failed"), - ALERTS_EVALUATION_LATENCY("argus.core", "alerts.evaluation.latency"), - ALERTS_SKIPPED("argus.core", "alerts.skipped"), - NOTIFICATIONS_SENT("argus.core", "notifications.sent"), - TRIGGERS_VIOLATED("argus.core", "triggers.violated"), - ALERTS_MAX("argus.core", "alerts.max"), - ALERT_EVALUATION_KPI("argus.core", "alert.evaluation.kpi"), - DATAPOINT_READS("argus.core", "datapoint.reads"), - DATAPOINT_WRITES("argus.core", "datapoint.writes"), - UNIQUE_USERS("argus.core", "users.unique"), - DAILY_USERS("argus.core", "users.daily"), - MONTHLY_USERS("argus.core", "users.monthly"), - COMMIT_CLIENT_DATAPOINT_WRITES("argus.core", "commit.client.datapoint.writes"), - COMMIT_CLIENT_METRIC_WRITES("argus.core", "commit.client.metric.writes"), - SCHEMACOMMIT_CLIENT_METRIC_WRITES("argus.core", "schemacommit.client.metric.writes"), - - SCOPEANDMETRICNAMES_WRITTEN("argus.core", "scopeandmetricnames.written"), + ALERTS_EVALUATED("argus.core", "alerts.evaluated", MetricType.COUNTER), + ALERTS_EVALUATED_RAWTOTAL("argus.core", "alerts.evaluated.rawtotal", MetricType.COUNTER), + ALERTS_EVALUATED_TOTAL("argus.core", "alerts.evaluated.total", MetricType.COUNTER), + ALERTS_EVALUATION_STARTED("argus.alerts", "evaluation.started", MetricType.COUNTER), + ALERTS_EVALUATION_DELAYED("argus.alerts", "evaluation.delayed", MetricType.COUNTER), + ALERTS_FAILED("argus.core", "alerts.failed", MetricType.COUNTER), + ALERTS_EVALUATION_LATENCY("argus.core", "alerts.evaluation.latency", MetricType.COUNTER), + ALERTS_UPDATE_LATENCY("argus.core","alerts.update.latency"), + ALERTS_NEW_LATENCY("argus.core","alerts.new.latency"), + ALERTS_UPDATED_COUNT("argus.core","alerts.updated", MetricType.COUNTER), + ALERTS_CREATED_COUNT("argus.core","alerts.created", MetricType.COUNTER), + ALERTS_SKIPPED("argus.core", "alerts.skipped", MetricType.COUNTER), + TRANSFORMS_EVALUATED("argus.core", "transforms.evaluated", MetricType.COUNTER), + NOTIFICATIONS_SENT("argus.core", "notifications.sent", MetricType.COUNTER), + GOC_NOTIFICATIONS_FAILED("argus.core", "notifications.failed.goc", MetricType.COUNTER), + GUS_NOTIFICATIONS_FAILED("argus.core", "notifications.failed.gus", MetricType.COUNTER), + PAGERDUTY_NOTIFICATIONS_FAILED("argus.core", "notifications.failed.pagerduty", MetricType.COUNTER), + GOC_NOTIFICATIONS_RETRIES("argus.core", "notifications.retries.goc", 
MetricType.COUNTER), + GUS_NOTIFICATIONS_RETRIES("argus.core", "notifications.retries.gus", MetricType.COUNTER), + PAGERDUTY_NOTIFICATIONS_RETRIES("argus.core", "notifications.retries.pagerduty", MetricType.COUNTER), + TRIGGERS_VIOLATED("argus.core", "triggers.violated", MetricType.COUNTER), + ALERTS_MAX("argus.core", "alerts.max",MetricType.COUNTER), + ALERT_EVALUATION_KPI("argus.core", "alert.evaluation.kpi", MetricType.COUNTER), + DATAPOINT_READS("argus.core", "datapoint.reads", MetricType.COUNTER), + DATAPOINT_WRITES("argus.core", "datapoint.writes", MetricType.COUNTER), + UNIQUE_USERS("argus.core", "users.unique", MetricType.COUNTER), + DAILY_USERS("argus.core", "users.daily", MetricType.COUNTER), + MONTHLY_USERS("argus.core", "users.monthly", MetricType.COUNTER), + COMMIT_CLIENT_DATAPOINT_WRITES("argus.core", "commit.client.datapoint.writes", MetricType.COUNTER), + COMMIT_CLIENT_METRIC_WRITES("argus.core", "commit.client.metric.writes", MetricType.COUNTER), + SCHEMACOMMIT_CLIENT_METRIC_WRITES("argus.core", "schemacommit.client.metric.writes", MetricType.COUNTER), + + // MORE FINE GRAIN ALERT EVALUATION TIMERS + METRICQUERYPROCESSOR_EVALUATETSDBQUERY_LATENCY("argus.core", "metricqueryprocessor.evaluatetsdbquery.latency", MetricType.COUNTER), + METRICQUERYPROCESSOR_EVALUATETSDBQUERY_COUNT("argus.core", "metricqueryprocessor.evaluatetsdbquery.count", MetricType.COUNTER), + METRICS_GETMETRICS_LATENCY("argus.core", "metrics.getmetrics.latency", MetricType.COUNTER), + METRICS_GETMETRICS_COUNT("argus.core", "metrics.getmetrics.count", MetricType.COUNTER), + REDISCACHE_GET_LATENCY("argus.core", "rediscache.get.latency", MetricType.COUNTER), + REDISCACHE_GET_COUNT("argus.core", "rediscache.get.count", MetricType.COUNTER), + ALERTS_EVALUATION_ONLY_LATENCY("argus.core", "alerts.evaluation.nonotification.latency", MetricType.COUNTER), + ALERTS_EVALUATION_LATENCY_COUNT("argus.core", "alerts.evaluation.timer.count", MetricType.COUNTER), + + SCOPEANDMETRICNAMES_WRITTEN("argus.core", "scopeandmetricnames.written", MetricType.COUNTER), SCOPEANDMETRICNAMES_WRITE_LATENCY("argus.core", "scopeandmetricnames.write.latency"), - SCOPEANDMETRICNAMES_QUERY_COUNT("argus.core", "scopeandmetricnames.query.count"), + SCOPEANDMETRICNAMES_QUERY_COUNT("argus.core", "scopeandmetricnames.query.count", MetricType.COUNTER), SCOPEANDMETRICNAMES_QUERY_LATENCY("argus.core", "scopeandmetricnames.query.latency"), - SCOPENAMES_WRITTEN("argus.core", "scopenames.written"), + QUERYSTORE_RECORDS_WRITTEN("argus.core", "querystore.records.written", MetricType.COUNTER), + QUERYSTORE_RECORDS_WRITE_LATENCY("argus.core", "querystore.records.write.latency"), + + SCOPENAMES_WRITTEN("argus.core", "scopenames.written", MetricType.COUNTER), SCOPENAMES_WRITE_LATENCY("argus.core", "scopenames.write.latency"), - SCOPENAMES_QUERY_COUNT("argus.core", "scopenames.query.count"), + SCOPENAMES_QUERY_COUNT("argus.core", "scopenames.query.count", MetricType.COUNTER), SCOPENAMES_QUERY_LATENCY("argus.core", "scopenames.query.latency"), - SCHEMARECORDS_WRITTEN("argus.core", "schemarecords.written"), + SCHEMARECORDS_WRITTEN("argus.core", "schemarecords.written", MetricType.COUNTER), SCHEMARECORDS_WRITE_LATENCY("argus.core", "schemarecords.write.latency"), - SCHEMARECORDS_QUERY_COUNT("argus.core", "schemarecords.query.count"), + SCHEMARECORDS_QUERY_COUNT("argus.core", "schemarecords.query.count", MetricType.COUNTER), + SCHEMARECORDS_DOCS_PULLED("argus.core", "schemarecords.docs.pulled.count", MetricType.COUNTER), SCHEMARECORDS_QUERY_LATENCY("argus.core", 
"schemarecords.query.latency"), - METATAGS_WRITTEN("argus.core", "metatags.written"), + METATAGS_WRITTEN("argus.core", "metatags.written", MetricType.COUNTER), METATAGS_WRITE_LATENCY("argus.core", "metatags.write.latency"), + BLOOM_CREATED_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.created.approximate.element.count", MetricType.COUNTER), + BLOOM_MODIFIED_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.modified.approximate.element.count", MetricType.COUNTER), + + QUERY_STORE_BLOOM_CREATED_APPROXIMATE_ELEMENT_COUNT("argus.core", "querystore.bloomfilter.created.approximate.element.count", MetricType.COUNTER), + + DATALAG_PER_DC_TIME_LAG("argus.core", "datalag.seconds"), + DATALAG_PER_DC_OFFSET_LAG("argus.core", "datalag.offset"), + QUERY_DATAPOINTS_LIMIT_EXCEEDED("argus.core", "query.datapoints.limit.exceeded"), + + ELASTIC_SEARCH_GET_FAILURES("argus.core", "elastic.search.get.failures", MetricType.COUNTER), - BLOOMFILTER_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.approximate.element.count"), - BLOOMFILTER_SCOPE_ONLY_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.scope.only.approximate.element.count"), - BLOOMFILTER_SCOPE_AND_METRIC_ONLY_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.scope.and.metric.only.approximate.element.count"), - BLOOMFILTER_METATAGS_APPROXIMATE_ELEMENT_COUNT("argus.core", "bloomfilter.metatags.approximate.element.count"); + CONSUMER_OFFSET_RECORDS_WRITE_FAILURES("argus.core", "consumer.offset.records.write.failures", MetricType.COUNTER), + CONSUMER_OFFSET_RECORDS_WRITE_LATENCY("argus.core", "consumer.offset.records.write.latency", MetricType.COUNTER), + CONSUMER_OFFSET_RECORDS_READ_LATENCY("argus.core", "consumer.offset.records.read.latency", MetricType.COUNTER), + + + ANNOTATION_RECORDS_WRITE_FAILURES("argus.core", "annotation.records.write.failures", MetricType.COUNTER), + IMAGE_RECORDS_WRITE_FAILURES("argus.core", "image.records.write.failures", MetricType.COUNTER); private final String _scope; private final String _metric; + private final MetricType _type; + private final String _jmxMetricNameSuffix; /** * Creates a new Counter object. * * @param scope The counter scope name. * @param metric The corresponding metric name. + * @param type The corresponding metric type. + * @param jmxMetricNameSuffix This will be appended to the JMX metric name used by Prometheus. */ - Counter(String scope, String metric) { + Counter(String scope, String metric, MetricType type, String jmxMetricNameSuffix) { _scope = scope; _metric = metric; + _type = type; + _jmxMetricNameSuffix = jmxMetricNameSuffix; + } + + /** + * Creates a new Counter object. + * + * @param scope The counter scope name. + * @param metric The corresponding metric name. + * @param type The corresponding metric type. + */ + Counter(String scope, String metric, MetricType type) { + this(scope, metric, type, MetricType.COUNTER == type ? ".count" : ""); + } + + /** + * Creates a new Counter object. + * + * @param scope The counter scope name. + * @param metric The corresponding metric name. + */ + Counter(String scope, String metric) { + this(scope, metric, MetricType.GAUGE, ""); } /** @@ -290,6 +364,23 @@ public String getScope() { public String getMetric() { return _metric; } + + /** + * Retrieves the metric type for the counter. + * + * @return The metric type for the counter. Will not be null. 
+ */
+ public MetricType getMetricType() {
+ return _type;
+ }
+
+ public String getJMXMetricNameSuffix() { return _jmxMetricNameSuffix; }
+
+ public static enum MetricType {
+ COUNTER,
+ GAUGE,
+ TIMER
+ }
 }
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/NotifierFactory.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/NotifierFactory.java
index f8aeac45e..da505db14 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/NotifierFactory.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/NotifierFactory.java
@@ -62,7 +62,13 @@ public final class NotifierFactory {
 private Provider _callbackNotifierProvider;
 @Inject
 private Provider _refocusNotifierProvider;
-
+ // IMPORTANT - todo - is there anything needed to set these up?
+ @Inject
+ private Provider<RefocusBooleanNotifier> _refocusBooleanNotifierProvider;
+ @Inject
+ private Provider<RefocusValueNotifier> _refocusValueNotifierProvider;
+ @Inject
+ private Provider<PagerDutyNotifier> _pagerDutyNotifierProvider;
 public synchronized CallbackNotifier getCallbackNotifier() {
 return _callbackNotifierProvider.get();
 }
@@ -129,5 +135,33 @@ public synchronized GusNotifier getGusNotifier() {
 public synchronized RefocusNotifier getRefocusNotifier() {
 return _refocusNotifierProvider.get();
 }
+
+ /**
+ * Returns an instance of the Refocus Boolean Notifier.
+ *
+ * @return An instance of the Refocus Boolean Notifier.
+ */
+ public synchronized RefocusBooleanNotifier getRefocusBooleanNotifier() {
+ return _refocusBooleanNotifierProvider.get();
+ }
+
+ /**
+ * Returns an instance of the Refocus Value Notifier.
+ *
+ * @return An instance of the Refocus Value Notifier.
+ */
+ public synchronized RefocusValueNotifier getRefocusValueNotifier() {
+ return _refocusValueNotifierProvider.get();
+ }
+
+ /**
+ * Returns an instance of the PagerDuty Notifier.
+ *
+ * @return An instance of the PagerDuty Notifier.
+ */
+ public synchronized PagerDutyNotifier getPagerDutyNotifier() {
+ return _pagerDutyNotifierProvider.get();
+ }
+
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeExporter.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/QueryStoreService.java
similarity index 72%
rename from ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeExporter.java
rename to ArgusCore/src/main/java/com/salesforce/dva/argus/service/QueryStoreService.java
index 475fa1c78..17c6b3918 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeExporter.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/QueryStoreService.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, Salesforce.com, Inc.
+ * Copyright (c) 2019, Salesforce.com, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
@@ -28,25 +28,25 @@
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
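Tying together the Counter changes earlier in this hunk: each counter now carries a MetricType, and the constructor chain derives a ".count" JMX suffix for COUNTER entries while two-argument entries default to GAUGE with an empty suffix. A small sketch of the resulting names (the concatenation is illustrative; the exporter's exact name assembly is not shown in this diff):

    Counter writes = Counter.METRIC_WRITES; // ("argus.core", "metric.writes", MetricType.COUNTER)
    String counterName = writes.getMetric() + writes.getJMXMetricNameSuffix(); // "metric.writes.count"

    Counter enabled = Counter.ALERTS_ENABLED; // two-arg constructor: MetricType.GAUGE, "" suffix
    String gaugeName = enabled.getMetric() + enabled.getJMXMetricNameSuffix(); // "alerts.enabled"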
 */
-package com.salesforce.dva.argus.service.monitor;
+
+package com.salesforce.dva.argus.service;
 
 import com.salesforce.dva.argus.entity.Metric;
 
+import java.util.List;
+
 /**
- * This interface allow program to export internal counter metrics to
- * external metric collecting / reporting system
- *
- * @author taozhang
- *
+ * Provides methods to store Argus user queries.
+ * @author ChandraVyas Annakula (cannakula@salesforce.com)
 */
-public interface GaugeExporter {
- /**
- * export internal metric and its latest value through GaugeExporter. System
- * need the metric counter object to create corresponding object name
- * for the exporter
- * @param metric The internal metric that will be exported
- * @param value The latest value of the metric.
- */
- void exportGauge(Metric metric, Double value);
+
+public interface QueryStoreService extends Service {
+
+ /**
+ * Stores user queries using metadata of the provided metrics.
+ *
+ * @param metrics The metrics for which to store query records. Cannot be null, but may be empty.
+ */
+ void putArgusWsQueries(List<Metric> metrics);
}
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/RefocusService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/RefocusService.java
new file mode 100644
index 000000000..731f54257
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/RefocusService.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright (c) 2016, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.salesforce.dva.argus.service;
+
+import com.salesforce.dva.argus.entity.History;
+import com.salesforce.dva.argus.service.alert.notifier.NotificationForwarder;
+
+/**
+ * Provides methods to forward notifications to Refocus.
+ *
+ * @author Ian Keck (ikeck@salesforce.com)
+ */
+// FUTURE - separate RefocusService and NotificationForwarder. Problem: how to set up the factories/providers so that
+// future - they point to the same object in the interim.
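QueryStoreService above is deliberately narrow: implementations see only the metrics a user queried and are expected to persist their metadata (scope, metric name) rather than datapoint values. A minimal sketch of a call site (the surrounding method is hypothetical):

    // Record which scope/metric combinations a user's query touched.
    void recordUserQuery(QueryStoreService queryStoreService, List<Metric> queriedMetrics) {
        queryStoreService.putArgusWsQueries(queriedMetrics);
    }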
+public interface RefocusService extends Service, NotificationForwarder { + + public boolean sendRefocusNotification(String subject_aspect, String value, String user_id, String token_id, History history) /*throws InterruptedException */; + public boolean sendRefocusNotification(String subject_aspect, String value, String user_id, String token_id, History history, long nextFireTime) /*throws InterruptedException */; + +} +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/SchemaService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/SchemaService.java index 78d217f53..c960fbd29 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/SchemaService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/SchemaService.java @@ -69,7 +69,7 @@ public interface SchemaService extends Service { void put(List metrics); /** - * Returns a list of schema records matched by the given query. + * Returns a list of schema records matched by the given query, taking limit and page into account * * @param query The query to evaluate. Cannot be null. * @@ -276,4 +276,4 @@ public String getName() { } } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ServiceFactory.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ServiceFactory.java index ce69cd214..8e5ce100c 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ServiceFactory.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/ServiceFactory.java @@ -1,315 +1,381 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.service; - -import com.google.inject.Inject; -import com.google.inject.Provider; - -/** - * The system service factory module. 
All services should be obtained from this class via injection. - * - * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com), Gaurav Kumar (gaurav.kumar@salesforce.com) - */ -public final class ServiceFactory { - - //~ Instance fields ****************************************************************************************************************************** - - @Inject - Provider _tsdbServiceProvider; - @Inject - Provider _collectionServiceProvider; - @Inject - Provider _mqServiceProvider; - @Inject - Provider _userServiceProvider; - @Inject - Provider _dashboardServiceProvider; - @Inject - Provider _oauthAuthorizationCodeServiceProvider; - @Inject - Provider _alertServiceProvider; - @Inject - Provider _metricServiceProvider; - @Inject - Provider _schedulingServiceProvider; - @Inject - Provider _globalInterlockServiceProvider; - @Inject - Provider _monitorServiceProvider; - @Inject - Provider _annotationServiceProvider; - @Inject - Provider _wardenServiceProvider; - @Inject - Provider _managementServiceProvider; - @Inject - Provider _auditServiceProvider; - @Inject - Provider _mailServiceProvider; - @Inject - Provider _authServiceProvider; - @Inject - Provider _historyServiceProvider; - @Inject - Provider _schemaServiceProvider; - @Inject - Provider _namespaceServiceProvider; - @Inject - Provider _cacheServiceProvider; - @Inject - Provider _discoveryServiceProvider; - @Inject - Provider _batchServiceProvider; - @Inject - Provider _chartServiceProvider; - - - //~ Methods ************************************************************************************************************************************** - - /** - * Returns an instance of the TSDB service. - * - * @return An instance of the TSDB service. - */ - public synchronized TSDBService getTSDBService() { - return _tsdbServiceProvider.get(); - } - - /** - * Returns an instance of the Collection service. - * - * @return An instance of the Collection service. - */ - public synchronized CollectionService getCollectionService() { - return _collectionServiceProvider.get(); - } - - /** - * Returns an instance of the MQ service. - * - * @return An instance of the MQ service. - */ - public synchronized MQService getMQService() { - return _mqServiceProvider.get(); - } - - /** - * Returns an instance of the user service. - * - * @return An instance of the user service. - */ - public synchronized UserService getUserService() { - return _userServiceProvider.get(); - } - - /** - * Returns an instance of the dashboard service. - * - * @return An instance of the dashboard service. - */ - public synchronized DashboardService getDashboardService() { - return _dashboardServiceProvider.get(); - } - - /** - * Returns an instance of OAuth Authorization Code service. - * - * @return An instance of the OAuthAuthorizationCode service. - */ - public synchronized OAuthAuthorizationCodeService getOAuthAuthorizationCodeService() { - return _oauthAuthorizationCodeServiceProvider.get(); - } - - /** - * Returns an instance of the alert service. - * - * @return An instance of the alert service. - */ - public synchronized AlertService getAlertService() { - return _alertServiceProvider.get(); - } - - /** - * Returns an instance of the metric service. - * - * @return An instance of the metric service. - */ - public synchronized MetricService getMetricService() { - return _metricServiceProvider.get(); - } - - /** - * Returns an instance of the scheduling service. - * - * @return An instance of the scheduling service. 
- */ - public synchronized SchedulingService getSchedulingService() { - return _schedulingServiceProvider.get(); - } - - /** - * Returns an instance of the global interlock service. - * - * @return An instance of the global interlock service. - */ - public synchronized GlobalInterlockService getGlobalInterlockService() { - return _globalInterlockServiceProvider.get(); - } - - /** - * Returns an instance of the monitor service. - * - * @return An instance of the monitor service. - */ - public synchronized MonitorService getMonitorService() { - return _monitorServiceProvider.get(); - } - - /** - * Returns an instance of the mail service. - * - * @return An instance of the mail service. - */ - public synchronized MailService getMailService() { - return _mailServiceProvider.get(); - } - - /** - * Returns an instance of the annotation service. - * - * @return An instance of the annotation service. - */ - public synchronized AnnotationService getAnnotationService() { - return _annotationServiceProvider.get(); - } - - /** - * Returns an instance of the warden service. - * - * @return An instance of the warden service. - */ - public synchronized WardenService getWardenService() { - return _wardenServiceProvider.get(); - } - - /** - * Returns an instance of the management service. - * - * @return An instance of the management service. - */ - public synchronized ManagementService getManagementService() { - return _managementServiceProvider.get(); - } - - /** - * Returns an instance of the audit service. - * - * @return An instance of the audit service. - */ - public synchronized AuditService getAuditService() { - return _auditServiceProvider.get(); - } - - /** - * Returns an instance of the authentication service. - * - * @return An instance of the audit service. - */ - public synchronized AuthService getAuthService() { - return _authServiceProvider.get(); - } - - /** - * Returns an instance of the job history service. - * - * @return An instance of the job history service. - */ - public synchronized HistoryService getHistoryService() { - return _historyServiceProvider.get(); - } - - /** - * Returns an instance of the schema service. - * - * @return An instance of the schema service. - */ - public synchronized SchemaService getSchemaService() { - return _schemaServiceProvider.get(); - } - - /** - * Returns an instance of the namespace service. - * - * @return An instance of the namespace service. - */ - public synchronized NamespaceService getNamespaceService() { - return _namespaceServiceProvider.get(); - } - - /** - * Returns an instance of the cache service. - * - * @return An instance of the cache service. - */ - public synchronized CacheService getCacheService() { - return _cacheServiceProvider.get(); - } - - /** - * Returns an instance of the Discovery service. - * - * @return An instance of the Discovery service. - */ - public synchronized DiscoveryService getDiscoveryService() { - return _discoveryServiceProvider.get(); - } - - /** - * Returns an instance of the batch service. - * - * @return An instance of the batch service. - */ - public synchronized BatchService getBatchService() { - return _batchServiceProvider.get(); - } - - /** - * Returns an instance of the chart service. - * - * @return An instance of the chart service. - */ - public synchronized ChartService getChartService() { - return _chartServiceProvider.get(); - } - -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service; + +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.Metric; + +/** + * The system service factory module. All services should be obtained from this class via injection. 
+ * + * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com), Gaurav Kumar (gaurav.kumar@salesforce.com) + */ +public final class ServiceFactory { + + //~ Instance fields ****************************************************************************************************************************** + + @Inject + Provider _tsdbServiceProvider; + @Inject + Provider _annotationStorageServiceProvider; + @Inject + Provider _collectionServiceProvider; + @Inject + Provider _mqServiceProvider; + @Inject + Provider _userServiceProvider; + @Inject + Provider _dashboardServiceProvider; + @Inject + Provider _oauthAuthorizationCodeServiceProvider; + @Inject + Provider _alertServiceProvider; + @Inject + Provider _metricServiceProvider; + @Inject + Provider _schedulingServiceProvider; + @Inject + Provider _globalInterlockServiceProvider; + @Inject + Provider _monitorServiceProvider; + @Inject + Provider _annotationServiceProvider; + @Inject + Provider _wardenServiceProvider; + @Inject + Provider _managementServiceProvider; + @Inject + Provider _auditServiceProvider; + @Inject + Provider _mailServiceProvider; + @Inject + Provider _authServiceProvider; + @Inject + Provider _historyServiceProvider; + @Inject + Provider _schemaServiceProvider; + @Inject + Provider _namespaceServiceProvider; + @Inject + Provider _cacheServiceProvider; + @Inject + Provider _discoveryServiceProvider; + @Inject + Provider _batchServiceProvider; + @Inject + Provider _chartServiceProvider; + @Inject + Provider _serviceManagementServiceProvider; + @Inject + Provider _refocusServiceProvider; + @Inject + Provider _queryStoreServiceProvider; + @Inject + Provider _imageServiceProvider; + @Inject + Provider _consumerOffsetMetricStorageService; + + + //~ Methods ************************************************************************************************************************************** + + /** + * Returns an instance of the TSDB service. + * + * @return An instance of the TSDB service. + */ + public synchronized TSDBService getTSDBService() { + return _tsdbServiceProvider.get(); + } + + /** + * Returns an instance of the annotation storage service. + * + * @return An instance of the annotation storage service. + */ + public synchronized AnnotationStorageService getAnnotationStorageService() { + return _annotationStorageServiceProvider.get(); + } + + /** + * Returns an instance of the Collection service. + * + * @return An instance of the Collection service. + */ + public synchronized CollectionService getCollectionService() { + return _collectionServiceProvider.get(); + } + + /** + * Returns an instance of the MQ service. + * + * @return An instance of the MQ service. + */ + public synchronized MQService getMQService() { + return _mqServiceProvider.get(); + } + + /** + * Returns an instance of the user service. + * + * @return An instance of the user service. + */ + public synchronized UserService getUserService() { + return _userServiceProvider.get(); + } + + /** + * Returns an instance of the dashboard service. + * + * @return An instance of the dashboard service. + */ + public synchronized DashboardService getDashboardService() { + return _dashboardServiceProvider.get(); + } + + /** + * Returns an instance of OAuth Authorization Code service. + * + * @return An instance of the OAuthAuthorizationCode service. 
+ */
+ public synchronized OAuthAuthorizationCodeService getOAuthAuthorizationCodeService() {
+ return _oauthAuthorizationCodeServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the alert service.
+ *
+ * @return An instance of the alert service.
+ */
+ public synchronized AlertService getAlertService() {
+ return _alertServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the metric service.
+ *
+ * @return An instance of the metric service.
+ */
+ public synchronized MetricService getMetricService() {
+ return _metricServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the scheduling service.
+ *
+ * @return An instance of the scheduling service.
+ */
+ public synchronized SchedulingService getSchedulingService() {
+ return _schedulingServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the global interlock service.
+ *
+ * @return An instance of the global interlock service.
+ */
+ public synchronized GlobalInterlockService getGlobalInterlockService() {
+ return _globalInterlockServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the monitor service.
+ *
+ * @return An instance of the monitor service.
+ */
+ public synchronized MonitorService getMonitorService() {
+ return _monitorServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the mail service.
+ *
+ * @return An instance of the mail service.
+ */
+ public synchronized MailService getMailService() {
+ return _mailServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the annotation service.
+ *
+ * @return An instance of the annotation service.
+ */
+ public synchronized AnnotationService getAnnotationService() {
+ return _annotationServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the warden service.
+ *
+ * @return An instance of the warden service.
+ */
+ public synchronized WardenService getWardenService() {
+ return _wardenServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the management service.
+ *
+ * @return An instance of the management service.
+ */
+ public synchronized ManagementService getManagementService() {
+ return _managementServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the audit service.
+ *
+ * @return An instance of the audit service.
+ */
+ public synchronized AuditService getAuditService() {
+ return _auditServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the authentication service.
+ *
+ * @return An instance of the authentication service.
+ */
+ public synchronized AuthService getAuthService() {
+ return _authServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the job history service.
+ *
+ * @return An instance of the job history service.
+ */
+ public synchronized HistoryService getHistoryService() {
+ return _historyServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the schema service.
+ *
+ * @return An instance of the schema service.
+ */
+ public synchronized SchemaService getSchemaService() {
+ return _schemaServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the namespace service.
+ *
+ * @return An instance of the namespace service.
+ */
+ public synchronized NamespaceService getNamespaceService() {
+ return _namespaceServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the cache service.
+ *
+ * @return An instance of the cache service.
+ */
+ public synchronized CacheService getCacheService() {
+ return _cacheServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the Discovery service.
+ *
+ * @return An instance of the Discovery service.
+ */
+ public synchronized DiscoveryService getDiscoveryService() {
+ return _discoveryServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the batch service.
+ *
+ * @return An instance of the batch service.
+ */
+ public synchronized BatchService getBatchService() {
+ return _batchServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the chart service.
+ *
+ * @return An instance of the chart service.
+ */
+ public synchronized ChartService getChartService() {
+ return _chartServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the service management service.
+ *
+ * @return An instance of the service management service.
+ */
+ public synchronized ServiceManagementService getServiceManagementService() {
+ return _serviceManagementServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the refocus service.
+ *
+ * @return An instance of the refocus service.
+ */
+ public synchronized RefocusService getRefocusService() {
+ return _refocusServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the Query Store service.
+ *
+ * @return An instance of the Query Store service.
+ */
+ public synchronized QueryStoreService getQueryStoreService() {
+ return _queryStoreServiceProvider.get();
+ }
+
+ /**
+ * Returns an instance of the Image service.
+ *
+ * @return An instance of the Image service.
+ */
+ public synchronized ImageService getImageService() {
+ return _imageServiceProvider.get();
+ }
+
+
+ /**
+ * Returns an instance of the Metric Storage service.
+ *
+ * @return An instance of the Metric Storage service.
+ */
+ public synchronized MetricStorageService getConsumerOffsetMetricStorageService() { return _consumerOffsetMetricStorageService.get(); }
+}
+/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
+
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/TSDBService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/TSDBService.java
index 03b104b9b..6c551045a 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/TSDBService.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/TSDBService.java
@@ -32,6 +32,7 @@
 package com.salesforce.dva.argus.service;
 
 import com.salesforce.dva.argus.entity.Annotation;
+import com.salesforce.dva.argus.entity.Histogram;
 import com.salesforce.dva.argus.entity.Metric;
 import com.salesforce.dva.argus.service.metric.transform.Transform;
 import com.salesforce.dva.argus.service.tsdb.AnnotationQuery;
@@ -50,41 +51,22 @@
 *
 * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com)
 */
-public interface TSDBService extends Service {
+public interface TSDBService extends AnnotationStorageService, MetricStorageService {
 
- //~ Methods **************************************************************************************************************************************
+ public static final long MILLIS_IN_A_DAY = 86400000L;
+
+ public static final long METRICS_RETENTION_PERIOD_MILLIS = 400*MILLIS_IN_A_DAY;
 
- /**
- * Writes metric data. Any existing data is overwritten.
- *
- * @param metrics The list of metrics to write. Cannot be null, but may be empty.
- */
- void putMetrics(List metrics);
+ //~ Methods **************************************************************************************************************************************
 
 /**
- * Reads metric data.
+ * Writes histogram data. Any existing data is overwritten.
 *
- * @param queries The list of queries to execute. Cannot be null, but may be empty.
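As a quick sanity check on the retention constants introduced above (they back the window cap applied by getTimeWindowInMillis earlier in this diff):

    long millisInADay = 86400000L;       // 24 * 60 * 60 * 1000
    long retention = 400 * millisInADay; // 34,560,000,000 ms, i.e. 400 days (roughly 1.1 years)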
- * - * @return The query results as a map of query to the corresponding metrics it returns. Will never be null, but may be empty. + * @param histograms The list of histograms to write. Cannot be null, but may be empty. */ - Map> getMetrics(List queries); + void putHistograms(List histograms); - /** - * Writes annotation data. Any existing data is overwritten. - * - * @param annotations The list of annotations to write. Cannot be null, but may be empty. - */ - void putAnnotations(List annotations); - /** - * Reads annotation data. - * - * @param queries The list of queries to execute. Cannot be null, but may be empty. - * - * @return The query results. Will never be null, but may be empty. - */ - List getAnnotations(List queries); static void collate(List metrics) { @@ -109,7 +91,7 @@ static Map> groupMetricsForAggregation(List metrics for(Metric m : metrics) { - Map tags = new TreeMap<>(m.getTags()); + Map tags = m.getTags(); StringBuilder sb = new StringBuilder(); for(Map.Entry entry : tags.entrySet()) { @@ -124,7 +106,6 @@ static Map> groupMetricsForAggregation(List metrics } groupedMetricsMap.get(sb.toString()).add(m); - } return groupedMetricsMap; @@ -196,6 +177,55 @@ public static String getWindow(long differenceInMillis) { } } + /** + * Enumeration of time window for a query + * + * @author Sundeep Tiyyagura (stiyyagura@salesforce.com) + */ + public static enum QueryStartTimeWindow { + + WITHIN_24_HRS("within_24_hrs", 0L, MILLIS_IN_A_DAY), + WITHIN_24_HRS_AND_7_DAYS("within_24_hrs_and_7_days", MILLIS_IN_A_DAY+1, 7*MILLIS_IN_A_DAY), + WITHIN_8_DAYS_AND_14_DAYS("within_8_days_and_14_days", 7*MILLIS_IN_A_DAY+1, 14*MILLIS_IN_A_DAY), + WITHIN_15_DAYS_AND_30_DAYS("within_15_days_and_30_days", 14*MILLIS_IN_A_DAY+1, 30*MILLIS_IN_A_DAY), + WITHIN_31_DAYS_AND_90_DAYS("within_31_days_and_90_days", 30*MILLIS_IN_A_DAY+1, 90*MILLIS_IN_A_DAY), + GREATER_THAN_90_DAYS("greater_than_90_days", 90*MILLIS_IN_A_DAY +1, 600*MILLIS_IN_A_DAY); + + private String _name; + + private long _startMillis; + + private long _endMillis; + + QueryStartTimeWindow(String name, long startMillis, long endMillis) { + this._name = name; + this._startMillis = startMillis; + this._endMillis = endMillis; + } + + public String getName() { + return _name; + } + + public long getStartMillis() { + return _startMillis; + } + + public long getEndMillis() { + return _endMillis; + } + + public static String getWindow(long windowInMillis) { + + for(QueryStartTimeWindow window : QueryStartTimeWindow.values()) { + if(windowInMillis>=window.getStartMillis() && windowInMillis<=window.getEndMillis()) { + return window.getName(); + } + } + return GREATER_THAN_90_DAYS.getName(); + } + } + /** * Enumeration of number of expanded time series for a query diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCache.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCache.java index 495c9f2ff..5ec52afe5 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCache.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCache.java @@ -51,77 +51,91 @@ public class AlertDefinitionsCache { - private static final Logger _logger = LoggerFactory.getLogger(AlertDefinitionsCache.class); - - private AlertDefinitionsCacheRefresherThread refresherThread; - - private static Map alertsMapById = new ConcurrentHashMap(); - - private static Map> alertsMapByCronEntry = new ConcurrentHashMap>(); - - private boolean 
alertsCacheInitialized = false; - - public AlertDefinitionsCache(AlertService alertService) { - refresherThread = new AlertDefinitionsCacheRefresherThread(this, alertService); - refresherThread.setDaemon(true); - refresherThread.start(); - } - - public Map getAlertsMapById() { - return alertsMapById; - } - - public void setAlertsMapById(Map alertsMapById) { - this.alertsMapById = alertsMapById; - } - - public Map> getAlertsMapByCronEntry() { - return alertsMapByCronEntry; - } - - public void setAlertsMapByCronEntry(Map> alertsMapByCronEntry) { - this.alertsMapByCronEntry = alertsMapByCronEntry; - } - - public boolean isAlertsCacheInitialized() { - return alertsCacheInitialized; - } - - public void setAlertsCacheInitialized(boolean alertsCacheInitialized) { - this.alertsCacheInitialized = alertsCacheInitialized; - } - - public static List getEnabledAlertsForMinute(long minuteStartTimeMillis){ - List enabledAlerts = new ArrayList(); - List enabledAlertIds = new ArrayList(); - - for(String cronEntry : alertsMapByCronEntry.keySet()) { - try { - - Date minuteStartTime = new Date(minuteStartTimeMillis); - String quartzCronEntry = Cron.convertToQuartzCronEntry(cronEntry); - Date previousMinuteLastSecondTime = new Date(minuteStartTimeMillis - 1000); - - // CronTrigger getFireTimeAfter only works for current and future time. For checking from a previous point of time - // we need to change startAtTime. - // https://stackoverflow.com/questions/7029196/quartz-crontrigger-getting-next-fire-time - CronTrigger cronTrigger = TriggerBuilder.newTrigger().withSchedule(CronScheduleBuilder.cronSchedule(quartzCronEntry)).startAt(previousMinuteLastSecondTime).build(); - Date nextFireTime = cronTrigger.getFireTimeAfter(previousMinuteLastSecondTime); - if(nextFireTime.equals(minuteStartTime)) { - enabledAlertIds.addAll(alertsMapByCronEntry.get(cronEntry)); - } - }catch(Exception e) { - _logger.error("Exception occured when trying to parse cron entry - " + cronEntry + " Exception - "+ ExceptionUtils.getFullStackTrace(e)); - } - } - Collections.sort(enabledAlertIds); - for(BigInteger alertId : enabledAlertIds) { - Alert a = alertsMapById.get(alertId); - if(a!=null) { - enabledAlerts.add(alertsMapById.get(alertId)); - } - } - return enabledAlerts; - } + private static final Logger _logger = LoggerFactory.getLogger(AlertDefinitionsCache.class); + + private AlertDefinitionsCacheRefresherThread refresherThread; + + private static Map alertsMapById = new ConcurrentHashMap(); + + private static Map> alertsMapByCronEntry = new ConcurrentHashMap>(); + + private boolean alertsCacheInitialized = false; + + public AlertDefinitionsCache(AlertService alertService) { + refresherThread = new AlertDefinitionsCacheRefresherThread(this, alertService); + refresherThread.setDaemon(true); + refresherThread.start(); + } + + // for unit testing + public AlertDefinitionsCache(AlertService alertService, boolean refreshThreadDaemonized) { + refresherThread = new AlertDefinitionsCacheRefresherThread(this, alertService); + if (refreshThreadDaemonized) { + refresherThread.setDaemon(true); + refresherThread.start(); + } + } + + // for unit testing + public AlertDefinitionsCache(AlertDefinitionsCacheRefresherThread refresherThread) { + this.refresherThread = refresherThread; + } + + public Map getAlertsMapById() { + return alertsMapById; + } + + public void setAlertsMapById(Map alertsMapById) { + this.alertsMapById = alertsMapById; + } + + public Map> getAlertsMapByCronEntry() { + return alertsMapByCronEntry; + } + + public void 
setAlertsMapByCronEntry(Map> alertsMapByCronEntry) { + this.alertsMapByCronEntry = alertsMapByCronEntry; + } + + public boolean isAlertsCacheInitialized() { + return alertsCacheInitialized; + } + + public void setAlertsCacheInitialized(boolean alertsCacheInitialized) { + this.alertsCacheInitialized = alertsCacheInitialized; + } + + public static List getEnabledAlertsForMinute(long minuteStartTimeMillis) { + List enabledAlerts = new ArrayList(); + List enabledAlertIds = new ArrayList(); + + for (String cronEntry : alertsMapByCronEntry.keySet()) { + try { + + Date minuteStartTime = new Date(minuteStartTimeMillis); + String quartzCronEntry = Cron.convertToQuartzCronEntry(cronEntry); + Date previousMinuteLastSecondTime = new Date(minuteStartTimeMillis - 1000); + + // CronTrigger getFireTimeAfter only works for current and future time. For checking from a previous point of time + // we need to change startAtTime. + // https://stackoverflow.com/questions/7029196/quartz-crontrigger-getting-next-fire-time + CronTrigger cronTrigger = TriggerBuilder.newTrigger().withSchedule(CronScheduleBuilder.cronSchedule(quartzCronEntry)).startAt(previousMinuteLastSecondTime).build(); + Date nextFireTime = cronTrigger.getFireTimeAfter(previousMinuteLastSecondTime); + if (nextFireTime.equals(minuteStartTime)) { + enabledAlertIds.addAll(alertsMapByCronEntry.get(cronEntry)); + } + } catch (Exception e) { + _logger.error("Exception occurred when trying to parse cron entry - " + cronEntry + " Exception - " + ExceptionUtils.getFullStackTrace(e)); + } + } + Collections.sort(enabledAlertIds); + for (BigInteger alertId : enabledAlertIds) { + Alert a = alertsMapById.get(alertId); + if (a != null) { + enabledAlerts.add(a); + } + } + return enabledAlerts; + } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThread.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThread.java index c03564bc3..1733650da 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThread.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThread.java @@ -33,7 +33,6 @@ import java.math.BigInteger; import java.util.ArrayList; import java.util.Date; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -44,86 +43,167 @@ import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.MonitorService.Counter; -public class AlertDefinitionsCacheRefresherThread extends Thread{ - - private final Logger _logger = LoggerFactory.getLogger(AlertDefinitionsCacheRefresherThread.class); - - // keeping the refresh interval at 1 minute, as this corresponds to the minimum alert execution interval based on cron expression - private static final Long REFRESH_INTERVAL_MILLIS = 60*1000L; - - private static final Long LOOKBACK_PERIOD_FOR_REFRESH_MILLIS = 5*REFRESH_INTERVAL_MILLIS; - - private AlertDefinitionsCache alertDefinitionsCache = null; - - private AlertService alertService; - - public AlertDefinitionsCacheRefresherThread(AlertDefinitionsCache cache, AlertService alertService) { - this.alertDefinitionsCache = cache; - this.alertService = alertService; - } - - public void run() { - while (!isInterrupted()) { - long executionTime = 0L; - try { - _logger.info("Starting alert definitions cache refresh"); -
long startTime = System.currentTimeMillis(); - if(!alertDefinitionsCache.isAlertsCacheInitialized()) { - List enabledAlerts = alertService.findAlertsByStatus(true); - Map enabledAlertsMap = enabledAlerts.stream().collect(Collectors.toMap(alert -> alert.getId(), alert -> alert)); - for(Alert a : enabledAlerts) { - addEntrytoCronMap(a); - } - alertDefinitionsCache.setAlertsMapById(enabledAlertsMap); - alertDefinitionsCache.setAlertsCacheInitialized(true); - }else { - List modifiedAlerts = alertService.findAlertsModifiedAfterDate(new Date(startTime - Math.max(executionTime + REFRESH_INTERVAL_MILLIS, LOOKBACK_PERIOD_FOR_REFRESH_MILLIS))); - // updating only the modified/deleted alerts in the cache - if(modifiedAlerts!=null && modifiedAlerts.size()>0) { - for(Alert a : modifiedAlerts) { - _logger.debug("Processing modified alert - {},{},{},{} ", a.getId(), a.getName(), a.getCronEntry(), a.getExpression()); - if(alertDefinitionsCache.getAlertsMapById().containsKey(a.getId())) { - if(a.isDeleted() || !a.isEnabled()) { - alertDefinitionsCache.getAlertsMapById().remove(a.getId()); - removeEntryFromCronMap(a.getId()); - }else { - alertDefinitionsCache.getAlertsMapById().put(a.getId(), a); - // removing the previous cron mapping and adding fresh just in case the mapping changed - removeEntryFromCronMap(a.getId()); - addEntrytoCronMap(a); - } - }else if(a.isEnabled() && !a.isDeleted()) { - alertDefinitionsCache.getAlertsMapById().put(a.getId(), a); - addEntrytoCronMap(a); - } - } - } - _logger.info("Number of modified alerts since last refresh - " + modifiedAlerts.size()); - } - executionTime = System.currentTimeMillis() - startTime; - _logger.info("Alerts cache refreshed successfully in {} millis. Number of alerts in cache - {}", executionTime, alertDefinitionsCache.getAlertsMapById().keySet().size()); - if(executionTime < REFRESH_INTERVAL_MILLIS) { - sleep(REFRESH_INTERVAL_MILLIS - executionTime); - } - }catch(Exception e) { - _logger.error("Exception occured when trying to refresh alert definition cache - " + ExceptionUtils.getFullStackTrace(e)); - } - } - } - - private void addEntrytoCronMap(Alert a) { - if(alertDefinitionsCache.getAlertsMapByCronEntry().get(a.getCronEntry())==null) { - alertDefinitionsCache.getAlertsMapByCronEntry().put(a.getCronEntry(), new ArrayList()); - } - alertDefinitionsCache.getAlertsMapByCronEntry().get(a.getCronEntry()).add(a.getId()); - } - - private void removeEntryFromCronMap(BigInteger alertId) { - for(String cronEntry : alertDefinitionsCache.getAlertsMapByCronEntry().keySet()) { - if(alertDefinitionsCache.getAlertsMapByCronEntry().get(cronEntry).contains(alertId)) { - alertDefinitionsCache.getAlertsMapByCronEntry().get(cronEntry).remove(alertId); - } - } - } +public class AlertDefinitionsCacheRefresherThread extends Thread { + + private final Logger _logger = LoggerFactory.getLogger(AlertDefinitionsCacheRefresherThread.class); + + // keeping the refresh interval at 1 minute, as this corresponds to the minimum alert execution interval based on cron expression + private static final Long REFRESH_INTERVAL_MILLIS = 60 * 1000L; + + private static final Long LOOKBACK_PERIOD_FOR_REFRESH_MILLIS = 5 * REFRESH_INTERVAL_MILLIS; + + private AlertDefinitionsCache alertDefinitionsCache = null; + + private AlertService alertService; + + public AlertDefinitionsCacheRefresherThread(AlertDefinitionsCache cache, AlertService alertService) { + this.alertDefinitionsCache = cache; + this.alertService = alertService; + } + + public void run() { + long lastExecutionTime = 0L; + while 
(!isInterrupted()) { + long executionTime = 0L, currentExecutionTime = 0L; + try { + long startTime = System.currentTimeMillis(); + if (!alertDefinitionsCache.isAlertsCacheInitialized()) { + _logger.info("Starting alert definitions cache initialization"); + lastExecutionTime = System.currentTimeMillis(); + initializeAlertDefinitionsCache(); + } else { + _logger.info("Starting alert definitions cache refresh"); + currentExecutionTime = System.currentTimeMillis(); + refreshAlertDefinitionsCache(startTime, executionTime, lastExecutionTime, currentExecutionTime); + } + + if (lastExecutionTime > 0) { + _logger.info("AlertCache was refreshed after {} millisec", currentExecutionTime - lastExecutionTime); + } + + lastExecutionTime = currentExecutionTime; + executionTime = System.currentTimeMillis() - startTime; + _logger.info("Alerts cache refresh was executed successfully in {} millis. Number of alerts in cache - {}", executionTime, alertDefinitionsCache.getAlertsMapById().keySet().size()); + if (executionTime < REFRESH_INTERVAL_MILLIS) { + sleep(REFRESH_INTERVAL_MILLIS - executionTime); + } + } catch (Exception e) { + _logger.error("Exception occurred when trying to refresh alert definition cache - " + ExceptionUtils.getFullStackTrace(e)); + } + } + } + + void refreshAlertDefinitionsCache(long startTime, long executionTime, long lastExecutionTime, long currentExecutionTime) { + List modifiedAlerts = alertService.findAlertsModifiedAfterDate(new Date(startTime - Math.max(executionTime + REFRESH_INTERVAL_MILLIS, LOOKBACK_PERIOD_FOR_REFRESH_MILLIS))); + + // updating only the modified/deleted alerts in the cache + long sumTimeToDiscover = 0L; + long sumTimeToDiscoverNew = 0L; + int newAlertsCount = 0; + int updatedAlertsCount = 0; + if (modifiedAlerts != null && modifiedAlerts.size() > 0) { + for (Alert a : modifiedAlerts) { + long timeToDiscover = 0; + _logger.debug("Processing modified alert - {},{},{},{} after {} milliseconds ", a.getId(), + a.getName(), a.getCronEntry(), a.getExpression(), timeToDiscover); + + boolean isValid = checkIsValidAlert(a); + + if (alertDefinitionsCache.getAlertsMapById().containsKey(a.getId())) { + timeToDiscover = currentExecutionTime - a.getModifiedDate().getTime(); + if (a.isDeleted() || !a.isEnabled() || !isValid) { + alertDefinitionsCache.getAlertsMapById().remove(a.getId()); + removeEntryFromCronMap(a.getId()); + sumTimeToDiscover += timeToDiscover; + updatedAlertsCount++; + _logger.debug("Found updated alert {} to be removed from cache which was updated at {}, created at {} lastExecutionTime {}, currentExecutionTime {}, timeToDiscover {}", + a.getId().toString(), a.getModifiedDate().getTime(), a.getCreatedDate().getTime(), lastExecutionTime, currentExecutionTime, timeToDiscover); + } else { + Alert alertFromCache = alertDefinitionsCache.getAlertsMapById().get(a.getId()); + boolean isAlertModified = !a.equals(alertFromCache); + _logger.debug("Reading alert from cache to check if it needs to be updated: Alert {} which was updated at {}, created at {} lastExecutionTime {}, currentExecutionTime {}, timeToDiscover {}", + alertFromCache.getId().toString(), alertFromCache.getModifiedDate().getTime(), alertFromCache.getCreatedDate().getTime(), lastExecutionTime, currentExecutionTime, timeToDiscover); + + // removing the previous cron mapping and adding fresh only in case the mapping changed + if (isAlertModified) { + removeEntryFromCronMap(a.getId()); + alertDefinitionsCache.getAlertsMapById().put(a.getId(), a); + addEntrytoCronMap(a); + sumTimeToDiscover += 
timeToDiscover; + updatedAlertsCount++; + } + } + } else if (a.isEnabled() && !a.isDeleted() && isValid) { + timeToDiscover = currentExecutionTime - a.getCreatedDate().getTime(); + sumTimeToDiscoverNew += timeToDiscover; + newAlertsCount++; + alertDefinitionsCache.getAlertsMapById().put(a.getId(), a); + addEntrytoCronMap(a); + _logger.debug("Found a new alert {} which was created at {}, lastExecutionTime {}, currentExecutionTime {}, timeToDiscover {}", + a.getId().toString(), a.getCreatedDate().getTime(), lastExecutionTime, currentExecutionTime, timeToDiscover); + } + } + } + + alertService.updateCounter(Counter.ALERTS_UPDATED_COUNT, (double) updatedAlertsCount); + _logger.info("Number of modified alerts since last refresh - " + updatedAlertsCount); + + alertService.updateCounter(Counter.ALERTS_CREATED_COUNT, (double) newAlertsCount); + _logger.info("Number of created alerts since last refresh - " + newAlertsCount); + + if (updatedAlertsCount > 0) { + long avgTimeToDiscover = sumTimeToDiscover / updatedAlertsCount; + alertService.updateCounter(Counter.ALERTS_UPDATE_LATENCY, (double) avgTimeToDiscover); + _logger.info("Average time to discovery of change - " + avgTimeToDiscover + " milliseconds"); + } + + if (newAlertsCount > 0) { + long avgTimeToDiscoverNewAlert = sumTimeToDiscoverNew / newAlertsCount; + alertService.updateCounter(Counter.ALERTS_NEW_LATENCY, (double) avgTimeToDiscoverNewAlert); + _logger.info("Average time to discovery of new alert - " + avgTimeToDiscoverNewAlert + " milliseconds"); + } + } + + void initializeAlertDefinitionsCache() { + List enabledAlerts = alertService.findAlertsByStatus(true); + Map enabledValidAlertsMap = enabledAlerts.stream(). + filter(this::checkIsValidAlert).
+ collect(Collectors.toMap(alert -> alert.getId(), alert -> alert)); + for (Alert a : enabledAlerts) { + if (enabledValidAlertsMap.containsKey(a.getId())) { + addEntrytoCronMap(a); + } + } + alertDefinitionsCache.setAlertsMapById(enabledValidAlertsMap); + alertDefinitionsCache.setAlertsCacheInitialized(true); + } + + private void addEntrytoCronMap(Alert a) { + if (alertDefinitionsCache.getAlertsMapByCronEntry().get(a.getCronEntry()) == null) { + alertDefinitionsCache.getAlertsMapByCronEntry().put(a.getCronEntry(), new ArrayList()); + } + alertDefinitionsCache.getAlertsMapByCronEntry().get(a.getCronEntry()).add(a.getId()); + } + + private void removeEntryFromCronMap(BigInteger alertId) { + for (String cronEntry : alertDefinitionsCache.getAlertsMapByCronEntry().keySet()) { + if (alertDefinitionsCache.getAlertsMapByCronEntry().get(cronEntry).contains(alertId)) { + alertDefinitionsCache.getAlertsMapByCronEntry().get(cronEntry).remove(alertId); + } + } + } + + private boolean checkIsValidAlert(Alert a) { + if (!a.isValid()) { + String msg = a.validationMessage(); + _logger.info("AlertDefinitionsCache: Excluding INVALID ALERT {},{},{},{} : {}", + a.getId(), a.getName(), a.getCronEntry(), a.getExpression(), msg); + return false; + } + return true; + } } + diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/DefaultAlertService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/DefaultAlertService.java index 4aff64842..bc18c20ee 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/DefaultAlertService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/DefaultAlertService.java @@ -31,40 +31,10 @@ package com.salesforce.dva.argus.service.alert; -import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; -import static java.math.BigInteger.ZERO; - -import java.io.Serializable; -import java.math.BigInteger; -import java.text.MessageFormat; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.TimeZone; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import javax.persistence.EntityManager; - -import org.apache.commons.lang.exception.ExceptionUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.module.SimpleModule; +import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; import com.google.inject.Provider; import com.google.inject.persist.Transactional; @@ -72,6 +42,7 @@ import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.History.JobStatus; import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.Notification; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.entity.Trigger; @@ -86,25 +57,81 @@ import com.salesforce.dva.argus.service.MonitorService.Counter; import 
com.salesforce.dva.argus.service.NotifierFactory; import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetrievalContext; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever; import com.salesforce.dva.argus.service.jpa.DefaultJPAService; +import com.salesforce.dva.argus.service.mail.EmailContext; +import com.salesforce.dva.argus.service.metric.MetricQueryResult; import com.salesforce.dva.argus.service.metric.transform.MissingDataException; +import com.salesforce.dva.argus.service.monitor.DataLagService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.service.alert.testing.AlertTestResults; import com.salesforce.dva.argus.util.AlertUtils; -import com.salesforce.dva.argus.util.Cron; +import com.salesforce.dva.argus.util.MonitoringUtils; +import com.salesforce.dva.argus.util.RequestContext; +import com.salesforce.dva.argus.util.RequestContextHolder; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.io.Serializable; +import java.math.BigInteger; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; +import static java.math.BigInteger.ZERO; /** * Default implementation of the alert service. 
* - * @author Tom Valine (tvaline@salesforce.com), Raj sarkapally (rsarkapally@salesforce.com), Dongpu Jin (djin@salesforce.com) + * @author Tom Valine (tvaline@salesforce.com), Raj sarkapally (rsarkapally@salesforce.com), Dongpu Jin (djin@salesforce.com), Ian Keck (ikeck@salesforce.com) */ -public class DefaultAlertService extends DefaultJPAService implements AlertService { +public class DefaultAlertService extends DefaultJPAService implements AlertService +{ //~ Static fields/initializers ******************************************************************************************************************* + private static final String ACTIONTAG = "action"; + private static final String ALERTIDTAG = "alertId"; + private static final String RETRIESTAG = "retries"; + private static final String HOSTTAG = "host"; + private static final String NOTIFYTARGETTAG = "notifyTarget"; + private static final String STATUSTAG = "status"; private static final String USERTAG = "user"; + + private static final String ACTION_CLEARED = "cleared"; + private static final String ACTION_MISSINGDATA = "missingdata"; + private static final String ACTION_NOTIFIED = "notified"; + private static final String ACTION_TRIGGERED = "triggered"; + private static final String ALERTSCOPE = "argus.alerts"; private static final BigInteger DEFAULTALERTID = new BigInteger("0"); private static final String DEFAULTUSER = "none"; - + private static final String STATUS_SUCCESS = "succeeded"; + private static final String STATUS_FAILURE = "failed"; + private static final Long EVALUATIONDELAY = 1000L * 60; //~ Instance fields ****************************************************************************************************************************** private final Logger _logger = LoggerFactory.getLogger(DefaultAlertService.class); @@ -113,6 +140,7 @@ public class DefaultAlertService extends DefaultJPAService implements AlertServi private final TSDBService _tsdbService; private final MetricService _metricService; private final MailService _mailService; + private final ImageDataRetriever _imageDataRetriever; private final SystemConfiguration _configuration; private final HistoryService _historyService; private final MonitorService _monitorService; @@ -120,11 +148,13 @@ public class DefaultAlertService extends DefaultJPAService implements AlertServi private final ObjectMapper _mapper = new ObjectMapper(); private static NotificationsCache _notificationsCache = null; private static List _whiteListedScopeRegexPatterns = null; + private static List _whiteListedUserRegexPatterns = null; private static final String HOSTNAME; //~ Constructors ********************************************************************************************************************************* - static { + static + { // Can fail if DNS is broken. // ToDo Handle the failure. HOSTNAME = SystemConfiguration.getHostname(); @@ -133,21 +163,22 @@ public class DefaultAlertService extends DefaultJPAService implements AlertServi /** * Creates a new DefaultAlertService object. * - * @param configuration The system configuration instance to use. Cannot be null. - * @param mqService The MQ service instance to use. Cannot be null. - * @param metricService The Metric service instance to use. Cannot be null. - * @param auditService The audit service instance to use. Cannot be null. - * @param tsdbService The TSDB service instance to use. - * @param mailService The mail service instance to use. Cannot be null. - * @param historyService The job history service instance to use. 
Cannot be null. - * @param monitorService The monitor service instance to use. Cannot be null. - * @param notifierFactory The notifier factory to use - * @param emProvider The entity manager provider to use + * @param configuration The system configuration instance to use. Cannot be null. + * @param mqService The MQ service instance to use. Cannot be null. + * @param metricService The Metric service instance to use. Cannot be null. + * @param auditService The audit service instance to use. Cannot be null. + * @param tsdbService The TSDB service instance to use. + * @param mailService The mail service instance to use. Cannot be null. + * @param historyService The job history service instance to use. Cannot be null. + * @param monitorService The monitor service instance to use. Cannot be null. + * @param notifierFactory The notifier factory to use + * @param emProvider The entity manager provider to use */ @Inject - public DefaultAlertService(SystemConfiguration configuration, MQService mqService, MetricService metricService, - AuditService auditService, TSDBService tsdbService, MailService mailService, HistoryService historyService, - MonitorService monitorService, NotifierFactory notifierFactory, Provider emProvider) { + public DefaultAlertService(SystemConfiguration configuration, MQService mqService, MetricService metricService, + AuditService auditService, TSDBService tsdbService, MailService mailService, HistoryService historyService, + MonitorService monitorService, ImageDataRetriever imageDataRetriever, NotifierFactory notifierFactory, Provider emProvider) + { super(auditService, configuration); requireArgument(mqService != null, "MQ service cannot be null."); requireArgument(metricService != null, "Metric service cannot be null."); @@ -159,6 +190,7 @@ public DefaultAlertService(SystemConfiguration configuration, MQService mqServic _configuration = configuration; _historyService = historyService; _monitorService = monitorService; + _imageDataRetriever = imageDataRetriever; _notifierFactory = notifierFactory; _emProvider = emProvider; @@ -167,7 +199,8 @@ public DefaultAlertService(SystemConfiguration configuration, MQService mqServic //~ Methods ************************************************************************************************************************************** - private void _initializeObjectMapper() { + private void _initializeObjectMapper() + { SimpleModule module = new SimpleModule(); module.addSerializer(Alert.class, new Alert.Serializer()); @@ -181,13 +214,13 @@ private void _initializeObjectMapper() { @Override @Transactional - public Alert updateAlert(Alert alert) { + public Alert updateAlert(Alert alert) throws RuntimeException + { requireNotDisposed(); requireArgument(alert != null, "Cannot update a null alert"); - boolean isCronValid = Cron.isCronEntryValid(alert.getCronEntry()); - if(!isCronValid) { - throw new RuntimeException("Input cron entry - " + alert.getCronEntry() + " is invalid"); - } + + alert.validateAlert(); // prevent any invalid alerts from being committed to the database. 
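// A note on the validateAlert() call above: the old inline check it replaces only validated the
// cron entry. A rough sketch of what such an entity-side gate can look like (hypothetical body;
// Alert.validateAlert() itself is not shown in this diff):
//
//     public void validateAlert() {
//         if (!Cron.isCronEntryValid(getCronEntry())) {
//             throw new RuntimeException("Input cron entry - " + getCronEntry() + " is invalid");
//         }
//         // ...plus expression and notification checks, so no invalid alert reaches the database.
//     }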
+ alert.setModifiedDate(new Date()); EntityManager em = _emProvider.get(); @@ -201,7 +234,8 @@ public Alert updateAlert(Alert alert) { @Override @Transactional - public void deleteAlert(String name, PrincipalUser owner) { + public void deleteAlert(String name, PrincipalUser owner) + { requireNotDisposed(); requireArgument(name != null && !name.isEmpty(), "Name cannot be null or empty."); requireArgument(owner != null, "Owner cannot be null."); @@ -213,7 +247,8 @@ public void deleteAlert(String name, PrincipalUser owner) { @Override @Transactional - public void deleteAlert(Alert alert) { + public void deleteAlert(Alert alert) + { requireNotDisposed(); requireArgument(alert != null, "Alert cannot be null."); _logger.debug("Deleting an alert {}.", alert); @@ -226,7 +261,8 @@ public void deleteAlert(Alert alert) { @Override @Transactional - public void markAlertForDeletion(String name, PrincipalUser owner) { + public void markAlertForDeletion(String name, PrincipalUser owner) + { requireNotDisposed(); requireArgument(name != null && !name.isEmpty(), "Name cannot be null or empty."); requireArgument(owner != null, "Owner cannot be null."); @@ -238,7 +274,8 @@ public void markAlertForDeletion(String name, PrincipalUser owner) { @Override @Transactional - public void markAlertForDeletion(Alert alert) { + public void markAlertForDeletion(Alert alert) + { requireNotDisposed(); requireArgument(alert != null, "Alert cannot be null."); _logger.debug("Marking alert for deletion {}.", alert); @@ -246,9 +283,9 @@ public void markAlertForDeletion(Alert alert) { EntityManager em = _emProvider.get(); alert.setDeleted(true); - alert.setEnabled(false); alert.setName(alert.getName() + System.currentTimeMillis()); alert.setModifiedDate(new Date()); + alert.setEnabled(false); Alert result = mergeEntity(em, alert); em.flush(); @@ -257,20 +294,23 @@ public void markAlertForDeletion(Alert alert) { } @Override - public List findAlertsMarkedForDeletion() { + public List findAlertsMarkedForDeletion() + { requireNotDisposed(); return findEntitiesMarkedForDeletion(_emProvider.get(), Alert.class, -1); } @Override - public List findAlertsMarkedForDeletion(final int limit) { + public List findAlertsMarkedForDeletion(final int limit) + { requireNotDisposed(); requireArgument(limit > 0, "Limit must be greater than 0."); return findEntitiesMarkedForDeletion(_emProvider.get(), Alert.class, limit); } @Override - public List findAlertsByOwner(PrincipalUser owner, boolean metadataOnly) { + public List findAlertsByOwner(PrincipalUser owner, boolean metadataOnly) + { requireNotDisposed(); requireArgument(owner != null, "Owner cannot be null."); @@ -278,15 +318,17 @@ public List findAlertsByOwner(PrincipalUser owner, boolean metadataOnly) } @Override - public List findAlertsByOwnerPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText) { + public List findAlertsByOwnerPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText, String sortField, String sortOrder) + { requireNotDisposed(); requireArgument(owner != null, "Owner cannot be null."); - return Alert.findByOwnerMetaPaged(_emProvider.get(), owner, limit, offset, searchText); + return Alert.findByOwnerMetaPaged(_emProvider.get(), owner, limit, offset, searchText, sortField, sortOrder); } @Override - public Alert findAlertByPrimaryKey(BigInteger id) { + public Alert findAlertByPrimaryKey(BigInteger id) + { requireNotDisposed(); requireArgument(id != null && id.compareTo(ZERO) > 0, "ID must be a positive non-zero value."); @@ -301,7 +343,8 
@@ public Alert findAlertByPrimaryKey(BigInteger id) { } @Override - public List findAlertsByPrimaryKeys(List ids) { + public List findAlertsByPrimaryKeys(List ids) + { requireNotDisposed(); requireArgument(ids != null && !ids.isEmpty(), "IDs list cannot be null or empty."); @@ -315,40 +358,84 @@ public List findAlertsByPrimaryKeys(List ids) { return result; } + + // ****************************************************************************** + // Alert Evaluation + // ****************************************************************************** + @Override - public void updateNotificationsActiveStatusAndCooldown(List notifications) { + public void updateNotificationsActiveStatusAndCooldown(List notifications) + { List ids = notifications.stream().map(x -> x.getId()).collect(Collectors.toList()); _logger.debug("Updating notifications: {}", ids); - if(_notificationsCache == null) { - synchronized(DefaultAlertService.class) { - if(_notificationsCache == null) { + if (_notificationsCache == null) + { + synchronized (DefaultAlertService.class) + { + if (_notificationsCache == null) + { _notificationsCache = new NotificationsCache(_emProvider); } } } // if cache is refreshed, we read the cooldown and trigger info from cache, else we query the db directly - if(_notificationsCache.isNotificationsCacheRefreshed()) { - for(Notification notification : notifications) { - if(_notificationsCache.getNotificationActiveStatusMap().get(notification.getId())!=null) { + if (_notificationsCache.isNotificationsCacheRefreshed()) + { + for (Notification notification : notifications) + { + if (_notificationsCache.getNotificationActiveStatusMap().get(notification.getId()) != null) + { notification.setActiveStatusMap(_notificationsCache.getNotificationActiveStatusMap().get(notification.getId())); - }else { + } else + { notification.getActiveStatusMap().clear(); } - if(_notificationsCache.getNotificationCooldownExpirationMap().get(notification.getId())!=null) { + if (_notificationsCache.getNotificationCooldownExpirationMap().get(notification.getId()) != null) + { notification.setCooldownExpirationMap(_notificationsCache.getNotificationCooldownExpirationMap().get(notification.getId())); - }else { + } else + { notification.getCooldownExpirationMap().clear(); } } - }else { + } else + { Notification.updateActiveStatusAndCooldown(_emProvider.get(), notifications); } } + private void loadWhiteListRegexPatterns() + { + if (_whiteListedScopeRegexPatterns == null) + { + String whiteListedScopesProperty = _configuration.getValue(DataLagService.Property.DATA_LAG_WHITE_LISTED_SCOPES.getName(), DataLagService.Property.DATA_LAG_WHITE_LISTED_SCOPES.getDefaultValue()); + if (!StringUtils.isEmpty(whiteListedScopesProperty)) + { + _whiteListedScopeRegexPatterns = Stream.of(whiteListedScopesProperty.split(",")).map(elem -> Pattern.compile(elem.toLowerCase())).collect(Collectors.toList()); + } else + { + _whiteListedScopeRegexPatterns = new ArrayList(); + } + } + + if (_whiteListedUserRegexPatterns == null) + { + String whiteListedUsersProperty = _configuration.getValue(DataLagService.Property.DATA_LAG_WHITE_LISTED_USERS.getName(), DataLagService.Property.DATA_LAG_WHITE_LISTED_USERS.getDefaultValue()); + if (!StringUtils.isEmpty(whiteListedUsersProperty)) + { + _whiteListedUserRegexPatterns = Stream.of(whiteListedUsersProperty.split(",")).map(elem -> Pattern.compile(elem.toLowerCase())).collect(Collectors.toList()); + } else + { + _whiteListedUserRegexPatterns = new ArrayList(); + } + } + } + + @Override @Transactional - public List 
executeScheduledAlerts(int alertCount, int timeout) { + public Integer executeScheduledAlerts(int alertCount, int timeout) { requireNotDisposed(); requireArgument(alertCount > 0, "Alert count must be greater than zero."); requireArgument(timeout > 0, "Timeout in milliseconds must be greater than zero."); @@ -361,215 +448,399 @@ public List executeScheduledAlerts(int alertCount, int timeout) { Map alertsByNotificationId = new HashMap<>(); Map alertEnqueueTimestampsByAlertId = new HashMap<>(); - for(AlertWithTimestamp alertWithTimestamp : alertsWithTimestamp) { + loadWhiteListRegexPatterns(); + + _monitorService.modifyCounter(Counter.ALERTS_EVALUATED_RAWTOTAL, alertsWithTimestamp.size(), new HashMap<>()); + for (AlertWithTimestamp alertWithTimestamp : alertsWithTimestamp) + { String serializedAlert = alertWithTimestamp.getSerializedAlert(); + + _logger.debug(MessageFormat.format("serializedAlert {0}", serializedAlert)); + Alert alert; - try { + try + { alert = _mapper.readValue(serializedAlert, Alert.class); - } catch (Exception e) { + } catch (Exception e) + { String logMessage = MessageFormat.format("Failed to deserialize alert {0}. Full stack trace of exception {1}", serializedAlert, ExceptionUtils.getFullStackTrace(e)); _logger.warn(logMessage); logAlertStatsOnFailure(DEFAULTALERTID, DEFAULTUSER); + continue; + } + if (!_shouldEvaluateAlert(alert, alert.getId())) + { + logAlertStatsOnFailure(alert.getId(), alert.getOwner().getUserName()); continue; } - if(!_shouldEvaluateAlert(alert, alert.getId())) { + if (alertEnqueueTimestampsByAlertId.containsKey(alert.getId())) + { + String logMessage = MessageFormat.format("Found alert {0}:{1} with multiple timestamps. ExistingTime:{2} NewTime:{3}. Existing evaluation will be overwritten.", + alert.getId(), alert.getName(), alertEnqueueTimestampsByAlertId.get(alert.getId()), alertWithTimestamp.getAlertEnqueueTime()); + _logger.warn(logMessage); + // Treating this as a failure. logAlertStatsOnFailure(alert.getId(), alert.getOwner().getUserName()); - continue; } alertEnqueueTimestampsByAlertId.put(alert.getId(), alertWithTimestamp.getAlertEnqueueTime()); List notifications = new ArrayList<>(alert.getNotifications()); alert.setNotifications(null); - for(Notification n : notifications) { + + if (notifications.size() == 0) + { + String logMessage = MessageFormat.format("Found alert {0}:{1} with no notification.", alert.getId(), alert.getName()); + _logger.warn(logMessage); + + // Treating this as a failure. + logAlertStatsOnFailure(alert.getId(), alert.getOwner().getUserName()); + continue; + } + + for (Notification n : notifications) + { + + if (alertsByNotificationId.containsKey(n.getId())) + { + String logMessage = MessageFormat.format("Found alert {0}:{1} where notification {2} is present multiple times. ", + alert.getId(), alert.getName(), n.getId()); + _logger.warn(logMessage); + } + alertsByNotificationId.put(n.getId(), alert); } allNotifications.addAll(notifications); } - // Update the state of notification objects from the database since the notification contained - // in the serialized alert might be stale. This is because the scheduler only refreshes the alerts - // after a specified REFRESH_INTERVAL. And within this interval, the notification state may have changed. - // For example, the notification may have been updated to be on cooldown by a previous alert evaluation. - // Or it's active/clear status may have changed. 
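The comment block that follows explains the staleness problem: the scheduler serializes alerts at enqueue time, so any notification state mutated between enqueue and evaluation (a cooldown set by an earlier evaluation, an active/clear flip) would be lost if the snapshot were trusted. A minimal sketch of the warm-cache branch of updateNotificationsActiveStatusAndCooldown, with generic parameters and map value types assumed since the flattened diff strips them:

    for (Notification n : allNotifications) {
        // Prefer the freshly refreshed cache state over the possibly stale snapshot.
        Map<String, Long> cooldowns = _notificationsCache.getNotificationCooldownExpirationMap().get(n.getId());
        if (cooldowns != null) {
            n.setCooldownExpirationMap(cooldowns);
        } else {
            n.getCooldownExpirationMap().clear(); // no cached state: treat as not on cooldown
        }
    }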
+ // Update the state of notification objects from the database since the notification contained + // in the serialized alert might be stale. This is because the scheduler only refreshes the alerts + // after a specified REFRESH_INTERVAL. And within this interval, the notification state may have changed. + // For example, the notification may have been updated to be on cooldown by a previous alert evaluation. + // Or its active/clear status may have changed. + + // Adds updated notifications back to each alert. updateNotificationsActiveStatusAndCooldown(allNotifications); - for(Notification n : allNotifications) { + for (Notification n : allNotifications) + { alertsByNotificationId.get(n.getId()).addNotification(n); } Set alerts = new HashSet<>(alertsByNotificationId.values()); + + long jobStartTime, evaluateEndTime; + Long alertEnqueueTimestamp; + + String logMessage; + History history; + + NotificationProcessor np = new NotificationProcessor(this, _logger); + _monitorService.modifyCounter(Counter.ALERTS_EVALUATED_TOTAL, alerts.size(), new HashMap<>()); + boolean datalagMonitorEnabled = Boolean.valueOf(_configuration.getValue(DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getName(), DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getDefaultValue())); + AtomicInteger numberOfAlertsEvaluated = new AtomicInteger(alerts.size()); for (Alert alert : alerts) { - long jobStartTime = System.currentTimeMillis(); - long jobEndTime = 0; - Long alertEnqueueTimestamp = 0L; - - - String logMessage = null; - History history = null; - - if(Boolean.valueOf(_configuration.getValue(SystemConfiguration.Property.DATA_LAG_MONITOR_ENABLED))){ - if(_monitorService.isDataLagging()) { - if(_whiteListedScopeRegexPatterns==null) { - String whiteListedScopesProperty = _configuration.getValue(SystemConfiguration.Property.DATA_LAG_WHITE_LISTED_SCOPES); - if(!StringUtils.isEmpty(whiteListedScopesProperty)) { - _whiteListedScopeRegexPatterns = Stream.of(whiteListedScopesProperty.split(",")).map (elem -> Pattern.compile(elem.toLowerCase())).collect(Collectors.toList()); - }else { - _whiteListedScopeRegexPatterns = new ArrayList(); - } - } - if(_whiteListedScopeRegexPatterns.isEmpty() || !AlertUtils.isScopePresentInWhiteList(alert.getExpression(), _whiteListedScopeRegexPatterns)) { - history = new History(History.addDateToMessage(JobStatus.SKIPPED.getDescription()), HOSTNAME, alert.getId(), JobStatus.SKIPPED); - logMessage = MessageFormat.format("Skipping evaluating the alert with id: {0}.
because metric data was lagging", alert.getId().intValue()); - _logger.info(logMessage); - history.appendMessageNUpdateHistory(logMessage, null, 0); - history = _historyService.createHistory(alert, history.getMessage(), history.getJobStatus(), history.getExecutionTime()); - historyList.add(history); - Map tags = new HashMap<>(); - tags.put(USERTAG, alert.getOwner().getUserName()); - _monitorService.modifyCounter(Counter.ALERTS_SKIPPED, 1, tags); - continue; - } - } - } + jobStartTime = System.currentTimeMillis(); + evaluateEndTime = 0; + alertEnqueueTimestamp = alertEnqueueTimestampsByAlertId.get(alert.getId()); + updateRequestContext(alert); + + updateAlertStartEvaluationStats(alertEnqueueTimestampsByAlertId, alert, jobStartTime); history = new History(History.addDateToMessage(JobStatus.STARTED.getDescription()), HOSTNAME, alert.getId(), JobStatus.STARTED); Set missingDataTriggers = new HashSet(); - for(Trigger trigger : alert.getTriggers()) { - if(trigger.getType().equals(TriggerType.NO_DATA)) { + for (Trigger trigger : alert.getTriggers()) + { + if (trigger.getType().equals(TriggerType.NO_DATA)) + { missingDataTriggers.add(trigger); } } - try { - alertEnqueueTimestamp = alertEnqueueTimestampsByAlertId.get(alert.getId()); - List metrics = _metricService.getMetrics(alert.getExpression(), alertEnqueueTimestamp); - - if(areDatapointsEmpty(metrics)) { - if (alert.isMissingDataNotificationEnabled()) { - _sendNotificationForMissingData(alert); - logMessage = MessageFormat.format("Metric data does not exist for alert expression: {0}. Sent notification for missing data.", - alert.getExpression()); - _logger.info(logMessage); - history.appendMessageNUpdateHistory(logMessage, null, 0); - } else { - logMessage = MessageFormat.format("Metric data does not exist for alert expression: {0}. Missing data notification was not enabled.", - alert.getExpression()); - _logger.info(logMessage); - history.appendMessageNUpdateHistory(logMessage, null, 0); - } + boolean alertSkipped = false; + boolean alertFailure = false; + boolean alertEvaluationStarted = false; + boolean doesDatalagExistInAnyDC = false; - if(missingDataTriggers.size()>0) { - for(Notification notification : alert.getNotifications()) { - if (!notification.getTriggers().isEmpty()) { - _processMissingDataNotification(alert, history, missingDataTriggers, notification, true, alertEnqueueTimestamp); + try + { + alertEnqueueTimestamp = alertEnqueueTimestampsByAlertId.get(alert.getId()); + MetricQueryResult queryResult = _metricService.getMetrics(alert.getExpression(), alertEnqueueTimestamp); + MonitoringUtils.updateAlertMetricQueryPerfCounters(_monitorService, queryResult, alert.getOwner().getUserName()); // NOTE - ian - TODO - call this for historical testing? 
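// The defensive copy on the next line is what lets the data-lag filtering further down call
// metrics.removeIf(...) without mutating the list held by queryResult. A sketch of the
// interaction, assuming getMetricsList() returns a List<Metric> shared with the query result:
//
//     List<Metric> metrics = new ArrayList<>(queryResult.getMetricsList());
//     // later, for expressions without transforms:
//     metrics.removeIf(m -> shouldMetricBeRemovedForDataLag(alert, m, historyList));
//     // queryResult.getMetricsList() still holds the full, unfiltered result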
+ List metrics = new ArrayList<>(queryResult.getMetricsList()); + int initialMetricSize = metrics.size(); + doesDatalagExistInAnyDC = datalagMonitorEnabled && doesDatalagExistsInAtLeastOneDC(queryResult, alert); + + /* It works only for alerts with regex based expressions + TODO: Fix for expressions that do not go through discovery service ( i.e, non regex based expressions ) + */ +// if (initialMetricSize == 0 && alert.getModifiedDate() != null && ((System.currentTimeMillis() - alert.getModifiedDate().getTime()) / (24 * 60 * 60 * 1000)) > MetricSchemaRecord.DEFAULT_RETENTION_DISCOVERY_DAYS && // if Last Modified time was > DEFAULT_RETENTION_DISCOVERY_DAYS +// (_whiteListedScopeRegexPatterns.isEmpty() || !AlertUtils.isScopePresentInWhiteList(alert.getExpression(), _whiteListedScopeRegexPatterns))) { // not disable whitelisted argus alerts. + if (false) + { + _logger.info("Orphan Alert detected. Disabling it and notifying user. Alert Id: {}", alert.getId()); + Alert dbAlert = findAlertByPrimaryKey(alert.getId()); + dbAlert.setEnabled(false); + _sendOrphanAlertNotification(alert); + } else + { + + if (datalagMonitorEnabled) + { + /* Two Cases: 1. Contains transform, 2. Doesn't contain transform. + * If contain transform, disable if at least 1 dc is lagging. + * else disable per expanded expression specific lag. + * TODO: If transforms are independent, should we disable entirely or expression specific. + */ + if (queryResult.containsTransform() || initialMetricSize == 0) + { // Skip alert evaluation if the initial time series returned by metric service is null or if expression contains transforms and data lag exists in at least one dc. + if (doesDatalagExistInAnyDC) + { + logMessage = MessageFormat.format("Skipping Alert {0} Evaluation as data was lagging in at least one dc for expression: {1}", alert.getId().intValue(), alert.getExpression()); + updateDatalagHistory(alert, historyList, logMessage); + alertSkipped = true; + continue; } + } else + { // expanded alert expression doesn't contain any transforms. + metrics.removeIf(m -> shouldMetricBeRemovedForDataLag(alert, m, historyList)); + } + + if (initialMetricSize > 0 && metrics.size() == 0) + { // Skip alert evaluation if all the expanded alert expression contains dc with data lag and initial size was non-zero. + alertSkipped = true; + _logger.info(MessageFormat.format("Skipping Alert {0} Evaluation as the metrics expressions evaluation were skipped due to data lag. {1}", alert.getId().intValue(), alert.getExpression())); + continue; } } - } else { - //Only evaluate those triggers which are associated with any notification. 
- Set triggersToEvaluate = new HashSet<>(); - for(Notification notification : alert.getNotifications()) { - triggersToEvaluate.addAll(notification.getTriggers()); - } - Map> triggerFiredTimesAndMetricsByTrigger = _evaluateTriggers(triggersToEvaluate, - metrics, alert.getExpression(), alertEnqueueTimestamp); - - for(Notification notification : alert.getNotifications()) { - if (notification.getTriggers().isEmpty()) { - logMessage = MessageFormat.format("The notification {0} has no triggers.", notification.getName()); - _logger.info(logMessage); - history.appendMessageNUpdateHistory(logMessage, null, 0); - } else { - _processNotification(alert, history, metrics, triggerFiredTimesAndMetricsByTrigger, notification, alertEnqueueTimestamp); - if(missingDataTriggers.size()>0) { - // processing to possibly to clear missing data notification - _processMissingDataNotification(alert, history, missingDataTriggers, notification, false, alertEnqueueTimestamp); + alertEvaluationStarted = true; + + evaluateEndTime = System.currentTimeMillis(); // set evaluateEndTime to evaluate start time to override init value (0) + if (areDatapointsEmpty(metrics)) + { + _processMissingDataNotifications(np, alert, history, alertEnqueueTimestamp, missingDataTriggers); + + } else + { + // _logger.error("Alert: {}", alert.toString()); // DEBUG - REMOVE or add conditional here. maybe a log whitelist? - feature for debugging + + //Only evaluate those triggers which are associated with any notification. + Set triggersToEvaluate = new HashSet<>(); + for (Notification notification : alert.getNotifications()) + { + triggersToEvaluate.addAll(notification.getTriggers()); + } + + Map> triggerFiredTimesAndMetricsByTrigger = _evaluateTriggers(triggersToEvaluate, + metrics, alert.getExpression(), alertEnqueueTimestamp, this::noopTags); + + evaluateEndTime = System.currentTimeMillis(); + + for (Notification notification : alert.getNotifications()) + { + if (notification.getTriggers().isEmpty()) + { + _processTriggerlessNotification(np, alert, history, metrics, notification, alertEnqueueTimestamp); + } else + { + _processNotification(np, alert, history, metrics, triggerFiredTimesAndMetricsByTrigger, notification, alertEnqueueTimestamp); + if (missingDataTriggers.size() > 0) + { + // processing to possibly to clear missing data notification + _processMissingDataNotification(np, alert, history, missingDataTriggers, notification, false, alertEnqueueTimestamp); + } } } } } - jobEndTime = System.currentTimeMillis(); - long evalLatency = jobEndTime - jobStartTime; - history.appendMessageNUpdateHistory("Alert was evaluated successfully.", JobStatus.SUCCESS, evalLatency); - Map tags = new HashMap<>(); - tags.put("host", HOSTNAME); - publishAlertTrackingMetric(Counter.ALERTS_EVALUATED.getMetric(), alert.getId(), 1.0/*success*/, tags); - tags = new HashMap<>(); - tags.put(USERTAG, alert.getOwner().getUserName()); - _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_LATENCY, evalLatency, tags); - } catch (MissingDataException mde) { - handleAlertEvaluationException(alert, jobStartTime, alertEnqueueTimestamp, history, missingDataTriggers, mde, true); - } catch (Exception ex) { - handleAlertEvaluationException(alert, jobStartTime, alertEnqueueTimestamp, history, missingDataTriggers, ex, false); - } finally { - Map tags = new HashMap<>(); - tags.put(USERTAG, alert.getOwner().getUserName()); - _monitorService.modifyCounter(Counter.ALERTS_EVALUATED, 1, tags); + history.appendMessageNUpdateHistory("Alert was evaluated successfully.", 
JobStatus.SUCCESS, System.currentTimeMillis() - jobStartTime); + + } catch (MissingDataException mde) + { + if (doesDatalagExistInAnyDC && !alertEvaluationStarted) + { + alertSkipped = true; + } + alertFailure = true; + _handleAlertEvaluationException(np, alert, jobStartTime, alertEnqueueTimestamp, history, missingDataTriggers, mde, true); + } catch (Exception ex) + { + if (doesDatalagExistInAnyDC && !alertEvaluationStarted) + { + alertSkipped = true; + } + alertFailure = true; + _handleAlertEvaluationException(np, alert, jobStartTime, alertEnqueueTimestamp, history, missingDataTriggers, ex, false); + } finally + { + history = _historyService.createHistory(alert, history.getMessage(), history.getJobStatus(), history.getExecutionTime()); historyList.add(history); + + Map tags = new HashMap<>(); + tags.put(HOSTTAG, HOSTNAME); + tags.put(USERTAG, alert.getOwner().getUserName()); + + if (!alertSkipped) + { + _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_LATENCY, System.currentTimeMillis() - jobStartTime, tags); + if (evaluateEndTime == 0) + { + evaluateEndTime = System.currentTimeMillis(); + } + _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_ONLY_LATENCY, evaluateEndTime - jobStartTime, tags); + _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_LATENCY_COUNT, 1, tags); + } + + _monitorService.modifyCounter(alertSkipped ? Counter.ALERTS_SKIPPED : Counter.ALERTS_EVALUATED, 1, tags); + + if (alertFailure) + { + _monitorService.modifyCounter(Counter.ALERTS_FAILED, 1, tags); + } + + tags.put(ALERTIDTAG, alert.getId().toString()); + + if(alertSkipped) { + numberOfAlertsEvaluated.decrementAndGet(); + publishAlertTrackingMetric(Counter.ALERTS_SKIPPED.getMetric(), 1.0, tags); + } else + { + publishAlertTrackingMetric(Counter.ALERTS_EVALUATED.getMetric(), 1.0, tags); + } + + if (alertFailure) + { + publishAlertTrackingMetric(Counter.ALERTS_FAILED.getMetric(), 1.0, tags); + } } } // end for - return historyList; + return numberOfAlertsEvaluated.get(); } - private void logAlertStatsOnFailure(BigInteger alertid, String user) { - Map tags = new HashMap<>(); - tags.put("host", HOSTNAME); - publishAlertTrackingMetric(Counter.ALERTS_EVALUATED.getMetric(), alertid, -1.0/*failure*/, tags); - tags = new HashMap<>(); - tags.put(USERTAG, user); - _monitorService.modifyCounter(Counter.ALERTS_FAILED, 1, tags); + @VisibleForTesting + protected void updateRequestContext(Alert alert) + { + RequestContextHolder.setRequestContext(new RequestContext(alert.getOwner().getUserName() + "-alert")); + } - _monitorService.modifyCounter(Counter.ALERTS_EVALUATED, 1, tags); + private boolean doesDatalagExistsInAtLeastOneDC(MetricQueryResult queryResult, Alert alert) + { + + boolean isLagPresentInAtLeastOneDC = false; + + List mQInboundList = queryResult.getInboundMetricQueries(); + List dcList = _metricService.extractDCFromMetricQuery(mQInboundList); + + if (dcList == null || dcList.size() == 0) + { + isLagPresentInAtLeastOneDC = doesDatalagConditionSatisfy(alert, null); + } + + for (String currentDC : dcList) + { + isLagPresentInAtLeastOneDC |= doesDatalagConditionSatisfy(alert, currentDC); + } + + _logger.debug(MessageFormat.format("AlertId: {0}, Expression:{1}, DC detected: {2}, lagPresent:{3}", alert.getId(), alert.getExpression(), dcList, isLagPresentInAtLeastOneDC)); + return isLagPresentInAtLeastOneDC; } - private void handleAlertEvaluationException(Alert alert, long jobStartTime, Long alertEnqueueTimestamp, History history, - Set missingDataTriggers, Exception ex, Boolean isDataMissing) { - long 
jobEndTime; - String logMessage; - jobEndTime = System.currentTimeMillis(); - logMessage = MessageFormat.format("Failed to evaluate alert : {0} due to missing data exception. Full stack trace of exception - {1}", - alert.getId().intValue(), ExceptionUtils.getFullStackTrace(ex)); - _logger.warn(logMessage); + private boolean doesDatalagConditionSatisfy(Alert alert, String currentDC) + { + return _monitorService.isDataLagging(currentDC) && + (_whiteListedScopeRegexPatterns.isEmpty() || !AlertUtils.isPatternPresentInWhiteList(alert.getExpression(), _whiteListedScopeRegexPatterns)) && + (_whiteListedUserRegexPatterns.isEmpty() || !AlertUtils.isPatternPresentInWhiteList(alert.getOwner().getUserName(), _whiteListedUserRegexPatterns)); + } - try { - if (Boolean.valueOf(_configuration.getValue(SystemConfiguration.Property.EMAIL_EXCEPTIONS))) { - _sendEmailToAdmin(alert, alert.getId(), ex); + private void updateDatalagHistory(Alert alert, List historyList, String historyMessage) + { + _logger.info(historyMessage); + if (historyList != null) + { + History history = new History(History.addDateToMessage(JobStatus.SKIPPED.getDescription()), HOSTNAME, alert.getId(), JobStatus.SKIPPED); + history.appendMessageNUpdateHistory(historyMessage, null, 0); + history = _historyService.createHistory(alert, history.getMessage(), history.getJobStatus(), history.getExecutionTime()); + historyList.add(history); + } + } + + // TODO - handle case when testing. should return a + private String _shouldMetricBeRemovedForDataLag(Alert alert, Metric m) + { + try + { + String currentDC = _metricService.extractDCFromMetric(m); + if (doesDatalagConditionSatisfy(alert, currentDC)) + { + String logMessage = String.format("Skipping evaluation of the alert expression with scope: %s in alert with id: %d because metric data was lagging in DC: %s", m.getScope(), alert.getId().intValue(), currentDC); + return logMessage; + } + return null; + } catch (Exception ex) + { + _logger.error("Error while identifying whether metric should be removed for data lag: {}", ex); + return null; + } + } - history.appendMessageNUpdateHistory(logMessage, JobStatus.FAILURE, jobEndTime - jobStartTime); + private boolean shouldMetricBeRemovedForDataLag(Alert alert, Metric m, List historyList) + { + String msg = _shouldMetricBeRemovedForDataLag(alert, m); + if (msg != null) + { + String logMessage = MessageFormat.format("{0}", msg); + updateDatalagHistory(alert, historyList, logMessage); + return true; + } + return false; + } - if(logMessage.contains("net.opentsdb.tsd.BadRequestException") || isDataMissing) { + private void updateAlertStartEvaluationStats(Map alertEnqueueTimestampsByAlertId, Alert alert, long jobStartTime) + { + Long alertEnqueueTimestamp = 0L; - if (alert.isMissingDataNotificationEnabled()) { - _sendNotificationForMissingData(alert); - } + Map tags = new HashMap<>(); + tags.put(USERTAG, alert.getOwner().getUserName()); - if (missingDataTriggers.size() > 0) { - for (Notification notification : alert.getNotifications()) { - if (!notification.getTriggers().isEmpty()) { - _processMissingDataNotification(alert, history, missingDataTriggers, notification, true, alertEnqueueTimestamp); - } - } - } + if (alertEnqueueTimestampsByAlertId.containsKey(alert.getId())) + { + + alertEnqueueTimestamp = alertEnqueueTimestampsByAlertId.get(alert.getId()); + + if (jobStartTime - alertEnqueueTimestamp > EVALUATIONDELAY) + { + _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_DELAYED, 1, tags); + _logger.warn("EVALUATION_DELAYED: Alert {}:{} enQueueTime {} evaluationTime
{}", + alert.getId(), alert.getName(), alertEnqueueTimestamp, jobStartTime); + } else + { + _monitorService.modifyCounter(Counter.ALERTS_EVALUATION_STARTED, 1, tags); } } - catch (Exception e) { - logMessage = MessageFormat.format("Unexpected exception evaluating alert : {0}. Full stack trace of exception - {1}", alert.getId().intValue(), ExceptionUtils.getFullStackTrace(e)); - _logger.warn(logMessage); - } + } + + private void logAlertStatsOnFailure(BigInteger alertId, String user) + { + Map tags = new HashMap<>(); + tags.put(USERTAG, user); + + _monitorService.modifyCounter(Counter.ALERTS_FAILED, 1, tags); + _monitorService.modifyCounter(Counter.ALERTS_EVALUATED, 1, tags); - logAlertStatsOnFailure(alert.getId(), alert.getOwner().getUserName()); + tags.put(HOSTTAG, HOSTNAME); + tags.put(ALERTIDTAG, alertId.toString()); + + publishAlertTrackingMetric(Counter.ALERTS_EVALUATED.getMetric(), -1.0/*failure*/, tags); } + private boolean areDatapointsEmpty(List metrics) { if(metrics==null || metrics.size()==0) { return true; @@ -583,50 +854,168 @@ private boolean areDatapointsEmpty(List metrics) { return true; } + + /* ---------------------------------------------------------------------------------------------- + * Generic Notification Processing + * Specific processing for each situation is implemented by classes implementing INotificationHandler. + * The higher level notification processing is handled by the following functions: + * 1. _processMissingDataNotifications() + * 2. _handleAlertEvaluationException() + * 3. _processNotification() + * 4. _processTriggerlessNotification() + * 5. _processMissingDataNotification() + * These functions call INotificationHandler to perform the notification handling. + * Normal alert processing uses NotificationProcessor. + * Alert testing uses TestNotificationProcessor. + * ---------------------------------------------------------------------------------------------*/ + + + /* + * INotificationHandler defines the methods called to handle notification in various situations. + * The default implementation delivers notifications the usual way. + * THe test implementation drops notifications on the floor and will later record that they notified. 
+ */ + interface INotificationHandler { + + // Notification Handling + void _onNotificationRefocusValueNotifier(Notification n, History h); + + void _onNotificationFired(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + Map triggerFiredTimesForMetrics, Boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp); + + void _onNotificationCleared(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp); + + // Triggerless Notification Handling + void _onTriggerlessIsNotRefocusValueNotifier(Notification n, History h); + + void _onTriggerlessIsRefocusValueNotifier(Alert alert, Notification notification, Metric m, History history, + Long dataPoint, Long alertEnqueueTimestamp); + + // Missing Data Notification Handling + void _onMissingDataRefocusValueNotification(Notification n, History h); + + void _onMissingDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isRefocusNotifier, Long alertEnqueueTimestamp); + + void _onMissingClearedDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isRefocusNotifier, Long alertEnqueueTimestamp); + + void _onMissingDataNotificationEnabled(Alert alert, History history); + + void _onMissingDataNotificationDisabled(Alert alert, History history); + + // Alert Evaluation Exception Handling + void _onAlertEvaluationException(Alert alert, History history, + long jobStartTime, Exception ex, boolean isDataMissing); + + } + + + private void _processMissingDataNotifications(INotificationHandler np, Alert alert, History history, Long alertEvaluationTime, Set missingDataTriggers) + { + if(alert.isMissingDataNotificationEnabled()) { + np._onMissingDataNotificationEnabled(alert, history); + } + else { + np._onMissingDataNotificationDisabled(alert, history); + } + + if(missingDataTriggers.size()>0) { + for (Notification notification : alert.getNotifications()) { + if (!notification.getTriggers().isEmpty()) { + _processMissingDataNotification(np, alert, history, missingDataTriggers, notification, true, alertEvaluationTime); + } + } + } + } + + private void _handleAlertEvaluationException(INotificationHandler np, Alert alert, long jobStartTime, Long alertEnqueueTimestamp, History history, + Set missingDataTriggers, Exception ex, Boolean isDataMissing) { + try + { + np._onAlertEvaluationException(alert, history, jobStartTime, ex, isDataMissing); + + String exMessage = ExceptionUtils.getMessage(ex); + if (exMessage.contains("net.opentsdb.tsd.BadRequestException") || isDataMissing) + { + _processMissingDataNotifications(np, alert, history, alertEnqueueTimestamp, missingDataTriggers); + } + } + catch(Exception e) + { + String logMessage = MessageFormat.format("Unexpected exception evaluating alert : `{0}`. Full stack trace of exception - {1}", alert.getId().intValue(), ExceptionUtils.getFullStackTrace(e)); + _logger.warn(logMessage); + } + } + + /** * Evaluates all triggers associated with the notification and updates the job history. 
*/ - private void _processNotification(Alert alert, History history, List metrics, - Map> triggerFiredTimesAndMetricsByTrigger, Notification notification, Long alertEnqueueTimestamp) { + public void _processNotification(INotificationHandler notificationHandler, + Alert alert, + History history, + List metrics, + Map> triggerFiredTimesAndMetricsByTrigger, + Notification notification, + Long alertEnqueueTimestamp) { //refocus notifier does not need cool down logic, and every evaluation needs to send notification - boolean isRefocusNotifier = SupportedNotifier.REFOCUS.getName().equals(notification.getNotifierName()); + // Future - once refocus v1 notifiers are migrated to refocus_boolean notifiers, remove REFOCUS. + boolean isBooleanRefocusNotifier = SupportedNotifier.REFOCUS.getName().equals(notification.getNotifierName()) || + SupportedNotifier.REFOCUS_BOOLEAN.getName().equals(notification.getNotifierName()); + boolean isValueRefocusNotifier = SupportedNotifier.REFOCUS_VALUE.getName().equals(notification.getNotifierName()); + + if (isValueRefocusNotifier) { + // Future - For now just ignore RefocusValueNotifiers attached to Triggers. + notificationHandler._onNotificationRefocusValueNotifier(notification, history); + return; + } - for(Trigger trigger : notification.getTriggers()) { + for (Trigger trigger : notification.getTriggers()) { Map triggerFiredTimesForMetrics = triggerFiredTimesAndMetricsByTrigger.get(trigger.getId()); - for(Metric m : metrics) { - if(triggerFiredTimesForMetrics!=null && triggerFiredTimesForMetrics.containsKey(m)) { - String logMessage = MessageFormat.format("The trigger {0} was evaluated against metric {1} and it is fired.", trigger.getName(), m.getIdentifier()); - history.appendMessageNUpdateHistory(logMessage, null, 0); + for (Metric m : metrics) { + if (triggerFiredTimesForMetrics != null && triggerFiredTimesForMetrics.containsKey(m)) { + notificationHandler._onNotificationFired( alert, trigger, notification, m, history, + triggerFiredTimesForMetrics, + isBooleanRefocusNotifier, alertEnqueueTimestamp); - if (isRefocusNotifier) { - sendNotification(trigger, m, history, notification, alert, triggerFiredTimesForMetrics.get(m), alertEnqueueTimestamp); - continue; - } - - if(!notification.onCooldown(trigger, m)) { - _updateNotificationSetActiveStatus(trigger, m, history, notification); - sendNotification(trigger, m, history, notification, alert, triggerFiredTimesForMetrics.get(m), alertEnqueueTimestamp); - } else { - logMessage = MessageFormat.format("The notification {0} is on cooldown until {1}.", notification.getName(), getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, m))); - history.appendMessageNUpdateHistory(logMessage, null, 0); - } } else { - String logMessage = MessageFormat.format("The trigger {0} was evaluated against metric {1} and it is not fired.", trigger.getName(), m.getIdentifier()); - history.appendMessageNUpdateHistory(logMessage, null, 0); + notificationHandler._onNotificationCleared( alert, trigger, notification, m, history, + isBooleanRefocusNotifier, alertEnqueueTimestamp); + } + } + } + } - if (isRefocusNotifier) { - sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp); - continue; - } + /** + * Evaluates notifiers without triggers. Only RefocusValueNotifiers can execute with out a trigger. + * All other notifiers without triggers are logged. 
+ */ + public void _processTriggerlessNotification(INotificationHandler notificationHandler, + Alert alert, + History history, + List metrics, + Notification notification, + Long alertEnqueueTimestamp) { - if(notification.isActiveForTriggerAndMetric(trigger, m)) { - // This is case when the notification was active for the given trigger, metric combination - // and the metric did not violate triggering condition on current evaluation. Hence we must clear it. - _updateNotificationClearActiveStatus(trigger, m, notification); - sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp); - } + boolean isRefocusValueNotifier = SupportedNotifier.REFOCUS_VALUE.getName().equals(notification.getNotifierName()); + + if (!isRefocusValueNotifier) { + notificationHandler._onTriggerlessIsNotRefocusValueNotifier(notification, history); + } else { + + // Refocus Notifiers: every evaluation needs to send notification + // Future - file work item for Refocus -> each metric (evaluated expression) will be directed to all of the S+A in the notifier. + // future - Work item will request expansion of the S+A based on some part of the metric expression. + // FOR NOW - Users should auther Alerts with RefocusValueNotifiers to have only a single expression. + for (Metric m : metrics) { + Long latestDataPoint = getLatestDatapointTime(m, alert.getExpression(), alertEnqueueTimestamp); + + if (latestDataPoint != null) { + notificationHandler._onTriggerlessIsRefocusValueNotifier( alert, notification, m, history, latestDataPoint, alertEnqueueTimestamp); } } } @@ -635,49 +1024,222 @@ private void _processNotification(Alert alert, History history, List met /** * Evaluates all triggers associated with the missing data notification and updates the job history. 
*/ - private void _processMissingDataNotification(Alert alert, History history, Set triggers, Notification notification, boolean isDataMissing, Long alertEnqueueTimestamp) { + public void _processMissingDataNotification(INotificationHandler notificationHandler, + Alert alert, + History history, + Set triggers, + Notification notification, + boolean isDataMissing, + Long alertEnqueueTimestamp){ //refocus notifier does not need cool down logic, and every evaluation needs to send notification - boolean isRefocusNotifier = SupportedNotifier.REFOCUS.getName().equals(notification.getNotifierName()); - - for(Trigger trigger : notification.getTriggers()) { - if(triggers.contains(trigger)) { - Metric m = new Metric("argus","argus"); - if(isDataMissing) { - String logMessage = MessageFormat.format("The trigger {0} was evaluated and it is fired as data for the metric expression {1} does not exist", trigger.getName(), alert.getExpression()); - history.appendMessageNUpdateHistory(logMessage, null, 0); - - if(isRefocusNotifier) { - sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp); - continue; - } + boolean isRefocusNotifier = SupportedNotifier.REFOCUS.getName().equals(notification.getNotifierName()) || + SupportedNotifier.REFOCUS_BOOLEAN.getName().equals(notification.getNotifierName()); + boolean isValueRefocusNotifier = SupportedNotifier.REFOCUS_VALUE.getName().equals(notification.getNotifierName()); - if (!notification.onCooldown(trigger, m)) { - _updateNotificationSetActiveStatus(trigger, m, history, notification); - sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp); - } else { - logMessage = MessageFormat.format("The notification {0} is on cooldown until {1}.", notification.getName(), getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, m))); - history.appendMessageNUpdateHistory(logMessage, null, 0); - } + if (isValueRefocusNotifier) { + notificationHandler._onMissingDataRefocusValueNotification(notification, history); + return; + } + + for (Trigger trigger : notification.getTriggers()) { + if (triggers.contains(trigger)) { + Metric m = new Metric("unknown", "unknown"); + if (isDataMissing) { + notificationHandler._onMissingDataNotification( alert, trigger, notification, m, history, + isRefocusNotifier, alertEnqueueTimestamp); + + } else { + notificationHandler._onMissingClearedDataNotification( alert, trigger, notification, m, history, + isRefocusNotifier, alertEnqueueTimestamp); + } + } + } + } + + /* + * Default Notification Processor - Normal handling of Notification during alert execution. + */ + class NotificationProcessor implements INotificationHandler { + + private DefaultAlertService alertService; + private Logger _logger; + + public NotificationProcessor(DefaultAlertService alertService, Logger logger) + { + this.alertService = alertService; + this._logger = logger; + } + + // Notification ------------------------------------------------------------------- + + @Override + public void _onNotificationRefocusValueNotifier(Notification notification, History history) + { + // Future - For now just ignore RefocusValueNotifiers attached to Triggers. + String logMessage = MessageFormat.format("RefocusValueNotifiers must not be associated with triggers. 
Name: `{0}`", notification.getName()); + _logger.info(logMessage); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } + + @Override + public void _onNotificationFired(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + Map triggerFiredTimesForMetrics, + Boolean isBooleanRefocusNotifier, + Long alertEnqueueTimestamp) + { + String logMessage = MessageFormat.format("The trigger `{0}` was evaluated against metric `{1}` and it is fired.", trigger.getName(), m.getIdentifier()); + history.appendMessageNUpdateHistory(logMessage, null, 0); + + if (isBooleanRefocusNotifier) { + sendNotification(trigger, m, history, notification, alert, triggerFiredTimesForMetrics.get(m), alertEnqueueTimestamp, ACTION_NOTIFIED); + return; + } + + if (!notification.onCooldown(trigger, m)) { + _updateNotificationSetActiveStatus(trigger, m, history, notification); + sendNotification(trigger, m, history, notification, alert, triggerFiredTimesForMetrics.get(m), alertEnqueueTimestamp, ACTION_TRIGGERED); + } else { + logMessage = MessageFormat.format("The notification `{0}` is on cooldown until {1}.", notification.getName(), getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, m))); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } + } + + @Override + public void _onNotificationCleared(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp) + { + String logMessage = MessageFormat.format("The trigger `{0}` was evaluated against metric `{1}` and it is not fired.", trigger.getName(), m.getIdentifier()); + history.appendMessageNUpdateHistory(logMessage, null, 0); + + if (isBooleanRefocusNotifier) { + sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp, ACTION_NOTIFIED); + return; + } + + if (notification.isActiveForTriggerAndMetric(trigger, m)) { + // This is case when the notification was active for the given trigger, metric combination + // and the metric did not violate triggering condition on current evaluation. Hence we must clear it. + _updateNotificationClearActiveStatus(trigger, m, notification); + sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp, ACTION_CLEARED); + } + } + + // Triggerless Notification ------------------------------------------------------- + + @Override + public void _onTriggerlessIsNotRefocusValueNotifier(Notification notification, History history) + { + String logMessage = MessageFormat.format("The notification `{0}` has no triggers.", notification.getName()); + _logger.debug(logMessage); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } + + @Override + public void _onTriggerlessIsRefocusValueNotifier(Alert alert, Notification notification, Metric m, History history, + Long dataPoint, Long alertEnqueueTimestamp) + { + sendNotification(null, m, history, notification, alert, dataPoint, alertEnqueueTimestamp, ACTION_NOTIFIED); + } + + // Missing Data Notification ------------------------------------------------------- + + @Override + public void _onMissingDataRefocusValueNotification(Notification notification, History history) + { + // Future - For now just ignore RefocusValueNotifiers attached to NoData Scenarios. Later we trigger, but require that the subscriptions for refocusValue have a value supplied too! S|A|Value + String logMessage = MessageFormat.format("RefocusValueNotifiers must not be associated with no-data triggers. 
Name: `{0}`", notification.getName()); + _logger.info(logMessage); + history.appendMessageNUpdateHistory(logMessage, null, 0); + return; + } + + @Override + public void _onMissingDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isRefocusNotifier, Long alertEnqueueTimestamp) + { + String logMessage = MessageFormat.format("The trigger `{0}` was evaluated and it is fired as data for the metric expression `{1}` does not exist", trigger.getName(), alert.getExpression()); + history.appendMessageNUpdateHistory(logMessage, null, 0); + + if (isRefocusNotifier) { + sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp, ACTION_NOTIFIED); + return; + } + + if (!notification.onCooldown(trigger, m)) { + _updateNotificationSetActiveStatus(trigger, m, history, notification); + sendNotification(trigger, m, history, notification, alert, System.currentTimeMillis(), alertEnqueueTimestamp, ACTION_TRIGGERED); + } else { + logMessage = MessageFormat.format("The notification `{0}` is on cooldown until `{1}`.", notification.getName(), getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, m))); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } + } + + @Override + public void _onMissingClearedDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history, + boolean isRefocusNotifier, Long alertEnqueueTimestamp) + { + String logMessage = MessageFormat.format("The trigger `{0}` was evaluated and it is not fired as data exists for the expression `{1}`", trigger.getName(), alert.getExpression()); + history.appendMessageNUpdateHistory(logMessage, null, 0); - } else { // Data is not missing - String logMessage = MessageFormat.format("The trigger {0} was evaluated and it is not fired as data exists for the expression {1}", trigger.getName(), alert.getExpression()); - history.appendMessageNUpdateHistory(logMessage, null, 0); + if (isRefocusNotifier) { + sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp, ACTION_NOTIFIED); + return; + } + if (notification.isActiveForTriggerAndMetric(trigger, m)) { + // This is case when the notification was active for the given trigger, metric combination + // and the metric did not violate triggering condition on current evaluation. Hence we must clear it. + _updateNotificationClearActiveStatus(trigger, m, notification); + sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp, ACTION_CLEARED); + } + } + + @Override + public void _onMissingDataNotificationEnabled(Alert alert, History history) { + _sendNotificationForMissingData(alert); + + String logMessage = MessageFormat.format("Metric data does not exist for alert expression: {0}. Sent notification for missing data.", + alert.getExpression()); + _logger.debug(logMessage); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } + + @Override + public void _onMissingDataNotificationDisabled(Alert alert, History history) { + + String logMessage = MessageFormat.format("Metric data does not exist for alert expression: {0}. 
Missing data notification was not enabled.", + alert.getExpression()); + _logger.debug(logMessage); + history.appendMessageNUpdateHistory(logMessage, null, 0); + } - if(isRefocusNotifier) { - sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp); - continue; - } - if (notification.isActiveForTriggerAndMetric(trigger, m)) { - // This is case when the notification was active for the given trigger, metric combination - // and the metric did not violate triggering condition on current evaluation. Hence we must clear it. - _updateNotificationClearActiveStatus(trigger, m, notification); - sendClearNotification(trigger, m, history, notification, alert, alertEnqueueTimestamp); - } + // Exception Handling --------------------------------------------------------------- + + @Override + public void _onAlertEvaluationException(Alert alert, History history, + long jobStartTime, Exception ex, boolean isDataMissing) + { + String logMessage; + long jobEndTime = System.currentTimeMillis(); + if (isDataMissing) + { + logMessage = MessageFormat.format("Failed to evaluate alert : `{0}` due to missing data exception. Exception message - {1}", + alert.getId().intValue(), ExceptionUtils.getMessage(ex)); + } else + { + logMessage = MessageFormat.format("Failed to evaluate alert : `{0}`. Exception message - {1}", + alert.getId().intValue(), ExceptionUtils.getMessage(ex)); + } + _logger.warn(logMessage); - } + if (Boolean.valueOf(_configuration.getValue(SystemConfiguration.Property.EMAIL_EXCEPTIONS))) + { + _sendEmailToAdmin(alert, alert.getId(), ex); } + + history.appendMessageNUpdateHistory(logMessage, JobStatus.FAILURE, jobEndTime - jobStartTime); } + } @@ -686,23 +1248,39 @@ private void _processMissingDataNotification(Alert alert, History history, Set tags) { + _monitorService.modifyCounter(Counter.TRIGGERS_VIOLATED, 1, tags); + } + + private void noopTags(Map tags) { + // Don't delete. This method is used by alert testing. + } + + + + /** * Evaluates all triggers for the given set of metrics and returns a map of triggerIds to a map containing the triggered metric - * and the trigger fired time. + * and the trigger fired time. + * Note: this::incrementTriggersViolated() is passed to increment the triggedCounter. + * Returns map: trigger_id -> ( map: metric -> time ) */ - private Map> _evaluateTriggers(Set triggers, List metrics, String queryExpression, Long alertEnqueueTimestamp) { + private Map> _evaluateTriggers(Set triggers, + List metrics, + String queryExpression, + Long alertEnqueueTimestamp, + Consumer> incrTriggeredCounter) { Map> triggerFiredTimesAndMetricsByTrigger = new HashMap<>(); for(Trigger trigger : triggers) { @@ -714,7 +1292,7 @@ private Map> _evaluateTriggers(Set trigge triggerFiredTimesForMetrics.put(metric, triggerFiredTime); Map tags = new HashMap<>(); tags.put(USERTAG, trigger.getAlert().getOwner().getUserName()); - _monitorService.modifyCounter(Counter.TRIGGERS_VIOLATED, 1, tags); + incrTriggeredCounter.accept(tags); // In normal alert evaluation, this increments the Triggers_Violated counter. } } triggerFiredTimesAndMetricsByTrigger.put(trigger.getId(), triggerFiredTimesForMetrics); @@ -724,65 +1302,179 @@ private Map> _evaluateTriggers(Set trigge public void sendNotification(Trigger trigger, Metric metric, History history, Notification notification, Alert alert, - Long triggerFiredTime, Long alertEnqueueTime) { + Long triggerFiredTime, Long alertEnqueueTime, String action) { + + /* NOTE - For trigger-less Notifications (i.e. 
the RefocusValueNotifier), trigger is null, and the + passed in triggerFiredTime is the most recent value in the metric. */ + double triggerValue = 0.0; + if(trigger == null || !trigger.getType().equals(TriggerType.NO_DATA)){ + triggerValue = metric.getDatapoints().get(triggerFiredTime); + } - double value = 0.0; - if(!trigger.getType().equals(TriggerType.NO_DATA)){ - value = metric.getDatapoints().get(triggerFiredTime); + Pair evaluatedMetricSnapshotDetails = null; + String evaluatedMetricSnapshotURL = null; + if (isImagesInNotificationsEnabled(action)) { + ImageDataRetrievalContext imageDataRetrievalContext = new ImageDataRetrievalContext(alert, trigger, + triggerFiredTime, metric, Notifier.NotificationStatus.TRIGGERED); + evaluatedMetricSnapshotDetails = getEvaluatedMetricSnapshotDetails(imageDataRetrievalContext); + if (evaluatedMetricSnapshotDetails != null) { + evaluatedMetricSnapshotURL = _imageDataRetriever.getImageURL(evaluatedMetricSnapshotDetails); + } } - NotificationContext context = new NotificationContext(alert, trigger, notification, triggerFiredTime, value, metric, history); + + Long timestamp = (alertEnqueueTime != null) ? alertEnqueueTime : System.currentTimeMillis(); + String alertEvaluationTrackingID = getAlertEvaluationTrackingID(alert, timestamp); + + NotificationContext context = new NotificationContext(alert, trigger, notification, triggerFiredTime, + triggerValue, metric, history, evaluatedMetricSnapshotDetails, evaluatedMetricSnapshotURL, alertEvaluationTrackingID ); context.setAlertEnqueueTimestamp(alertEnqueueTime); Notifier notifier = getNotifier(SupportedNotifier.fromClassName(notification.getNotifierName())); - notifier.sendNotification(context); - Map tags = new HashMap<>(); - tags.put("status", "active"); - tags.put("type", SupportedNotifier.fromClassName(notification.getNotifierName()).name()); + String alertId = (trigger != null) ? trigger.getAlert().getId().toString() : alert.getId().toString(); + String notificationTarget = SupportedNotifier.fromClassName(notification.getNotifierName()).name(); + + Map tags = new HashMap<>(); + String logMessage; + boolean rc; + + try { + rc = notifier.sendNotification(context); + } catch (Exception e) { + _logger.error("sendNotification() hit exception", e); + rc = false; + } + + // TODO - log alertId, triggerId, notificationId? + if (rc) { + tags.put(STATUSTAG, STATUS_SUCCESS); + if (trigger != null) { + logMessage = MessageFormat.format("Sent alert notification and updated the cooldown: {0}", + getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, metric))); + } + else { + logMessage = MessageFormat.format("Sent notification to {0}", + notificationTarget); + } + } else { + tags.put(STATUSTAG, STATUS_FAILURE); + logMessage = MessageFormat.format("Failed to send notification to {0}", + notificationTarget); + } + + tags.put(USERTAG, alert.getOwner().getUserName()); + tags.put(ACTIONTAG, action); + tags.put(RETRIESTAG, Integer.toString(context.getNotificationRetries())); + tags.put(NOTIFYTARGETTAG, notificationTarget); + // metric published every minute by monitor service. 
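// ---------------------------------------------------------------------------
// [Editorial sketch, not part of this diff] sendNotification() here and
// sendClearNotification() below wrap the notifier call in the same
// try/catch-then-tag pattern. A minimal helper factoring that pattern could
// look like the following; the helper name is hypothetical, while Notifier,
// NotificationContext, STATUSTAG, STATUS_SUCCESS and STATUS_FAILURE are the
// types and constants already used in this file.
private boolean sendAndTagStatus(Notifier notifier, NotificationContext context, Map<String, String> tags) {
    boolean sent;
    try {
        sent = notifier.sendNotification(context);
    } catch (Exception e) {
        _logger.error("sendNotification() hit exception", e);
        sent = false;
    }
    // Tag the counter with the delivery outcome so dashboards can split on it.
    tags.put(STATUSTAG, sent ? STATUS_SUCCESS : STATUS_FAILURE);
    return sent;
}
// ---------------------------------------------------------------------------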
Fewer tags, faster for aggregated debugging _monitorService.modifyCounter(Counter.NOTIFICATIONS_SENT, 1, tags); - tags = new HashMap<>(); - tags.put("notification_id", notification.getId().intValue()+""); - tags.put("host", HOSTNAME); - tags.put("metric", metric.getIdentifier().hashCode()+""); - publishAlertTrackingMetric(Counter.NOTIFICATIONS_SENT.getMetric(), trigger.getAlert().getId(), 1.0/*notification sent*/, tags); - - String logMessage = MessageFormat.format("Sent alert notification and updated the cooldown: {0}", - getDateMMDDYYYY(notification.getCooldownExpirationByTriggerAndMetric(trigger, metric))); - _logger.info(logMessage); + + tags.put(HOSTTAG, HOSTNAME); + tags.put(ALERTIDTAG, alertId); + publishAlertTrackingMetric(Counter.NOTIFICATIONS_SENT.getMetric(), 1.0, tags); + + _logger.debug(logMessage); history.appendMessageNUpdateHistory(logMessage, null, 0); } - public void sendClearNotification(Trigger trigger, Metric metric, History history, Notification notification, Alert alert, Long alertEnqueueTime) { - NotificationContext context = new NotificationContext(alert, trigger, notification, System.currentTimeMillis(), 0.0, metric, history); + private boolean isImagesInNotificationsEnabled(String action) { + return Boolean.valueOf(_configuration.getValue(SystemConfiguration.Property.IMAGES_IN_NOTIFICATIONS_ENABLED)) && + (ACTION_TRIGGERED.equals(action) || ACTION_CLEARED.equals(action)); + } + + private Pair getEvaluatedMetricSnapshotDetails(ImageDataRetrievalContext imageDataRetrievalContext) { + Pair evaluatedMetricSnapshotDetails; + try { + evaluatedMetricSnapshotDetails = _imageDataRetriever.getAnnotatedImage(imageDataRetrievalContext); + } catch (Exception e) { + _logger.error("Exception encountered while trying to fetch the evaluated metric snapshot details. The snapshot" + + " or the URL will not be displayed for notification associated with alert ID " + + imageDataRetrievalContext.getAlert().getId(), e); + return null; + } + return evaluatedMetricSnapshotDetails; + } + + + public void sendClearNotification(Trigger trigger, Metric metric, History history, Notification notification, Alert alert, Long alertEnqueueTime, String action) { + Pair evaluatedMetricSnapshotDetails = null; + String evaluatedMetricSnapshotURL = null; + + if (isImagesInNotificationsEnabled(action)) { + ImageDataRetrievalContext imageDataRetrievalContext = new ImageDataRetrievalContext(alert, trigger, metric, Notifier.NotificationStatus.CLEARED); + evaluatedMetricSnapshotDetails = getEvaluatedMetricSnapshotDetails(imageDataRetrievalContext); + if (evaluatedMetricSnapshotDetails != null) { + evaluatedMetricSnapshotURL = _imageDataRetriever.getImageURL(evaluatedMetricSnapshotDetails); + } + } + + Long timestamp = (alertEnqueueTime != null) ? alertEnqueueTime : System.currentTimeMillis(); + String alertEvaluationTrackingID = getAlertEvaluationTrackingID(alert, timestamp); + + NotificationContext context = new NotificationContext(alert, trigger, notification, System.currentTimeMillis(), + 0.0, metric, history, evaluatedMetricSnapshotDetails, evaluatedMetricSnapshotURL, + alertEvaluationTrackingID); context.setAlertEnqueueTimestamp(alertEnqueueTime); Notifier notifier = getNotifier(SupportedNotifier.fromClassName(notification.getNotifierName())); - notifier.clearNotification(context); + String alertId = (trigger != null) ? 
trigger.getAlert().getId().toString() : alert.getId().toString(); + String notificationTarget = SupportedNotifier.fromClassName(notification.getNotifierName()).name(); Map tags = new HashMap<>(); - tags.put("status", "clear"); - tags.put("type", SupportedNotifier.fromClassName(notification.getNotifierName()).name()); + String logMessage; + boolean rc; + + try { + rc = notifier.clearNotification(context); + } catch (Exception e) { + _logger.error("clearNotification() hit exception", e); + rc = false; + } + + if (rc) { + tags.put(STATUSTAG, STATUS_SUCCESS); + logMessage = MessageFormat.format("The notification {0} was cleared.", notification.getName()); + } else { + tags.put(STATUSTAG, STATUS_FAILURE); + logMessage = MessageFormat.format("Failed to send clear notification to {0}", notificationTarget); + } + + tags.put(USERTAG, alert.getOwner().getUserName()); + tags.put(ACTIONTAG, action); + tags.put(NOTIFYTARGETTAG, notificationTarget); + // metric published every minute by monitor service. Fewer tags, faster for aggregated debugging _monitorService.modifyCounter(Counter.NOTIFICATIONS_SENT, 1, tags); - tags = new HashMap<>(); - tags.put("notification_id", notification.getId().intValue()+""); - tags.put("host", HOSTNAME); - tags.put("metric", metric.getIdentifier().hashCode()+""); - publishAlertTrackingMetric(Counter.NOTIFICATIONS_SENT.getMetric(), trigger.getAlert().getId(), -1.0/*notification cleared*/,tags); - String logMessage = MessageFormat.format("The notification {0} was cleared.", notification.getName()); + tags.put(HOSTTAG, HOSTNAME); + tags.put(ALERTIDTAG, alertId); + publishAlertTrackingMetric(Counter.NOTIFICATIONS_SENT.getMetric(), 1.0, tags); + _logger.info(logMessage); history.appendMessageNUpdateHistory(logMessage, null, 0); } - private void publishAlertTrackingMetric(String scope, BigInteger alertId, double value, Map tags) { + private String getAlertEvaluationTrackingID(Alert alert, Long timestamp) { + BigInteger alertId = alert.getId(); + if(timestamp == null) { + _logger.error("The timestamp is null. Unable to construct a tracking ID for evaluation on alert ID "+ alertId); + } + return alertId + "_" + timestamp; + } + + + /** + * Publishing tracking metric per alert/notification. For ad-hoc tracking metrics + */ + private void publishAlertTrackingMetric(String metric, double value, Map tags) { Map datapoints = new HashMap<>(); - datapoints.put(1000 * 60 * (System.currentTimeMillis()/(1000 *60)), value); - Metric trackingMetric = new Metric(scope, "alert-" + alertId.intValue()); + datapoints.put(System.currentTimeMillis(), value); + Metric trackingMetric = new Metric(ALERTSCOPE, metric); trackingMetric.addDatapoints(datapoints); + if(tags!=null) { trackingMetric.setTags(tags); } - this.exportMetric(trackingMetric, value); + // this.exportMetric(trackingMetric, value); try { _tsdbService.putMetrics(Arrays.asList(new Metric[] {trackingMetric})); } catch (Exception ex) { @@ -818,11 +1510,46 @@ private void _sendEmailToAdmin(Alert alert, BigInteger alertId, Throwable ex) { message.append(MessageFormat.format("
Exception message: The alert with id {0} does not exist.", alertId.intValue())); } message.append(MessageFormat.format("
Time stamp: {0}", History.DATE_FORMATTER.get().format(new Date(System.currentTimeMillis())))); - _mailService.sendMessage(to, subject, message.toString(), "text/html; charset=utf-8", MailService.Priority.HIGH); + + EmailContext.Builder emailContextBuilder = new EmailContext.Builder() + .withRecipients(to) + .withSubject(subject) + .withEmailBody(message.toString()) + .withContentType("text/html; charset=utf-8") + .withEmailPriority(MailService.Priority.HIGH); + + _mailService.sendMessage(emailContextBuilder.build()); if (alert != null && alert.getOwner() != null && alert.getOwner().getEmail() != null && !alert.getOwner().getEmail().isEmpty()) { to.clear(); to.add(alert.getOwner().getEmail()); - _mailService.sendMessage(to, subject, message.toString(), "text/html; charset=utf-8", MailService.Priority.HIGH); + + emailContextBuilder = emailContextBuilder.withRecipients(to); + _mailService.sendMessage(emailContextBuilder.build()); + } + } + + private void _sendOrphanAlertNotification(Alert alert) { + if(alert != null) { + String subject = MessageFormat.format("Argus alert {0} is mark disabled", alert.getId().intValue()); + StringBuilder message = new StringBuilder(); + message.append("
This is an alert disabling notification
"); + message.append(MessageFormat.format("Alert Id: {0}", alert.getId().intValue())); + message.append(MessageFormat.format("
Alert name: {0}" , alert.getName())); + message.append(MessageFormat.format("
No data found for the following metric expression: {0} for last {1} days.", alert.getExpression(), MetricSchemaRecord.DEFAULT_RETENTION_DISCOVERY_DAYS)); + message.append("
If you wish to re-enable it, please modify the alert expression and then enable the alert."); + if (alert.getOwner() != null && alert.getOwner().getEmail() != null && !alert.getOwner().getEmail().isEmpty()) { + Set to = new HashSet<>(); + to.add(alert.getOwner().getEmail()); + EmailContext emailContext = new EmailContext.Builder() + .withRecipients(to) + .withSubject(subject) + .withEmailBody(message.toString()) + .withContentType("text/html; charset=utf-8") + .withEmailPriority(MailService.Priority.NORMAL) + .build(); + + _mailService.sendMessage(emailContext); + } } } @@ -838,15 +1565,27 @@ private void _sendNotificationForMissingData(Alert alert) { message.append(MessageFormat.format("
Alert name: {0}" , alert.getName())); message.append(MessageFormat.format("
No data found for the following metric expression: {0}", alert.getExpression())); message.append(MessageFormat.format("
Time stamp: {0}", History.DATE_FORMATTER.get().format(new Date(System.currentTimeMillis())))); - _mailService.sendMessage(to, subject, message.toString(), "text/html; charset=utf-8", MailService.Priority.HIGH); + + EmailContext emailContext = new EmailContext.Builder() + .withRecipients(to) + .withSubject(subject) + .withEmailBody(message.toString()) + .withContentType("text/html; charset=utf-8") + .withEmailPriority(MailService.Priority.HIGH) + .build(); + + boolean rc = _mailService.sendMessage(emailContext); Map tags = new HashMap<>(); - tags.put("status", "missingdata"); - tags.put("type", SupportedNotifier.EMAIL.name()); + tags.put(ALERTIDTAG, alert.getId().toString()); + tags.put(USERTAG, alert.getOwner().getUserName()); + tags.put(ACTIONTAG, ACTION_MISSINGDATA); + tags.put(STATUSTAG, rc ? STATUS_SUCCESS: STATUS_FAILURE); + tags.put(NOTIFYTARGETTAG, SupportedNotifier.EMAIL.name()); _monitorService.modifyCounter(Counter.NOTIFICATIONS_SENT, 1, tags); + publishAlertTrackingMetric(Counter.NOTIFICATIONS_SENT.getMetric(), 1.0, tags); } - @Override @Transactional public Alert findAlertByNameAndOwner(String name, PrincipalUser owner) { @@ -866,6 +1605,9 @@ public void enqueueAlerts(List alerts) { AlertWithTimestamp obj; try { String serializedAlert = _mapper.writeValueAsString(alert); + + _logger.debug(MessageFormat.format("serializedAlert {0}", serializedAlert)); + obj = new AlertWithTimestamp(serializedAlert, System.currentTimeMillis()); } catch (JsonProcessingException e) { _logger.warn("Failed to serialize alert: {}.", alert.getId().intValue()); @@ -876,23 +1618,30 @@ public void enqueueAlerts(List alerts) { alertsWithTimestamp.add(obj); } - _mqService.enqueue(ALERT.getQueueName(), alertsWithTimestamp); + try { + _mqService.enqueue(ALERT.getQueueName(), alertsWithTimestamp); + } catch (Exception ex) { + _logger.error("Error occurred while enqueueing alerts to mq service. Reason {}", ex.getMessage()); + } List metricsAlertScheduled = new ArrayList(); + _monitorService.modifyCounter(Counter.ALERTS_SCHEDULED_TOTAL, alerts.size(), new HashMap<>()); // Write alerts scheduled for evaluation as time series to TSDB for (Alert alert : alerts) { Map datapoints = new HashMap<>(); // convert timestamp to nearest minute since cron is Least scale resolution of minute datapoints.put(1000 * 60 * (System.currentTimeMillis()/(1000 *60)), 1.0); - Metric metric = new Metric("alerts.scheduled", "alert-" + alert.getId().toString()); - metric.setTag("host", HOSTNAME); + Metric metric = new Metric("argus.alerts", "scheduled"); + metric.setTag(HOSTTAG, HOSTNAME); + metric.setTag(ALERTIDTAG, alert.getId().toString()); + metric.setTag(USERTAG, alert.getOwner().getUserName()); metric.addDatapoints(datapoints); metricsAlertScheduled.add(metric); - Map tags = new HashMap<>(); tags.put(USERTAG, alert.getOwner().getUserName()); + tags.put(ALERTIDTAG, alert.getId().toString()); _monitorService.modifyCounter(Counter.ALERTS_SCHEDULED, 1, tags); } @@ -900,10 +1649,450 @@ public void enqueueAlerts(List alerts) { _tsdbService.putMetrics(metricsAlertScheduled); } catch (Exception ex) { _logger.error("Error occurred while pushing alert audit scheduling time series. Reason: {}", ex.getMessage()); - } + } + } + + // ****************************************************************************** + // Historical Testing - Alert Evaluation + // ****************************************************************************** + + + /** + * Evaluates the serialized alert and delivers results to the result cache. 
+ * Used by historical testing. + * + * @param serializedAlert The serializedAlert + * @param when The time at which to evaluate the alert. + * @param testUuid The test UUID. + * + * @return returns Job history of alerts executed. + */ + // TODO - improve architecture - test spec, and callback class for delivering results. + @Override + // IMPORTANT - @Transactional ?? - should not be as it shouldn't talk to the DB + public void testEvaluateAlert(String serializedAlert, Long when, String testUuid) + { + requireNotDisposed(); + + assert(serializedAlert != null && serializedAlert.length() > 0); + assert(testUuid != null && testUuid.length() > 0); + assert(when != 0L); + + AlertTestResults results = new AlertTestResults(testUuid); + Alert alert; + + _logger.debug(MessageFormat.format("historicalTest {2} evaluationTime {1} serializedAlert {0}", serializedAlert, when, testUuid)); + + // NOTE - ian - Use of .getId() - reserve range of 250 IDs for use by testing. (They can be shared during alert testing.) + // note - ian - JPA reserves IDs from 100001 onward. Could we take 2k below that? + // NOTE - TODO - can we filter history for a specific range of IDs? + // NOTE - how to specify the unusable range of ids. + // NOTE - TODO - serialized alert - on JPA load, fixup all ids to be within the range of test alerts. + + try { + alert = _mapper.readValue(serializedAlert, Alert.class); // IMPORTANT - verify that this is independent of ids (i.e. that internal ids work fine!) + } catch (Exception e) + { + String logMessage = MessageFormat.format("Failed to deserialize alert {0}. Full stack trace of exception {1}", serializedAlert, ExceptionUtils.getFullStackTrace(e)); + _logger.warn(logMessage); + return; + } + + try { + if (alert != null) + { + testEvaluateAlert(alert, when, results); + } + } + finally + { + // TODO - deliver results + } + } + + // ------------------------------------------------------------------------------------------ + // FUTURE - Handle notification state changes (cooldown) + // NOTE - not important for V1 of historical testing. In any case, it requires new code + // to handle because the status and state must be maintained separately from the state of the cached + // state for live alerts. + // Also (IMPORTANT) this state can only be generated oldest-newest by ensuring execution order. + // NOTE - ian - historical testing & notification - 2 problems: 1) stubs/cooldown handling, 2) cooldown should be evaluated oldest - newest + // note - ian - State is preserved in notifiers - bad! COnsider post process to handle cool-down. Note that start of window cooldown will not be correctly evaluated. + // ----------------------------------------------------------------------------------------- + + private boolean verifyTriggerNamesAreUnique(Alert alert) + { + List triggers = alert.getTriggers(); + Set triggerNames = triggers.stream().map( t -> t.getName()).collect(Collectors.toSet()); + return triggers.size() == triggerNames.size(); + } + + @VisibleForTesting + public boolean testEvaluateAlert(Alert alert, Long alertEvaluationTime, AlertTestResults testResults) + { + MessageList messages = new MessageList(); + String msg; + String logMessage; + + assert(alert != null); + assert(alertEvaluationTime != 0L); + assert(testResults != null); + + // Isolate result recording into API + // - results.recordRequest(alert,time) + // - results.recordMetrics(metrics) + // - results.recordTriggersFired(triggers) + // - results.recordSummary(failed,nodata,latency,tags,...) + // TODO - do in caller? 
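// ---------------------------------------------------------------------------
// [Editorial sketch, not part of this diff] The "result recorder" these TODOs
// ask for could be a small interface mirroring the recordRequest/recordMetrics/
// recordTriggersFired/recordSummary calls listed above, so that
// testEvaluateAlert() writes results through it instead of mutating
// AlertTestResults directly. All names here are hypothetical.
interface AlertTestResultRecorder {
    void recordRequest(Alert alert, long evaluationTime);
    void recordMetrics(List<Metric> metrics);
    void recordTriggersFired(Map<BigInteger, Map<Metric, Long>> firedTimesByTriggerId);
    void recordSummary(boolean failed, boolean noData, long latencyMs, Map<String, String> tags);
}
// ---------------------------------------------------------------------------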
+ // TODO - replace with result recorder? + testResults.setAlert(alert); + testResults.setEvaluationTime(alertEvaluationTime); + testResults.setExpression(alert.getExpression()); + testResults.setCronEntry(alert.getCronEntry()); + + requireNotDisposed(); + loadWhiteListRegexPatterns(); // NOTE - probably not needed for historical testing (used by datalag experiments) + + List notifications = new ArrayList<>(alert.getNotifications()); + + // Note - It's OK to test alerts without notifications, but only metrics will be returned. + messages.info(notifications.size() == 0, () -> String.format("Info: Testing alert %s with no notifications. NO triggers will be evaluated", alert.getName())); + // TODO - IMPORTANT - verify unique trigger names (assumes unique names and IDs) + // TODO - IMPORTANT - calling code should validate expression + // TODO - calling code should validate the cron. + + + + // Evaluate Alert, Triggers, Notifications ----------------------------------------------------------------- + // TODO - enable datalag monitor in alert testing? + boolean datalagMonitorEnabled = Boolean.valueOf(_configuration.getValue(DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getName(), DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getDefaultValue())); // TODO - get default value + + long jobStartTime = System.currentTimeMillis(); + long evaluateEndTime = 0; + + updateRequestContext(alert); // NOTE - stores owner name of current alert in thread local storage // UGH! + + // Collect missing data triggers + Set missingDataTriggers = new HashSet(); + for(Trigger trigger : alert.getTriggers()) { + if(trigger.getType().equals(TriggerType.NO_DATA)) { + missingDataTriggers.add(trigger); + } + } + + boolean alertSkipped = false; + boolean alertIsValid = true; + boolean alertFailure = false; + boolean alertEvaluationStarted = false; + boolean doesDatalagExistInAnyDC = false; + boolean noDataIsAvailable = false; + INotificationHandler np = new TestNotificationProcessor(this, _logger, messages); + + try { + + // Verify alert semantics --------------------------------------------------- + if (!verifyTriggerNamesAreUnique(alert)) + { + messages.warn("Some triggers of this alert do not have unique names."); + alertIsValid = false; // TODO - bail instead and throw a distinct exception + } + + + // Evaluate the Expression -------------------------------------------------- + + MetricQueryResult queryResult = _metricService.getMetrics(alert.getExpression(), alertEvaluationTime); + // Updates metric query perf counters + // QUESTION - MonitoringUtils.updateAlertMetricQueryPerfCounters(_monitorService, queryResult, alert.getOwner().getUserName()); // QUESTION - call in historical testing? + List metrics = new ArrayList<>(queryResult.getMetricsList()); + int initialMetricSize = metrics.size(); + + + // Check for Data-Lag ------------------------------------------------------- + doesDatalagExistInAnyDC = datalagMonitorEnabled && doesDatalagExistsInAtLeastOneDC(queryResult, alert); + + // TODO - keep or remove datalag testing? + if (datalagMonitorEnabled) { + /* Two Cases: 1. Contains transform, 2. Doesn't contain transform. + * If contain transform, disable if at least 1 dc is lagging. + * else disable per expanded expression specific lag. + * TODO: If transforms are independent, should we disable entirely or expression specific. 
+ */ + if( queryResult.containsTransform() || initialMetricSize == 0) { // Skip alert evaluation if the initial time series returned by metric service is null or if expression contains transforms and data lag exists in at least one dc. + if ( doesDatalagExistInAnyDC ) { + messages.warn(String.format("Skipping Alert %s Evaluation as data was lagging in at least one dc for expression", alert.getName())); + alertSkipped = true; + return false; + } + } else { // expanded alert expression doesn't contain any transforms. + + // TODO - do we need these messages in historical testing? + List perDcDataLagMsgs = metrics.stream().map( m -> _shouldMetricBeRemovedForDataLag(alert,m)).collect(Collectors.toList()); + perDcDataLagMsgs.removeIf( s -> s == null ); + for (String m: perDcDataLagMsgs) + { + messages.warn(m); + } + + // Can we do something neater so we don't have to iterate through metrics twice? + metrics.removeIf( m -> _shouldMetricBeRemovedForDataLag(alert,m) != null ? true : false); + } + + if (initialMetricSize > 0 && metrics.size() == 0) { // Skip alert evaluation if all the expanded alert expression contains dc with data lag and initial size was non-zero. + alertSkipped = true; + return false; + } + } + + + // Return metrics computed. + testResults.setMetrics(metrics); + + alertEvaluationStarted = true; + + evaluateEndTime = System.currentTimeMillis(); // set evaluateEndTime to evaluate start time to override init value (0) + + // If all metrics are empty + if (areDatapointsEmpty(metrics)) { + + noDataIsAvailable = true; + + // TODO - record missing data triggers as fired. IMPORTANT + _processMissingDataNotifications(np, alert, (History) null, alertEvaluationTime, missingDataTriggers); + + } else { + + // Determine which triggers to evaluate. + // Only evaluate those triggers which are associated with some notification. + int numTriggers = alert.getTriggers().size(); + + Set triggersToEvaluate = new HashSet<>(); + for (Notification notification : alert.getNotifications()) { + triggersToEvaluate.addAll(notification.getTriggers()); + } + int numTriggersToEvaluate = triggersToEvaluate.size(); + + Set nonEvaluatedTriggers = new HashSet(alert.getTriggers()); + nonEvaluatedTriggers.removeAll(triggersToEvaluate); + + if (messages.warn(numTriggers == 0, () -> "Warning: Alert has no triggers. NO triggers will be evaluated.")) {} + else if (messages.error(numTriggers > 0 && numTriggersToEvaluate == 0, () -> "Error: NO Notifier has a Trigger. NO triggers will be evaluated.")) {} + else if (messages.warn(numTriggers != numTriggersToEvaluate, () -> "Warning: Some triggers are not linked to a Notifier and will NOT be evaluated." )) {} + // TODO - get Alert.triggers and warn if orphan triggers (no notifier referring to them). + + + // Evaluate Triggers + Map> triggerFiredTimesAndMetricsByTrigger = + _evaluateTriggers(triggersToEvaluate, metrics, alert.getExpression(), alertEvaluationTime, this::incrementTriggersViolated); + + evaluateEndTime = System.currentTimeMillis(); + + // Save Trigger map Here -> TODO - convert to understandable format first? + testResults.setTriggerFirings(triggerFiredTimesAndMetricsByTrigger); + testResults.setEvaluatedTriggers(triggersToEvaluate.stream().map( t -> t.getId()).collect(Collectors.toSet())); + testResults.setNonEvaluatedTriggers(nonEvaluatedTriggers.stream().map( t -> t.getId()).collect(Collectors.toSet())); + + // TODO - Historical Testing - V2 - Record Notification Firing by passing a reference to the testResults object in np. 
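// ---------------------------------------------------------------------------
// [Editorial sketch, not part of this diff] One way to satisfy the V2 item
// above: pass testEvaluateAlert() a handler that captures firings instead of
// dropping them. A hypothetical subclass of the TestNotificationProcessor
// defined later in this file:
class RecordingTestNotificationProcessor extends TestNotificationProcessor {

    // "triggerName -> notificationName", in firing order.
    private final List<String> firings = new ArrayList<>();

    RecordingTestNotificationProcessor(DefaultAlertService alertService, Logger logger, MessageList messages) {
        super(alertService, logger, messages);
    }

    @Override
    public void _onNotificationFired(Alert alert, Trigger trigger, Notification notification, Metric m,
                                     History history, Map<Metric, Long> triggerFiredTimesForMetrics,
                                     Boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp) {
        // Record only; alert testing must never deliver a real notification.
        firings.add(trigger.getName() + " -> " + notification.getName());
    }

    List<String> getFirings() { return firings; }
}
// ---------------------------------------------------------------------------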
+ + // Execute Notifications + for (Notification notification : alert.getNotifications()) { + if (notification.getTriggers().isEmpty()) { + _processTriggerlessNotification(np, alert, (History) null, metrics, notification, alertEvaluationTime); + } else { + _processNotification(np, alert, (History) null, metrics, triggerFiredTimesAndMetricsByTrigger, notification, alertEvaluationTime); + if (missingDataTriggers.size() > 0) { + // processing to possibly to clear missing data notification + _processMissingDataNotification(np, alert, (History) null, missingDataTriggers, notification, false, alertEvaluationTime); + } + } + } + } + + } catch (MissingDataException mde) { + if (doesDatalagExistInAnyDC && !alertEvaluationStarted) { + alertSkipped = true; + } + alertFailure = true; + _handleAlertEvaluationException(np, alert, jobStartTime, alertEvaluationTime, null, + missingDataTriggers, mde, true); + } catch (Exception ex) { + if (doesDatalagExistInAnyDC && !alertEvaluationStarted) { + alertSkipped = true; + } + alertFailure = true; + _handleAlertEvaluationException(np, alert, jobStartTime, alertEvaluationTime, null, + missingDataTriggers, ex, false); + } finally { + + // Return variables + Long latency = 0L; + Long evalOnlyLatency = 0L; + boolean bValid = alertIsValid; + boolean bSkipped = alertSkipped; + boolean bFailed = alertFailure; // exception caught + boolean bNoData = noDataIsAvailable; + + Map tags = new HashMap<>(); + tags.put(HOSTTAG, HOSTNAME); + tags.put(USERTAG, alert.getOwner().getUserName()); + + // TODO - compute latency even when skipped + if (!alertSkipped) { + latency = System.currentTimeMillis() - jobStartTime; + if (evaluateEndTime == 0) { + evaluateEndTime = System.currentTimeMillis(); + } + evalOnlyLatency = evaluateEndTime - jobStartTime; + } + + testResults.setTags(tags); + testResults.setLatency(latency); + testResults.setEvaluateOnlyLatency(evalOnlyLatency); + testResults.setIsValid(bValid); + testResults.setIsFailed(bFailed); + testResults.setIsSkipped(bSkipped); + testResults.setIsNoData(bNoData); + testResults.setMessages(messages.messages); + + } + return true; // TODO - return results! + } + + /* + * MessageList - this class collects messages into a list of strings. + * For the methods that take a test, the String argument is replaced by a supplier so that the string can + * be lazily evaluated. The user is expected to pass a lambda that looks like the following to construct the string + * when the test evaluates to true: () -> String.format(format,...) + */ + // TODO - move to generic utility class + class MessageList + { + public List messages; + + public MessageList() + { + messages = new ArrayList(); + } + + private boolean condition(boolean test, String type, Supplier s) + { + if (test) + { + messages.add(type + ": " + s.get()); + } + return test; + } + + private boolean unconditional(String type, String str) + { + messages.add(type + ": " + str); + return true; + } + + public boolean warn(boolean test, Supplier s) { return condition(test, "Warning", s); } + public boolean info(boolean test, Supplier s) { return condition(test, "Info", s); } + public boolean error(boolean test, Supplier s) { return condition(test, "Error", s); } + + public boolean warn(String s) { return unconditional("Warning", s); } + public boolean info(String s) { return unconditional("Info", s); } + public boolean error(String s) { return unconditional("Error", s); } + } + + + /* + * TestNotificationProcessor - NotificationProcessor used in Alert testing. 
+
+    /*
+     * TestNotificationProcessor - NotificationProcessor used in Alert testing.
+     */
+    // IMPORTANT - Historical Testing - V2 - reporting notification behavior.
+    // TODO - encapsulate all of the result recording in this object? Implement a result interface?
+    class TestNotificationProcessor implements INotificationHandler {
+
+        private DefaultAlertService alertService;
+        private Logger _logger;
+        private MessageList messages;
+
+        public TestNotificationProcessor(DefaultAlertService alertService, Logger logger, MessageList messages)
+        {
+            this.alertService = alertService;
+            this._logger = logger;
+            this.messages = messages;
+        }
+
+        public MessageList getMessages() { return messages; }
+
+        // Notification -------------------------------------------------------------------
+
+        @Override
+        public void _onNotificationRefocusValueNotifier(Notification notification, History history)
+        {
+            // TODO - messages += MessageFormat.format("RefocusValueNotifiers must not be associated with triggers. Name: `{0}`", notification.getName());
+        }
+
+        @Override
+        public void _onNotificationFired(Alert alert, Trigger trigger, Notification notification, Metric m, History history,
+                                         Map triggerFiredTimesForMetrics,
+                                         Boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp) {}
+
+        @Override
+        public void _onNotificationCleared(Alert alert, Trigger trigger, Notification notification, Metric m, History history,
+                                           boolean isBooleanRefocusNotifier, Long alertEnqueueTimestamp) {}
+
+        // Triggerless Notification -------------------------------------------------------
+
+        @Override
+        public void _onTriggerlessIsNotRefocusValueNotifier(Notification notification, History history) {}
+
+        @Override
+        public void _onTriggerlessIsRefocusValueNotifier(Alert a, Notification n, Metric m, History h,
+                                                         Long dataPoint, Long alertEnqueueTimestamp) {}
+
+        // Missing Data Notification -------------------------------------------------------
+
+        @Override
+        public void _onMissingDataRefocusValueNotification(Notification notification, History history)
+        {
+            // TODO - messages += MessageFormat.format("RefocusValueNotifiers must not be associated with no-data triggers. Name: `{0}`", notification.getName());
+        }
+
+        @Override
+        public void _onMissingDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history,
+                                               boolean isRefocusNotifier, Long alertEnqueueTimestamp) {}
+
+        @Override
+        public void _onMissingClearedDataNotification(Alert alert, Trigger trigger, Notification notification, Metric m, History history,
+                                                      boolean isRefocusNotifier, Long alertEnqueueTimestamp) {}
+
+        @Override
+        public void _onMissingDataNotificationEnabled(Alert alert, History history) {}
+
+        @Override
+        public void _onMissingDataNotificationDisabled(Alert alert, History history) {}
+
+        // Exception Handling ------------------------------------------------------------
+        @Override
+        public void _onAlertEvaluationException(Alert alert, History history, long jobStartTime, Exception ex, boolean isDataMissing)
+        {
+            long jobEndTime;
+            String logMessage;
+            jobEndTime = System.currentTimeMillis();
+            if (isDataMissing)
+            {
+                logMessage = String.format("Failed to evaluate alert : `%s` due to missing data exception. Exception message - %s",
+                        alert.getName(), ExceptionUtils.getMessage(ex));
+            } else
+            {
+                logMessage = String.format("Failed to evaluate alert : `%s`. Exception message - %s",
+                        alert.getName(), ExceptionUtils.getMessage(ex));
+            }
+            messages.warn(logMessage);
+        }
+
+    }
+
+    // ******************************************************************************
+    // Query API
+    // ******************************************************************************
     @Override
     public List<Alert> findAllAlerts(boolean metadataOnly) {
         requireNotDisposed();
@@ -974,13 +2163,14 @@ public List<Alert> findSharedAlerts(boolean metadataOnly, PrincipalUser owner, I
     }
 
     @Override
-    public List<Alert> findSharedAlertsPaged(Integer limit, Integer offset, String searchText) {
+    public List<Alert> findSharedAlertsPaged(Integer limit, Integer offset, String searchText, String sortField, String sortOrder) {
         requireNotDisposed();
-        return Alert.findSharedAlertsMetaPaged(_emProvider.get(), limit, offset, searchText);
+        return Alert.findSharedAlertsMetaPaged(_emProvider.get(), limit, offset, searchText, sortField, sortOrder);
     }
 
     @Override
-    public List<Alert> findPrivateAlertsForPrivilegedUserPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText) {
+    public List<Alert> findPrivateAlertsForPrivilegedUserPaged(PrincipalUser owner, Integer limit, Integer offset, String searchText,
+                                                               String sortField, String sortOrder) {
         requireNotDisposed();
 
         // Neither an invalid user nor a non-privileged user may view others' non-shared alerts, so immediately return an empty list
@@ -988,7 +2178,7 @@ public List<Alert> findPrivateAlertsForPrivilegedUserPaged(PrincipalUser owner,
             return new ArrayList<>(0);
         }
 
-        return Alert.findPrivateAlertsForPrivilegedUserMetaPaged(_emProvider.get(), owner, limit, offset, searchText);
+        return Alert.findPrivateAlertsForPrivilegedUserMetaPaged(_emProvider.get(), owner, limit, offset, searchText, sortField, sortOrder);
     }
 
     @Override
@@ -1026,6 +2216,12 @@ public int countAlerts(AlertsCountContext context) {
         return 0;
     }
 
+
+    // ******************************************************************************
+    // Trigger support, Notification support and implementation methods.
+    // Should be re-organized.
+    // ******************************************************************************
+
     /**
      * Returns an instance of a supported notifier.
      *
@@ -1050,8 +2246,15 @@ public Notifier getNotifier(SupportedNotifier notifier) {
             return _notifierFactory.getWardenPostingNotifier();
         case GUS:
             return _notifierFactory.getGusNotifier();
+        case PAGERDUTY:
+            return _notifierFactory.getPagerDutyNotifier();
         case REFOCUS:
             return _notifierFactory.getRefocusNotifier();
+        case REFOCUS_BOOLEAN:
+            return _notifierFactory.getRefocusBooleanNotifier();
+        case REFOCUS_VALUE:
+            return _notifierFactory.getRefocusValueNotifier();
+        // TODO - ian NoOpNotifier!
         default:
             return _notifierFactory.getDBNotifier();
         }
@@ -1088,6 +2291,7 @@ public Long getTriggerFiredDatapointTime(Trigger trigger, Metric metric, String
                 return null;
             }
         }
+        // else NOTE - metric.getDataPoints().size() > 1
 
         Collections.sort(sortedDatapoints, new Comparator<Entry<Long, Double>>() {
 
@@ -1102,7 +2306,7 @@ public int compare(Entry<Long, Double> e1, Entry<Long, Double> e2) {
         if(trigger.getType().equals(TriggerType.NO_DATA)) {
             if(trigger.getInertia()>0) {
                 Long[] queryTimes = AlertUtils.getStartAndEndTimes(queryExpression, alertEnqueueTimestamp);
-                if(((sortedDatapoints.get(0).getKey()-queryTimes[0]) > trigger.getInertia())){
+                if(((sortedDatapoints.get(0).getKey()-queryTimes[0]) > trigger.getInertia())){
 
                     return sortedDatapoints.get(0).getKey();
                 }
@@ -1132,6 +2336,25 @@ public int compare(Entry<Long, Double> e1, Entry<Long, Double> e2) {
         return null;
     }
+
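As a caller-side sketch of the paged query API above (the sort field and order values are illustrative assumptions; the diff itself does not enumerate the accepted values):

    // Second page of 50 shared alerts matching "payments", hypothetically sorted by name, descending.
    List<Alert> page = alertService.findSharedAlertsPaged(50, 50, "payments", "name", "DESC");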
+    /**
+     * Returns the timestamp of the most recent data point in the given metric.
+     *
+     * @param metric Metric data for the alert to which the trigger belongs.
+     *
+     * @return The timestamp of the latest data point in the metric, or null if the metric has no data points.
+     */
+    public Long getLatestDatapointTime(Metric metric, String queryExpression, Long alertEnqueueTimestamp) {
+
+        if (metric.getDatapoints().isEmpty()) {
+            return null;
+        }
+
+        Long latestTime = Collections.max(metric.getDatapoints().keySet());
+        return latestTime;
+    }
+
+
     @Override
     @Transactional
     public void deleteTrigger(Trigger trigger) {
@@ -1173,7 +2396,7 @@ private String getDateMMDDYYYY(long dateInSeconds) {
 
     //~ Inner Classes ********************************************************************************************************************************
 
     /**
-     * Used to enqueue alerts to evaluate. The timestamp is used to reconcile lag between enqueue time
+     * Used to enqueue alerts to evaluate.  The timestamp is used to reconcile lag between enqueue time
      * and evaluation time by adjusting relative times in the alert metric expression being evaluated.
     *
     * @author  Bhinav Sura (bhinav.sura@salesforce.com)
@@ -1234,6 +2457,10 @@ public static class NotificationContext {
         private Metric triggeredMetric;
         private long alertEnqueueTimestamp;
         private History history;
+        private int notificationRetries = 0;
+        private Pair<String, byte[]> evaluatedMetricSnapshotDetails;
+        private String evaluatedMetricSnapshotURL;
+        private String alertEvaluationTrackingID;
 
        /**
         * Creates a new Notification Context object.
         *
         * @param alert The alert for the notification.
         * @param trigger The trigger for the notification.
         * @param notification The notification.
         * @param triggerFiredTime The trigger fired time.
         * @param triggerEventValue The trigger event value.
         * @param triggeredMetric The corresponding metric
         * @param history History object
         */
-        public NotificationContext(Alert alert, Trigger trigger, Notification notification, long triggerFiredTime, double triggerEventValue, Metric triggeredMetric, History history) {
+        public NotificationContext(Alert alert, Trigger trigger, Notification notification, long triggerFiredTime,
+                                   double triggerEventValue, Metric triggeredMetric, History history,
+                                   Pair<String, byte[]> evaluatedMetricSnapshotDetails, String evaluatedMetricSnapshotURL,
+                                   String alertEvaluationTrackingID) {
+            this.alert = alert;
+            this.trigger = trigger;
+            this.coolDownExpiration = notification.getCooldownExpirationByTriggerAndMetric(trigger, triggeredMetric);
+            this.notification = notification;
+            this.triggerFiredTime = triggerFiredTime;
+            this.triggerEventValue = triggerEventValue;
+            this.triggeredMetric = triggeredMetric;
+            this.alertEnqueueTimestamp = 0L;
+            this.history = history;
+            this.evaluatedMetricSnapshotDetails = evaluatedMetricSnapshotDetails;
+            this.evaluatedMetricSnapshotURL = evaluatedMetricSnapshotURL;
+            this.alertEvaluationTrackingID = alertEvaluationTrackingID;
+        }
+
+        public NotificationContext(Alert alert, Trigger trigger, Notification notification, long triggerFiredTime,
+                                   double triggerEventValue, Metric triggeredMetric, History history) {
            this.alert = alert;
            this.trigger = trigger;
            this.coolDownExpiration = notification.getCooldownExpirationByTriggerAndMetric(trigger, triggeredMetric);
@@ -1256,11 +2502,34 @@ public NotificationContext(Alert alert, Trigger trigger, Notification notificati
            this.triggeredMetric = triggeredMetric;
            this.alertEnqueueTimestamp = 0L;
            this.history = history;
+            this.evaluatedMetricSnapshotDetails = null;
+            this.evaluatedMetricSnapshotURL = null;
+            this.alertEvaluationTrackingID = null;
        }
 
        /** Creates a new NotificationContext object. */
        protected NotificationContext() { }
 
+        public Notification getAlertNotification() {
+            final String notificationName = notification.getName();
+            for (Notification alertNotification : alert.getNotifications()) {
+                if (alertNotification.getName().equalsIgnoreCase(notificationName)) {
+                    return alertNotification;
+                }
+            }
+            return null;
+        }
+
+        public Trigger getAlertTrigger() {
+            final String triggerName = trigger.getName();
+            for (Trigger alertTrigger : alert.getTriggers()) {
+                if (alertTrigger.getName().equalsIgnoreCase(triggerName)) {
+                    return alertTrigger;
+                }
+            }
+            return null;
+        }
+
        /**
         * Returns the alert id.
         *
@@ -1385,6 +2654,26 @@ public void setTriggeredMetric(Metric triggeredMetric) {
 
        public long getAlertEnqueueTimestamp() { return alertEnqueueTimestamp; }
        public void setAlertEnqueueTimestamp(Long alertEnqueueTimestamp) { this.alertEnqueueTimestamp = alertEnqueueTimestamp; }
+
+        public void setNotificationRetries(int notificationRetries) { this.notificationRetries = notificationRetries; }
+
+        public Optional<Pair<String, byte[]>> getEvaluatedMetricSnapshotDetails() {
+            return Optional.ofNullable(evaluatedMetricSnapshotDetails);
+        }
+
+        public Optional<String> getEvaluatedMetricSnapshotURL() {
+            return Optional.ofNullable(evaluatedMetricSnapshotURL);
+        }
+
+        public Optional<String> getAlertEvaluationTrackingID() {
+            return Optional.ofNullable(alertEvaluationTrackingID);
+        }
+
+        /**
+         *
+         * @return number of retries to send the notification.
+         */
+        public int getNotificationRetries() { return notificationRetries; }
    }
 
@@ -1393,6 +2682,9 @@ public void exportMetric(Metric metric, Double value) {
        this._monitorService.exportMetric(metric, value);
    }
 
-
+    @Override
+    public void updateCounter(Counter counter, Double value) {
+        this._monitorService.updateCounter(counter, value, null);
+    }
 }
-/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
\ No newline at end of file
+/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
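A sketch of how an evaluator might populate the new NotificationContext fields (the Pair holds a content ID and image bytes, per the accessors above; all concrete values here are illustrative, not from this diff):

    Pair<String, byte[]> snapshot = Pair.of("snapshot-cid", imageBytes);   // hypothetical PNG bytes
    NotificationContext ctx = new NotificationContext(alert, trigger, notification,
            System.currentTimeMillis(), 42.0, triggeredMetric, history,
            snapshot, "https://argus.example.com/#/images/123", "tracking-id-123");
    // The Optional accessors shield consumers from the nullable fields.
    String url = ctx.getEvaluatedMetricSnapshotURL().orElse("n/a");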
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCache.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCache.java
index f841dbab2..c207fb8cb 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCache.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCache.java
@@ -36,6 +36,7 @@
 
 import javax.persistence.EntityManager;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.inject.Provider;
 
 /*
@@ -45,7 +46,7 @@
  */
 public class NotificationsCache {
 
-    private NotificationsCacheRefresherThread refresherThread;
+    private final NotificationsCacheRefresherThread refresherThread;
 
     private Map<BigInteger, Map<String, Long>> notificationCooldownExpirationMap = new HashMap<BigInteger, Map<String, Long>>();
 
@@ -55,6 +56,22 @@ public class NotificationsCache {
 
     public NotificationsCache(Provider<EntityManager> em) {
         refresherThread = new NotificationsCacheRefresherThread(this, em);
+
+        initCacheAndStartRefresherThread();
+    }
+
+    @VisibleForTesting
+    protected NotificationsCache(NotificationsCacheRefresherThread refresherThread) {
+        this.refresherThread = refresherThread;
+
+        initCacheAndStartRefresherThread();
+    }
+
+    private void initCacheAndStartRefresherThread() {
+        // Run once on main thread to populate the cache
+        refresherThread.runOnce();
+
+        // Now set to run periodically
         refresherThread.setDaemon(true);
         refresherThread.start();
     }
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCacheRefresherThread.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCacheRefresherThread.java
index 4a5a2bf94..53299a98d 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCacheRefresherThread.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/NotificationsCacheRefresherThread.java
@@ -68,49 +68,7 @@ public NotificationsCacheRefresherThread(NotificationsCache cache, Provider<EntityManager> em) {
     public void run() {
         while (!isInterrupted()) {
             try {
-                _logger.info("Starting notifications cache refresh");
-                EntityManager em = _emProvider.get();
-
-                // populating notifications cooldown cache
-                Query q = em.createNativeQuery("select * from notification_cooldownexpirationbytriggerandmetric");
-                List<Object[]> objects = q.getResultList();
-                Map<BigInteger, Map<String, Long>> currNotificationCooldownExpirationMap = new HashMap<BigInteger, Map<String, Long>>();
-
-                for(Object[] object : objects) {
-                    BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0])));
-                    Long cooldownExpiration = Long.class.cast(object[1]);
-                    String key = String.class.cast(object[2]);
-                    if(currNotificationCooldownExpirationMap.get(notificationId)==null) {
-                        currNotificationCooldownExpirationMap.put(notificationId, new HashMap<String, Long>());
-                    }
-                    currNotificationCooldownExpirationMap.get(notificationId).put(key, cooldownExpiration);
-                }
-                notificationsCache.setNotificationCooldownExpirationMap(currNotificationCooldownExpirationMap);
-
-                // populating the active status cache
-                q = em.createNativeQuery("select * from notification_activestatusbytriggerandmetric");
-                objects = q.getResultList();
-                Map<BigInteger, Map<String, Boolean>> currNotificationActiveStatusMap = new HashMap<BigInteger, Map<String, Boolean>>();
-
-                for(Object[] object : objects) {
-                    BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0])));
-                    Boolean isActive;
-                    try {
-                        isActive = Boolean.class.cast(object[1]);
-                    } catch (ClassCastException e) {
-                        // This is because Embedded Derby stores booleans as 0, 1.
-                        isActive = Integer.class.cast(object[1]) == 0 ? Boolean.FALSE : Boolean.TRUE;
-                    }
-                    String key = String.class.cast(object[2]);
-                    if(currNotificationActiveStatusMap.get(notificationId)==null) {
-                        currNotificationActiveStatusMap.put(notificationId, new HashMap<String, Boolean>());
-                    }
-                    currNotificationActiveStatusMap.get(notificationId).put(key, isActive);
-                }
-                notificationsCache.setNotificationActiveStatusMap(currNotificationActiveStatusMap);
-
-                notificationsCache.setNotificationsCacheRefreshed(true);
-                _logger.info("Notifications cache refresh successful.");
+                runOnce();
                 sleep(REFRESH_INTERVAL_MILLIS);
             }catch(Exception e) {
                 _logger.error("Exception occurred when trying to refresh notifications cache - " + ExceptionUtils.getFullStackTrace(e));
@@ -118,4 +76,50 @@ public void run() {
             }
         }
     }
+
+    public void runOnce() {
+        _logger.info("Starting notifications cache refresh");
+        EntityManager em = _emProvider.get();
+
+        // populating notifications cooldown cache
+        Query q = em.createNativeQuery("select * from notification_cooldownexpirationbytriggerandmetric");
+        List<Object[]> objects = q.getResultList();
+        Map<BigInteger, Map<String, Long>> currNotificationCooldownExpirationMap = new HashMap<BigInteger, Map<String, Long>>();
+
+        for(Object[] object : objects) {
+            BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0])));
+            Long cooldownExpiration = Long.class.cast(object[1]);
+            String key = String.class.cast(object[2]);
+            if(currNotificationCooldownExpirationMap.get(notificationId)==null) {
+                currNotificationCooldownExpirationMap.put(notificationId, new HashMap<String, Long>());
+            }
+            currNotificationCooldownExpirationMap.get(notificationId).put(key, cooldownExpiration);
+        }
+        notificationsCache.setNotificationCooldownExpirationMap(currNotificationCooldownExpirationMap);
+
+        // populating the active status cache
+        q = em.createNativeQuery("select * from notification_activestatusbytriggerandmetric");
+        objects = q.getResultList();
+        Map<BigInteger, Map<String, Boolean>> currNotificationActiveStatusMap = new HashMap<BigInteger, Map<String, Boolean>>();
+
+        for(Object[] object : objects) {
+            BigInteger notificationId = new BigInteger(String.valueOf(Long.class.cast(object[0])));
+            Boolean isActive;
+            try {
+                isActive = Boolean.class.cast(object[1]);
+            } catch (ClassCastException e) {
+                // This is because Embedded Derby stores booleans as 0, 1.
+                isActive = Integer.class.cast(object[1]) == 0 ? Boolean.FALSE : Boolean.TRUE;
+            }
+            String key = String.class.cast(object[2]);
+            if(currNotificationActiveStatusMap.get(notificationId)==null) {
+                currNotificationActiveStatusMap.put(notificationId, new HashMap<String, Boolean>());
+            }
+            currNotificationActiveStatusMap.get(notificationId).put(key, isActive);
+        }
+        notificationsCache.setNotificationActiveStatusMap(currNotificationActiveStatusMap);
+
+        notificationsCache.setNotificationsCacheRefreshed(true);
+        _logger.info("Notifications cache refresh successful.");
+    }
 }
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/com/salesforce/dva/argus/service/alert/testing/TestResults.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/com/salesforce/dva/argus/service/alert/testing/TestResults.java
new file mode 100644
index 000000000..6cb5ea470
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/com/salesforce/dva/argus/service/alert/testing/TestResults.java
@@ -0,0 +1,5 @@
+package com.salesforce.dva.argus.service.alert.com.salesforce.dva.argus.service.alert.testing;
+
+public class TestResults
+{
+}
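The NotificationsCache change above captures a common initialization pattern: populate the cache synchronously once, so no lookup ever sees an empty cache, then keep it fresh from a daemon thread. A minimal sketch of the same pattern under assumed names (not part of this diff):

    final Runnable refresh = () -> { /* reload cache contents */ };
    Thread refresher = new Thread(() -> {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                Thread.sleep(60_000L);
                refresh.run();              // periodic background refresh
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    });
    refresh.run();                          // warm the cache on the caller's thread first
    refresher.setDaemon(true);              // do not keep the JVM alive just to refresh
    refresher.start();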
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/AuditNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/AuditNotifier.java
index 0610c9a5e..5c5710af8 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/AuditNotifier.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/AuditNotifier.java
@@ -31,12 +31,27 @@
 package com.salesforce.dva.argus.service.alert.notifier;
 
-import com.google.inject.Inject;
-import com.google.inject.Provider;
+import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+
+import java.math.BigInteger;
+import java.net.URLEncoder;
+import java.sql.Date;
+import java.text.MessageFormat;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.Properties;
+import java.util.TimeZone;
+
+import javax.persistence.EntityManager;
+
 import com.salesforce.dva.argus.entity.Audit;
 import com.salesforce.dva.argus.entity.JPAEntity;
 import com.salesforce.dva.argus.entity.Notification;
 import com.salesforce.dva.argus.entity.Trigger;
+import org.joda.time.DateTimeConstants;
+
+import com.google.inject.Inject;
+import com.google.inject.Provider;
 import com.salesforce.dva.argus.entity.Trigger.TriggerType;
 import com.salesforce.dva.argus.service.AnnotationService;
 import com.salesforce.dva.argus.service.AuditService;
@@ -45,18 +60,8 @@
 import com.salesforce.dva.argus.system.SystemConfiguration;
 import com.salesforce.dva.argus.util.AlertUtils;
 import com.salesforce.dva.argus.util.TemplateReplacer;
-import org.joda.time.DateTimeConstants;
-import java.math.BigInteger;
-import java.net.URLEncoder;
-import java.sql.Date;
-import java.text.MessageFormat;
-import java.text.SimpleDateFormat;
-import java.util.List;
-import java.util.Properties;
-import java.util.TimeZone;
-import javax.persistence.EntityManager;
-
-import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A notifier that sends notification to a database.
@@ -65,9 +70,10 @@
  */
 public class AuditNotifier extends DefaultNotifier {
 
+    private static final Logger LOGGER = LoggerFactory.getLogger(AuditNotifier.class);
     //~ Static fields/initializers *******************************************************************************************************************
 
-    protected static final ThreadLocal<SimpleDateFormat> DATE_FORMATTER = new ThreadLocal<SimpleDateFormat>() {
+    public static final ThreadLocal<SimpleDateFormat> DATE_FORMATTER = new ThreadLocal<SimpleDateFormat>() {
 
         @Override
         protected SimpleDateFormat initialValue() {
@@ -95,8 +101,7 @@ protected SimpleDateFormat initialValue() {
      * @param emf The entity manager factory. Cannot be null.
      */
     @Inject
-    public AuditNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, SystemConfiguration config,
-        Provider<EntityManager> emf) {
+    public AuditNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, SystemConfiguration config, Provider<EntityManager> emf) {
         super(metricService, annotationService, config);
         _auditService = auditService;
         _config = config;
@@ -110,12 +115,21 @@ public String getName() {
     }
 
     @Override
-    protected void sendAdditionalNotification(NotificationContext context) {
+    protected boolean sendAdditionalNotification(NotificationContext context) {
         requireArgument(context != null, "Notification context cannot be null.");
 
         Audit audit = new Audit(getAuditBody(context, NotificationStatus.TRIGGERED), SystemConfiguration.getHostname(), context.getAlert());
 
-        _auditService.createAudit(audit);
+        Audit res = _auditService.createAudit(audit);
+
+        // the previous call does not return any status, nor throw an exception
+        if (null != res) {
+            return true;
+        } else {
+            context.getHistory().appendMessageNUpdateHistory(MessageFormat.format("Not able to create a new audit record for triggered notification: {0}.",
+                context.getNotification().getName()), null, 0);
+            return false;
+        }
     }
 
     /**
@@ -153,25 +167,42 @@ protected String getAuditBody(NotificationContext context, NotificationStatus notificationStatus) {
         sb.append(notificationMessage);
 
         String customText = context.getNotification().getCustomText();
-        if( customText != null && customText.length()>0 && notificationStatus == NotificationStatus.TRIGGERED){
+        if(customText != null && customText.length()>0 && notificationStatus == NotificationStatus.TRIGGERED){
             sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("<br/>");
         }
+
+        context.getAlertEvaluationTrackingID().ifPresent(trackingID -> {
+            sb.append("Tracking ID: " + trackingID + "<br/>");
+        });
+
         sb.append(MessageFormat.format("Notification: {0}<br/>", TemplateReplacer.applyTemplateChanges(context,notification.getName())));
         sb.append(MessageFormat.format("Triggered by: {0}<br/>", TemplateReplacer.applyTemplateChanges(context, context.getTrigger().getName())));
         if(notificationStatus == NotificationStatus.TRIGGERED) {
             sb.append(MessageFormat.format("Notification is on cooldown until: {0}<br/>",
                 DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration()))));
         }
-        if (!expression.equals("")) sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", expression));
-        else sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", context.getAlert().getExpression()));
+
+        if(context.getEvaluatedMetricSnapshotURL().isPresent()) {
+            sb.append("<br/><br/><a href='" + context.getEvaluatedMetricSnapshotURL().get() + "'>Snapshot of the evaluated metric data.</a><br/>");
+        } else {
+            if(!expression.equals("")) {
+                sb.append("<br/><br/><a href='" + getExpressionUrl(expression) + "'>Click here to view the evaluated metric data.</a><br/><br/>");
+            }
+        }
+
         if(!expression.equals("")) {
-            sb.append("<br/><br/><a href='" + getExpressionUrl(expression) + "'>Click here to view the evaluated metric data.</a><br/>");
+            sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", expression));
+        } else {
+            sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", context.getAlert().getExpression()));
         }
+
+        sb.append("<br/><br/><a href='" + getExpressionUrl(context.getAlert().getExpression()) + "'>Click "
+                + "here for the current view of the metric data.</a><br/><br/>");
+
         if(context.getTriggeredMetric()!=null) {
             if(notificationStatus == NotificationStatus.TRIGGERED){
                 sb.append(MessageFormat.format("Triggered on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
-            }else {
+            } else {
                 sb.append(MessageFormat.format("Cleared on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
             }
         }
@@ -195,7 +226,7 @@ protected String getAuditBody(NotificationContext context, NotificationStatus notificationStatus) {
      *
      * @return The trigger detail information.
      */
-    protected String getTriggerDetails(Trigger trigger, NotificationContext context) {
+    public String getTriggerDetails(Trigger trigger, NotificationContext context) {
         if (trigger != null) {
             String triggerString = trigger.toString();
             triggerString = TemplateReplacer.applyTemplateChanges(context, triggerString);
@@ -214,7 +245,7 @@ protected String getTriggerDetails(Trigger trigger, NotificationContext context) {
      *
      * @return The fully constructed URL for the metric.
     */
-    protected String getMetricUrl(String metricToAnnotate, long triggerFiredTime) {
+    public String getMetricUrl(String metricToAnnotate, long triggerFiredTime) {
         long start = triggerFiredTime - (6L * DateTimeConstants.MILLIS_PER_HOUR);
         long end = Math.min(System.currentTimeMillis(), triggerFiredTime + (6L * DateTimeConstants.MILLIS_PER_HOUR));
         String expression = MessageFormat.format("{0,number,#}:{1,number,#}:{2}", start, end, metricToAnnotate);
@@ -229,7 +260,7 @@ protected String getMetricUrl(String metricToAnnotate, long triggerFiredTime) {
      * @return The fully constructed URL for the expression.
     */
     @SuppressWarnings("deprecation")
-    protected String getExpressionUrl(String expression) {
+    public String getExpressionUrl(String expression) {
         String template = _config.getValue(Property.AUDIT_METRIC_URL_TEMPLATE.getName(), Property.AUDIT_METRIC_URL_TEMPLATE.getDefaultValue());
 
         try {
             expression = URLEncoder.encode(expression, "UTF-8");
@@ -246,7 +277,7 @@ protected String getExpressionUrl(String expression) {
      *
      * @return The fully constructed URL for the alert.
     */
-    protected String getAlertUrl(BigInteger id) {
+    public String getAlertUrl(BigInteger id) {
         String template = _config.getValue(Property.AUDIT_ALERT_URL_TEMPLATE.getName(), Property.AUDIT_ALERT_URL_TEMPLATE.getDefaultValue());
 
         return template.replaceAll("\\$alertid\\$", String.valueOf(id));
@@ -264,12 +295,20 @@ public List getAllNotifications(JPAEntity entity) {
     }
 
     @Override
-    protected void clearAdditionalNotification(NotificationContext context) {
+    protected boolean clearAdditionalNotification(NotificationContext context) {
         requireArgument(context != null, "Notification context cannot be null.");
 
         Audit audit = new Audit(getAuditBody(context, NotificationStatus.CLEARED), SystemConfiguration.getHostname(), context.getAlert());
 
-        _auditService.createAudit(audit);
+        Audit res = _auditService.createAudit(audit);
+
+        if (null != res) {
+            return true;
+        } else {
+            context.getHistory().appendMessageNUpdateHistory(MessageFormat.format("Not able to create a new audit record for cleared notification: {0}.",
+                context.getNotification().getName()), null, 0);
+            return false;
+        }
     }
 
     @Override
@@ -293,9 +332,11 @@ public enum Property {
         /** The prodoutage email to send notification. */
         AUDIT_PRODOUTAGE_EMAIL_TEMPLATE("notifier.property.goc.prodoutage.email", "prodoutage@yourcompany.com"),
         /** The alert URL template to use in notifications. */
-        AUDIT_ALERT_URL_TEMPLATE("notifier.property.alert.alerturl.template", "http://localhost:8080/argus/alertId"),
+        AUDIT_ALERT_URL_TEMPLATE("notifier.property.alert.alerturl.template", "http://localhost:8080/argus/#/alerts/$alertid$"),
         /** The metric URL template to use in notifications. */
-        AUDIT_METRIC_URL_TEMPLATE("notifier.property.alert.metricurl.template", "http://localhost:8080/argus/metrics");
+        AUDIT_METRIC_URL_TEMPLATE("notifier.property.alert.metricurl.template", "http://localhost:8080/argus/#/viewmetrics?expression=$expression$"),
+        /** The metric image URL template to use in notifications. */
+        AUDIT_METRIC_IMAGE_URL_TEMPLATE("notifier.property.alert.imageurl.template", "http://localhost:8080/argus/#/images/$imageID$");
 
         private final String _name;
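The new URL template defaults above work by plain token substitution; getAlertUrl(), for instance, only swaps $alertid$ for the alert id (example values illustrative):

    String template = "http://localhost:8080/argus/#/alerts/$alertid$";
    String url = template.replaceAll("\\$alertid\\$", String.valueOf(BigInteger.valueOf(42)));
    // -> http://localhost:8080/argus/#/alerts/42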
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/CallbackNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/CallbackNotifier.java
index 70f574560..33795a77c 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/CallbackNotifier.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/CallbackNotifier.java
@@ -13,25 +13,26 @@
  */
 package com.salesforce.dva.argus.service.alert.notifier;
 
+import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+
+import java.text.MessageFormat;
+
+import javax.persistence.EntityManager;
+
+import org.apache.http.HttpResponse;
+import org.slf4j.Logger;
+
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.salesforce.dva.argus.entity.History;
-import com.salesforce.dva.argus.inject.SLF4JTypeListener;
 import com.salesforce.dva.argus.service.AnnotationService;
 import com.salesforce.dva.argus.service.AuditService;
 import com.salesforce.dva.argus.service.CallbackService;
 import com.salesforce.dva.argus.service.MetricService;
 import com.salesforce.dva.argus.service.alert.DefaultAlertService;
 import com.salesforce.dva.argus.system.SystemConfiguration;
-import javax.persistence.EntityManager;
-
 import com.salesforce.dva.argus.util.TemplateReplacer;
-import org.apache.http.HttpResponse;
-import org.slf4j.Logger;
-
-import java.text.MessageFormat;
-
-import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+import org.slf4j.LoggerFactory;
 
 /**
  * Callback notifier sending the event via REST client to an endpoint defined within the notification subscription.
@@ -40,8 +41,7 @@
  */
 public class CallbackNotifier extends AuditNotifier {
 
-    @SLF4JTypeListener.InjectLogger
-    private Logger _logger;
+    private final Logger _logger = LoggerFactory.getLogger(CallbackNotifier.class);
     private final CallbackService _callbackService;
 
     /**
@@ -55,8 +55,9 @@ public class CallbackNotifier extends AuditNotifier {
      * @param emf The entity manager factory. Cannot be null.
      */
     @Inject
-    public CallbackNotifier(MetricService metricService, AnnotationService annotationService, AuditService
-        auditService, CallbackService callbackService, SystemConfiguration config, Provider<EntityManager> emf)
+    public CallbackNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService,
+                            CallbackService callbackService, SystemConfiguration config,
+                            Provider<EntityManager> emf)
     {
         super(metricService, annotationService, auditService, config, emf);
         requireArgument(callbackService != null, "callback service cannot be null.");
@@ -72,29 +73,37 @@ public String getName() {
     }
 
     @Override
-    protected void sendAdditionalNotification(DefaultAlertService.NotificationContext context) {
-        requireArgument(context != null, "Notification context cannot be null.");
+    protected boolean sendAdditionalNotification(DefaultAlertService.NotificationContext context) {
+        if (Boolean.valueOf(_config.getValue(SystemConfiguration.Property.CALLBACK_ENABLED))) {
+            requireArgument(context != null, "Notification context cannot be null.");
 
-        String notificationName = TemplateReplacer.applyTemplateChanges(context, context.getNotification().getName());
+            String notificationName = TemplateReplacer.applyTemplateChanges(context, context.getNotification().getName());
 
-        History history = context.getHistory();
+            History history = context.getHistory();
 
-        super.sendAdditionalNotification(context);
-        HttpResponse response = _callbackService.sendNotification(context);
-        int code = response.getStatusLine().getStatusCode();
-        if (!(code >= 200 && code <= 300)) {
-            String errorMessage = MessageFormat.format("Notification {0} cannot be sent. {1}",
-                notificationName, response.getStatusLine().getReasonPhrase());
+            super.sendAdditionalNotification(context);
+            HttpResponse response = _callbackService.sendNotification(context, this);
+            int code = response.getStatusLine().getStatusCode();
+            if (!(code >= 200 && code <= 300)) {
+                String errorMessage = MessageFormat.format("Notification {0} cannot be sent. {1}",
+                    notificationName, response.getStatusLine().getReasonPhrase());
 
-            history.appendMessageNUpdateHistory(errorMessage, null, 0);
-            _logger.error(errorMessage);
+                history.appendMessageNUpdateHistory(errorMessage, null, 0);
+                _logger.error(errorMessage);
+                return false;
+            } else {
 
-            String infoMessage = MessageFormat.format("Notification {0} sent. {1}",
-                notificationName, response.getStatusLine().getReasonPhrase());
+                String infoMessage = MessageFormat.format("Notification {0} sent. {1}",
+                    notificationName, response.getStatusLine().getReasonPhrase());
 
-            history.appendMessageNUpdateHistory(infoMessage, null, 0);
-            _logger.info(infoMessage);
+                history.appendMessageNUpdateHistory(infoMessage, null, 0);
+                _logger.info(infoMessage);
+            }
+
+            return true;
+        } else {
+            _logger.info("Callback notifications are disabled");
+            return false;
         }
     }
 }
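With send and clear now reporting success as a boolean, a subclass only decides delivery status while the base class keeps creating the annotation either way. A minimal conforming subclass, sketched against the abstract methods in the DefaultNotifier change below (hypothetical class, not part of this diff; the change itself leaves a "NoOpNotifier" TODO earlier on):

    class NoOpNotifier extends DefaultNotifier {
        NoOpNotifier(MetricService m, AnnotationService a, SystemConfiguration c) { super(m, a, c); }
        @Override public String getName() { return NoOpNotifier.class.getName(); }
        @Override protected boolean sendAdditionalNotification(NotificationContext context) {
            return true;    // nothing to deliver, so report success
        }
        @Override protected boolean clearAdditionalNotification(NotificationContext context) {
            return true;
        }
    }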
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/DefaultNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/DefaultNotifier.java
index 36a04bb47..81a3b44b0 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/DefaultNotifier.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/DefaultNotifier.java
@@ -31,6 +31,17 @@
 package com.salesforce.dva.argus.service.alert.notifier;
 
+import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.TreeMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.inject.Inject;
 import com.salesforce.dva.argus.entity.Alert;
 import com.salesforce.dva.argus.entity.Annotation;
@@ -42,16 +53,7 @@
 import com.salesforce.dva.argus.system.SystemAssert;
 import com.salesforce.dva.argus.system.SystemConfiguration;
 import com.salesforce.dva.argus.util.TemplateReplacer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
 
 /**
  * Default implementation of the Notifier interface. It creates an annotation on the specific time series specified in the alert expression which
@@ -94,15 +96,17 @@ protected DefaultNotifier(MetricService metricService, AnnotationService annotat
 
     //~ Methods **************************************************************************************************************************************
 
     @Override
-    public void sendNotification(NotificationContext notificationContext) {
+    public boolean sendNotification(NotificationContext notificationContext) {
         SystemAssert.requireArgument(notificationContext != null, "Notification context cannot be null.");
 
         Map<String, String> additionalFields = new HashMap<>();
 
         additionalFields.put("Notification status", "Notification created.");
         _createAnnotation(notificationContext, additionalFields);
-        sendAdditionalNotification(notificationContext);
+        boolean rc = sendAdditionalNotification(notificationContext);
         _dispose();
+
+        return rc;
     }
 
     private void _createAnnotation(NotificationContext notificationContext, Map<String, String> additionalFields) {
@@ -139,14 +143,16 @@ private void _createAnnotation(NotificationContext notificationContext, Map<String, String> additionalFields) {
 
     @Override
-    public void clearNotification(NotificationContext notificationContext) {
+    public boolean clearNotification(NotificationContext notificationContext) {
 
         Map<String, String> additionalFields = new HashMap<>();
 
         additionalFields.put("Notification status", "Notification cleared.");
         _createAnnotation(notificationContext, additionalFields);
-        clearAdditionalNotification(notificationContext);
+        boolean rc = clearAdditionalNotification(notificationContext);
         _dispose();
+
+        return rc;
     }
 
     /**
      * Defines additional implementation specific actions to take when a notification is cleared.
      *
     */
-    protected abstract void clearAdditionalNotification(NotificationContext context);
+    protected abstract boolean clearAdditionalNotification(NotificationContext context);
 
     @Override
     public Properties getNotifierProperties(){
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifier.java
index 84495889a..f82219bb6 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifier.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifier.java
@@ -28,9 +28,19 @@
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
-
+
 package com.salesforce.dva.argus.service.alert.notifier;
 
+import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+
+import java.sql.Date;
+import java.text.MessageFormat;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.persistence.EntityManager;
+
+import com.google.common.base.Strings;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
 import com.salesforce.dva.argus.entity.Alert;
@@ -41,21 +51,17 @@
 import com.salesforce.dva.argus.service.AuditService;
 import com.salesforce.dva.argus.service.MailService;
 import com.salesforce.dva.argus.service.MetricService;
-import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus;
 import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext;
+import com.salesforce.dva.argus.service.mail.EmailContext;
 import com.salesforce.dva.argus.system.SystemConfiguration;
 import com.salesforce.dva.argus.system.SystemException;
 import com.salesforce.dva.argus.util.AlertUtils;
 import com.salesforce.dva.argus.util.TemplateReplacer;
+import org.apache.commons.lang3.tuple.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import java.sql.Date;
-import java.text.MessageFormat;
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
-import static com.salesforce.dva.argus.system.SystemAssert.requireArgument;
+import org.apache.commons.lang.StringUtils;
 
 /**
  * Implementation of notifier interface for notifying via email.
@@ -64,6 +70,8 @@
  */
 public class EmailNotifier extends AuditNotifier {
 
+    private static final Logger LOGGER = LoggerFactory.getLogger(EmailNotifier.class);
+
     //~ Instance fields ******************************************************************************************************************************
 
     private final MailService _mailService;
 
     /**
@@ -81,8 +89,8 @@ public class EmailNotifier extends AuditNotifier {
      * @param emf The entity manager factory. Cannot be null.
     */
     @Inject
-    public EmailNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, MailService mailService,
-        SystemConfiguration config, Provider<EntityManager> emf) {
+    public EmailNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService,
+                         MailService mailService, SystemConfiguration config, Provider<EntityManager> emf) {
         super(metricService, annotationService, auditService, config, emf);
         requireArgument(mailService != null, "Mail service cannot be null.");
         requireArgument(config != null, "The configuration cannot be null.");
@@ -97,15 +105,37 @@ public String getName() {
     }
 
     @Override
-    protected void sendAdditionalNotification(NotificationContext context) {
+    protected boolean sendAdditionalNotification(NotificationContext context) {
         requireArgument(context != null, "Notification context cannot be null.");
 
         super.sendAdditionalNotification(context);
 
+        Set<String> recipients = _getNotificationSubscriptions(context);
+        EmailContext emailContext = getEmailContextForEmailNotifications(context, NotificationStatus.TRIGGERED, recipients);
+        boolean isSent = _mailService.sendMessage(emailContext);
+        if (!isSent) {
+            context.getHistory().appendMessageNUpdateHistory(MessageFormat.format("Not able to send email for triggered notification: `{0}.` to recipient {1}",
+                context.getNotification().getName(), recipients), null, 0);
+        }
+        return isSent;
+    }
+
+    private EmailContext getEmailContextForEmailNotifications(NotificationContext context, NotificationStatus status,
+                                                              Set<String> recipients) {
         String subject = getEmailSubject(context);
-        String body = getEmailBody(context, NotificationStatus.TRIGGERED);
-        Set<String> to = _getNotificationSubscriptions(context);
+        String body = getEmailBody(context, status);
+
+        EmailContext.Builder emailContextBuilder = new EmailContext.Builder()
+                .withRecipients(recipients)
+                .withSubject(subject)
+                .withEmailBody(body)
+                .withContentType("text/html; charset=utf-8")
+                .withEmailPriority(MailService.Priority.NORMAL);
 
-        _mailService.sendMessage(to, subject, body, "text/html; charset=utf-8", MailService.Priority.NORMAL);
+        if (context.getEvaluatedMetricSnapshotDetails().isPresent()) {
+            emailContextBuilder = emailContextBuilder.withImageDetails(context.getEvaluatedMetricSnapshotDetails().get());
+        }
+
+        return emailContextBuilder.build();
     }
 
     private Set<String> _getNotificationSubscriptions(NotificationContext context) {
@@ -123,14 +153,32 @@ private Set<String> _getNotificationSubscriptions(NotificationContext context) {
         return new HashSet<>(notification.getSubscriptions());
     }
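The EmailContext.Builder shown above assembles the outgoing mail declaratively, with the inline snapshot image attached only when present. A condensed usage sketch (recipient and subject values are illustrative):

    EmailContext email = new EmailContext.Builder()
            .withRecipients(Collections.singleton("oncall@example.com"))
            .withSubject("[Argus] test notification")
            .withEmailBody("Hello from Argus")
            .withContentType("text/html; charset=utf-8")
            .withEmailPriority(MailService.Priority.NORMAL)
            .build();
    boolean sent = mailService.sendMessage(email);    // false signals delivery failure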
-    private String getEmailSubject(NotificationContext context) {
-        String currentSubject = "[Argus] Notification for Alert: " + TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName());
+    /**
+     * Returns the email subject.
+     *
+     * @param context The notification context.
+     *
+     * @return The email subject.
+     */
+    protected String getEmailSubject(NotificationContext context) {
+        String subject = context.getNotification().getEmailSubject();
+        if (StringUtils.isBlank(subject)) {
+            subject = generateDefaultEmailSubjectTemplate(context);
+        }
+        return TemplateReplacer.applyTemplateChanges(context, subject);
+    }
+
+    private String generateDefaultEmailSubjectTemplate(NotificationContext context) {
         Alert currentAlert = context.getAlert();
-        if (currentAlert.getNotifications().size() > 1)
-            currentSubject += " Notification: "+ TemplateReplacer.applyTemplateChanges(context, context.getNotification().getName());
-        if (currentAlert.getTriggers().size() > 1)
-            currentSubject += " Trigger:" + TemplateReplacer.applyTemplateChanges(context, context.getTrigger().getName());
-        return currentSubject;
+        StringBuilder subject = new StringBuilder();
+        subject.append("[Argus] Notification for Alert: ${alert.name}");
+        if (currentAlert.getNotifications().size() > 1) {
+            subject.append(" Notification: ${notification.name}");
+        }
+        if (currentAlert.getTriggers().size() > 1) {
+            subject.append(" Trigger:${trigger.name}");
+        }
+        return subject.toString();
     }
 
     /**
@@ -169,6 +217,11 @@ protected String getEmailBody(NotificationContext context, NotificationStatus notificationStatus) {
         if( customText != null && customText.length()>0 && notificationStatus == NotificationStatus.TRIGGERED){
             sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("<br/>");
         }
+
+        context.getAlertEvaluationTrackingID().ifPresent(trackingID -> {
+            sb.append("Tracking ID: " + trackingID + "<br/>");
+        });
+
         Alert currentAlert = notification.getAlert();
         String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context);
         if(currentAlert.getNotifications().size() > 1)
@@ -180,20 +233,43 @@ protected String getEmailBody(NotificationContext context, NotificationStatus notificationStatus) {
             DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration()))));
         }
 
-        if(!expression.equals("")) sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", expression));
-        else sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", context.getAlert().getExpression()));
+        if(context.getEvaluatedMetricSnapshotDetails().isPresent()) {
+            Pair<String, byte[]> evaluatedMetricSnapshotDetails = context.getEvaluatedMetricSnapshotDetails().get();
+            byte[] imageBytes = evaluatedMetricSnapshotDetails.getRight();
+            String imageContentID = evaluatedMetricSnapshotDetails.getLeft();
+            if(imageBytes != null && !Strings.isNullOrEmpty(imageContentID)) {
+                sb.append("<img src='cid:" + imageContentID + "'>");
+            }
+            if(context.getEvaluatedMetricSnapshotURL().isPresent() && !context.getEvaluatedMetricSnapshotURL().get().equals("")) {
+                sb.append("<br/><br/><a href='" + context.getEvaluatedMetricSnapshotURL().get() + "'>Snapshot of the evaluated metric data.</a><br/><br/>");
+            } else {
+                if(!expression.equals("")) {
+                    sb.append("<br/><br/><a href='" + getExpressionUrl(expression) + "'>Click here to view the evaluated metric data.</a><br/><br/>");
+                }
+            }
+        } else {
+            if(!expression.equals("")) {
+                sb.append("<br/><br/><a href='" + getExpressionUrl(expression) + "'>Click here to view the evaluated metric data.</a><br/><br/>");
+            }
+        }
+
         if(!expression.equals("")) {
-            sb.append("<br/><br/><a href='" + getExpressionUrl(expression) + "'>Click here to view the evaluated metric data.</a><br/><br/>");
+            sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", expression));
+        } else {
+            sb.append(MessageFormat.format("Evaluated metric expression: {0}<br/>", context.getAlert().getExpression()));
         }
 
+        sb.append("<br/><br/><a href='" + getExpressionUrl(context.getAlert().getExpression()) + "'>Click "
+                + "here for the current view of the metric data.</a><br/><br/>");
+
         if(context.getTriggeredMetric()!=null) {
-            if(notificationStatus == NotificationStatus.TRIGGERED){
-                sb.append(MessageFormat.format("Triggered on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
-            }else {
-                sb.append(MessageFormat.format("Cleared on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
-            }
-        }
-
+            if(notificationStatus == NotificationStatus.TRIGGERED) {
+                sb.append(MessageFormat.format("Triggered on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
+            } else {
+                sb.append(MessageFormat.format("Cleared on Metric: {0}<br/>", context.getTriggeredMetric().getIdentifier()));
+            }
+        }
+
         sb.append(MessageFormat.format("Trigger details: {0}<br/>", getTriggerDetails(trigger, context)));
         if(!trigger.getType().equals(TriggerType.NO_DATA) && notificationStatus == NotificationStatus.TRIGGERED){
             sb.append(MessageFormat.format("Triggering event value: {0}<br/>", context.getTriggerEventValue()));
@@ -205,7 +281,7 @@ protected String getEmailBody(NotificationContext context, NotificationStatus notificationStatus) {
             sb.append(getMetricUrl(metricToAnnotate, context.getTriggerFiredTime()));
             sb.append("'>Click here to view the annotated series for ").append(metricToAnnotate).append(".</a><br/>");
         }
 
         sb.append("<br/><br/><a href='" + getAlertUrl(context.getAlert().getId()) + "'>Click here to view alert definition.</a><br/>");
         sb.append("<br/><br/>Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. ");
         sb.append("If the data source has inherent lag or a large aggregation window is used during data collection, it is possible ");
         sb.append("ensuring the time window used in alert expression is outside the range of the datasource lag.");
         sb.append("<br/><br/>You received this notification because you, or a distribution list you belong to is listed as a ");
         sb.append("subscriber of the alert.");
+
         return sb.toString();
     }
 
     @Override
-    protected void clearAdditionalNotification(NotificationContext context) {
+    protected boolean clearAdditionalNotification(NotificationContext context) {
         requireArgument(context != null, "Notification context cannot be null.");
 
         super.clearAdditionalNotification(context);
 
-        String subject = getEmailSubject(context);
-        String body = getEmailBody(context, NotificationStatus.CLEARED);
-        Set<String> to = _getNotificationSubscriptions(context);
+        boolean isSent = true;
+        if (context.getNotification().isEnableClearNotification()) {
+            Set<String> recipients = _getNotificationSubscriptions(context);
+            EmailContext emailContext = getEmailContextForEmailNotifications(context, NotificationStatus.CLEARED, recipients);
 
-        _mailService.sendMessage(to, subject, body, "text/html; charset=utf-8", MailService.Priority.NORMAL);
+            isSent = _mailService.sendMessage(emailContext);
+            if (!isSent) {
+                context.getHistory().appendMessageNUpdateHistory(MessageFormat.format("Not able to send email for cleared notification: `{0}.` to recipient {1}",
+                    context.getNotification().getName(), recipients), null, 0);
+            }
+        }
+        return isSent;
     }
 }
 /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifier.java
index d33590064..4a4bb5deb 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifier.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifier.java
@@ -35,37 +35,46 @@
 import com.google.gson.JsonObject;
 import com.google.inject.Inject;
 import com.google.inject.Provider;
+import com.google.inject.Singleton;
 import com.salesforce.dva.argus.entity.Alert;
+import com.salesforce.dva.argus.entity.History;
 import com.salesforce.dva.argus.entity.Metric;
 import com.salesforce.dva.argus.entity.Notification;
 import com.salesforce.dva.argus.entity.Trigger;
 import com.salesforce.dva.argus.entity.Trigger.TriggerType;
-import com.salesforce.dva.argus.inject.SLF4JTypeListener;
 import com.salesforce.dva.argus.service.AnnotationService;
 import com.salesforce.dva.argus.service.AuditService;
 import com.salesforce.dva.argus.service.MetricService;
-import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus;
+import com.salesforce.dva.argus.service.MonitorService;
 import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext;
+import com.salesforce.dva.argus.service.alert.notifier.GusTransport.GetAuthenticationTokenFailureException;
 import com.salesforce.dva.argus.system.SystemConfiguration;
 import com.salesforce.dva.argus.system.SystemException;
+import com.salesforce.dva.argus.util.AlertUtils;
+import com.salesforce.dva.argus.util.TemplateReplacer;
+
+import org.apache.commons.httpclient.methods.PostMethod;
+import org.apache.commons.lang.StringUtils;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.RequestBuilder;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.persistence.EntityManager;
+import
java.io.IOException; import java.io.UnsupportedEncodingException; +import java.net.SocketTimeoutException; import java.net.URLEncoder; import java.sql.Date; import java.text.MessageFormat; +import java.util.List; +import java.util.Map; import java.util.Properties; -import javax.persistence.EntityManager; - -import com.salesforce.dva.argus.util.AlertUtils; -import com.salesforce.dva.argus.util.TemplateReplacer; -import org.apache.commons.httpclient.HttpClient; -import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; -import org.apache.commons.httpclient.methods.PostMethod; -import org.apache.commons.httpclient.methods.StringRequestEntity; -import org.apache.commons.httpclient.params.HttpConnectionManagerParams; -import org.slf4j.Logger; - import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; /** @@ -73,13 +82,13 @@ * * @author Fiaz Hossain (fiaz.hossain@salesforce.com) */ +@Singleton public class GOCNotifier extends AuditNotifier { - //~ Instance fields ****************************************************************************************************************************** - - @SLF4JTypeListener.InjectLogger - private Logger _logger; - + private static final Logger _logger = LoggerFactory.getLogger(GOCNotifier.class); + private static final int MAX_ATTEMPTS_GOC_POST = 3; + private final MonitorService monitorService; + private volatile GusTransport gusTransport = null; //~ Constructors ********************************************************************************************************************************* @@ -94,34 +103,18 @@ public class GOCNotifier extends AuditNotifier { */ @Inject public GOCNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, - SystemConfiguration config, Provider emf) { + SystemConfiguration config, Provider emf, + MonitorService monitorService) { super(metricService, annotationService, auditService, config, emf); requireArgument(config != null, "The configuration cannot be null."); + this.monitorService = monitorService; } //~ Methods ************************************************************************************************************************************** - private PostMethod getRequestMethod(boolean refresh, String id) throws UnsupportedEncodingException { - GOCTransport gocTransport = new GOCTransport(); - EndpointInfo endpointInfo = gocTransport.getEndpointInfo(_config, _logger, refresh); - - // Create upsert URI with PATCH method - PostMethod post = new PostMethod(String.format("%s/services/data/v25.0/sobjects/SM_Alert__c/%s/%s", endpointInfo.getEndPoint(), - urlEncode(GOCData.SM_ALERT_ID__C_FIELD), urlEncode(id))) { - - @Override - public String getName() { - return "PATCH"; - } - }; - post.setRequestHeader("Authorization", "Bearer " + endpointInfo.getToken()); - return post; - } - /** * Sends an GOC++ message. - * - * @param severity The message severity + * @param severity The message severity * @param className The alert class name * @param elementName The element/instance name * @param eventName The event name @@ -130,11 +123,30 @@ public String getName() { * @param srActionable Is the GOC notification SR actionable * @param lastNotified The last message time. 
(typically current time) * @param triggeredOnMetric The corresponding metric + * @param productTag + * @param articleNumber + * @return true if succeed, false if fail */ - public void sendMessage(Severity severity, String className, String elementName, String eventName, String message, - int severityLevel, boolean srActionable, long lastNotified, Metric triggeredOnMetric) { - requireArgument(elementName != null && !elementName.isEmpty(), "ElementName cannot be null or empty."); + private boolean sendMessage(History history, + Severity severity, + String className, + String elementName, + String eventName, + String message, + int severityLevel, + boolean srActionable, + long lastNotified, + Metric triggeredOnMetric, + String productTag, + String articleNumber, + NotificationContext context) { + requireArgument(elementName != null && !elementName.isEmpty(), "ElementName cannot be null or empty."); requireArgument(eventName != null && !eventName.isEmpty(), "EventName cannot be null or empty."); + + boolean result = false; + String failureMsg = null; + int retries = 0; + if (Boolean.valueOf(_config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.GOC_ENABLED))) { try { GOCDataBuilder builder = new GOCDataBuilder(); @@ -144,7 +156,8 @@ public void sendMessage(Severity severity, String className, String elementName, eventName = _truncateIfSizeGreaterThan(eventName, 100); builder.withClassName(className).withElementName(elementName).withEventName(eventName). - withSeverity(severityLevel).withSRActionable(srActionable).withEventText(message); + withSeverity(severityLevel).withSRActionable(srActionable).withEventText(message) + .withArticleNumber(articleNumber); if (severity == Severity.OK) { builder.withActive(false).withClearedAt(lastNotified); } else { @@ -154,49 +167,127 @@ public void sendMessage(Severity severity, String className, String elementName, if (srActionable == true) { builder.withUserdefined2(_config.getValue(AuditNotifier.Property.AUDIT_PRODOUTAGE_EMAIL_TEMPLATE.getName(), AuditNotifier.Property.AUDIT_PRODOUTAGE_EMAIL_TEMPLATE.getDefaultValue())); } + if (productTag != null) { + builder.withProductTag(productTag); + } GOCData gocData = builder.build(); boolean refresh = false; - GOCTransport gocTransport = new GOCTransport(); - HttpClient httpclient = gocTransport.getHttpClient(_config); + CloseableHttpClient httpClient = getGusTransportInstance().getHttpClient(); - for (int i = 0; i < 1; i++) { + for (int i = 0; i < MAX_ATTEMPTS_GOC_POST; i++) { + retries = i; - PostMethod post = null; + CloseableHttpResponse response = null; try { - post=getRequestMethod(refresh, triggeredOnMetric.hashCode() + " " + gocData.getsm_Alert_Id__c()); - post.setRequestEntity(new StringRequestEntity(gocData.toJSON(), "application/json", null)); - - int respCode = httpclient.executeMethod(post); + GusTransport.EndpointInfo endpointInfo = getGusTransportInstance().getEndpointInfo(refresh); + // Create upsert URI with PATCH method + RequestBuilder rb = RequestBuilder.patch() + .setUri(String.format("%s/services/data/v25.0/sobjects/SM_Alert__c/%s/%s", + endpointInfo.getEndPoint(), + urlEncode(GOCData.SM_ALERT_ID__C_FIELD), + urlEncode(triggeredOnMetric.hashCode() + " " + gocData.getsm_Alert_Id__c()))) + .setHeader("Authorization", "Bearer " + endpointInfo.getToken()) + .setEntity(new StringEntity(gocData.toJSON(), ContentType.create("application/json"))); + + response = httpClient.execute(rb.build()); + int respCode = response.getStatusLine().getStatusCode(); // Check for success if 
(respCode == 201 || respCode == 204) {
-                        _logger.info("Success - send GOC++ having element '{}' event '{}' severity {}.", elementName, eventName, severity.name());
+                        String infoMsg = MessageFormat.format("Success - send GOC++ having element {0} event {1} severity {2}.",
+                                elementName, eventName, severity.name());
+                        _logger.debug(infoMsg);
+                        history.appendMessageNUpdateHistory(infoMsg, null, 0);
+
+                        result = true;
                         break;
-                    } else if (respCode == 401) {
-                        // Indication that the session timedout, Need to refresh and retry
-                        refresh = true;
                     } else {
-                        _logger.error("Failure - send GOC++ having element '{}' event '{}' severity {}. Response code '{}' response '{}'",
-                            elementName, eventName, severity.name(), respCode, post.getResponseBodyAsString());
+                        final String gusPostResponseBody = EntityUtils.toString(response.getEntity());
+                        failureMsg = MessageFormat.format("Failure - send GOC++ having element {0} event {1} severity {2}. Response code {3} response {4}",
+                            elementName, eventName, severity.name(), respCode, gusPostResponseBody);
+
+                        if (respCode == 401) { // Indication that the session timed out, try refreshing token and retrying post
+                            _logger.warn(failureMsg);
+                            refresh = true;
+                            continue; // retry
+                        } else if (respCode == 400) {
+                            List<Map<String, Object>> jsonResponseBody = null;
+                            try {
+                                jsonResponseBody = new Gson().fromJson(gusPostResponseBody, List.class);
+                            } catch (RuntimeException e) {
+                                _logger.warn("Failed to parse response", e);
+                            }
+                            if (jsonResponseBody != null && jsonResponseBody.size() > 0) {
+                                Map<String, Object> responseBodyMap = jsonResponseBody.get(0);
+                                if (responseBodyMap != null &&
+                                        ("INVALID_HEADER_TYPE".equals(responseBodyMap.get("message")) ||
+                                         "INVALID_AUTH_HEADER".equals(responseBodyMap.get("errorCode")))) {
+                                    _logger.warn("Failed with invalid auth header, attempting to refresh token if possible");
+                                    refresh = true;
+                                    continue; // retry
+                                }
+                            }
+                            _logger.error(failureMsg);
+                            break; // do not retry
+                        } else if (respCode >= 500 && respCode < 600) { // Server errors
+                            _logger.warn(failureMsg);
+                            continue; // retry
+                        } else {
+                            _logger.error(failureMsg);
+                            break; // unknown error, do not retry
+                        }
                     }
+                } catch (SocketTimeoutException e) {
+                    failureMsg = MessageFormat.format("Failure - send GOC++ having element {0} event {1} severity {2}. Exception {3}",
+                        elementName, eventName, severity.name(), e.getMessage());
+                    _logger.error(failureMsg, e);
+
+                    refresh = false; // do not refresh token
+                    continue; // retry
+                } catch (GetAuthenticationTokenFailureException e) {
+                    failureMsg = MessageFormat.format("Failure - send GOC++ having element {0} event {1} severity {2}. Exception {3}",
+                        elementName, eventName, severity.name(), e.getMessage());
+                    _logger.error(failureMsg, e);
+
+                    refresh = true; // try forced refresh of token
+                    continue; // retry
                 } catch (Exception e) {
-                    _logger.error("Failure - send GOC++ having element '{}' event '{}' severity {}. Exception '{}'", elementName, eventName,
-                        severity.name(), e);
+                    failureMsg = MessageFormat.format("Failure - send GOC++ having element {0} event {1} severity {2}. Exception {3}",
+                        elementName, eventName, severity.name(), e.getMessage());
+                    _logger.error(failureMsg, e);
+                    break; // unknown error, do not retry
                 } finally {
-                    if(post != null){
-                        post.releaseConnection();
+                    try {
+                        if (response != null) {
+                            response.close();
+                        }
+                    } catch (IOException e) {
+                        _logger.error("Exception while attempting to close post to GUS response", e);
                     }
                 }
             }
         } catch (RuntimeException ex) {
+            failureMsg = MessageFormat.format("Failure - send GOC++. Exception {0}", ex.getMessage());
+            history.appendMessageNUpdateHistory(failureMsg, null, 0);
             throw new SystemException("Failed to send a GOC++ notification.", ex);
+        } finally {
+            monitorService.modifyCounter(MonitorService.Counter.GOC_NOTIFICATIONS_RETRIES, retries, null);
+            monitorService.modifyCounter(MonitorService.Counter.GOC_NOTIFICATIONS_FAILED, result ? 0 : 1, null);
         }
     } else {
-        _logger.info("Sending GOC++ notification is disabled. Not sending message for element '{}' event '{}' severity {}.", elementName,
-            eventName, severity.name());
+        failureMsg = MessageFormat.format("Sending GOC++ notification is disabled. Not sending message for element {0} event {1} severity {2}.",
+            elementName, eventName, severity.name());
+        _logger.warn(failureMsg);
+    }
+
+    context.setNotificationRetries(retries);
+    if (StringUtils.isNotBlank(failureMsg)) {
+        history.appendMessageNUpdateHistory(failureMsg, null, 0);
     }
+
+    return result;
+
 }
 
 private static String _truncateIfSizeGreaterThan(String str, int maxAllowed) {
@@ -214,13 +305,17 @@ public String getName() {
 }
 
 @Override
-protected void sendAdditionalNotification(NotificationContext context) {
-    _sendAdditionalNotification(context, NotificationStatus.TRIGGERED);
+protected boolean sendAdditionalNotification(NotificationContext context) {
+    requireArgument(context != null, "Notification context cannot be null.");
+    super.sendAdditionalNotification(context);
+    return _sendAdditionalNotification(context, NotificationStatus.TRIGGERED);
 }
 
 @Override
-protected void clearAdditionalNotification(NotificationContext context) {
-    _sendAdditionalNotification(context, NotificationStatus.CLEARED);
+protected boolean clearAdditionalNotification(NotificationContext context) {
+    requireArgument(context != null, "Notification context cannot be null.");
+    super.clearAdditionalNotification(context);
+    return _sendAdditionalNotification(context, NotificationStatus.CLEARED);
 }
 
 /**
@@ -229,15 +324,7 @@ protected void clearAdditionalNotification(NotificationContext context) {
  * @param context The notification context. Cannot be null.
  * @param status The notification status. If null, will set the notification severity to ERROR
  */
-protected void _sendAdditionalNotification(NotificationContext context, NotificationStatus status) {
-    requireArgument(context != null, "Notification context cannot be null.");
-
-    if(status == NotificationStatus.TRIGGERED) {
-        super.sendAdditionalNotification(context);
-    }else {
-        super.clearAdditionalNotification(context);
-    }
-
+protected boolean _sendAdditionalNotification(NotificationContext context, NotificationStatus status) {
     Notification notification = null;
     Trigger trigger = null;
 
@@ -259,8 +346,33 @@ protected void _sendAdditionalNotification(NotificationContext context, Notifica
     String body = getGOCMessageBody(notification, trigger, context, status);
 
     Severity sev = status == NotificationStatus.CLEARED ?
Severity.OK : Severity.ERROR; - sendMessage(sev, TemplateReplacer.applyTemplateChanges(context, context.getNotification().getName()), TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()), TemplateReplacer.applyTemplateChanges(context, context.getTrigger().getName()), body, - context.getNotification().getSeverityLevel(),context.getNotification().getSRActionable(), context.getTriggerFiredTime(), context.getTriggeredMetric()); + String elementName = notification.getElementName(); + String eventName = notification.getEventName(); + + if (elementName == null || elementName.isEmpty()) { + elementName = context.getAlert().getName(); + } + + if (eventName == null || eventName.isEmpty()) { + eventName = trigger.getName(); + } + + elementName = TemplateReplacer.applyTemplateChanges(context, elementName); + eventName = TemplateReplacer.applyTemplateChanges(context, eventName); + + return sendMessage(context.getHistory(), + sev, + TemplateReplacer.applyTemplateChanges(context, notification.getName()), + elementName, + eventName, + body, + context.getNotification().getSeverityLevel(), + context.getNotification().getSRActionable(), + context.getTriggerFiredTime(), + context.getTriggeredMetric(), + notification.getProductTag(), + notification.getArticleNumber(), + context); } /** @@ -278,12 +390,17 @@ protected String getGOCMessageBody(Notification notification, Trigger trigger, N String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); String notificationMessage = notificationStatus == NotificationStatus.TRIGGERED ? "Triggered" : "Cleared"; - sb.append(MessageFormat.format("Alert {0} was {1} at 21}\n", TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()), notificationMessage, + sb.append(MessageFormat.format("Alert {0} was {1} at {2}\n", TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()), notificationMessage, DATE_FORMATTER.get().format(new Date(context.getTriggerFiredTime())))); String customText = context.getNotification().getCustomText(); if( customText != null && customText.length()>0 && notificationStatus == NotificationStatus.TRIGGERED){ sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("\n"); } + + context.getAlertEvaluationTrackingID().ifPresent(trackingID -> { + sb.append("Tracking ID: " + trackingID + "\n"); + }); + if(currentAlert.getNotifications().size() > 1) sb.append(MessageFormat.format("Notification: {0}\n", TemplateReplacer.applyTemplateChanges(context, notification.getName()))); if(currentAlert.getTriggers().size() > 1) @@ -292,17 +409,23 @@ protected String getGOCMessageBody(Notification notification, Trigger trigger, N sb.append(MessageFormat.format("Notification is on cooldown until: {0}\n", DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration())))); } - if(!expression.equals("")) { - sb.append(MessageFormat.format("URL for evaluated metric expression: {0}\n", getExpressionUrl(expression))); - }else { - sb.append(MessageFormat.format("Evaluated metric expression: {0}\n", context.getAlert().getExpression())); + + if (context.getEvaluatedMetricSnapshotURL().isPresent() && !context.getEvaluatedMetricSnapshotURL().get().equals("")) { + sb.append(MessageFormat.format("Snapshot of the evaluated metric data: {0}\n", context.getEvaluatedMetricSnapshotURL().get())); + } else { + if(!expression.equals("")) { + sb.append(MessageFormat.format("URL for evaluated metric expression: {0}\n", getExpressionUrl(expression))); + } } + 
sb.append(MessageFormat.format("Current view of the metric expression: {0}\n", + getExpressionUrl(context.getAlert().getExpression()))); + if(context.getTriggeredMetric()!=null) { if(notificationStatus == NotificationStatus.TRIGGERED){ - sb.append(MessageFormat.format("Triggered on Metric: {0}
", context.getTriggeredMetric().getIdentifier())); + sb.append(MessageFormat.format("Triggered on Metric: {0}", context.getTriggeredMetric().getIdentifier())); }else { - sb.append(MessageFormat.format("Cleared on Metric: {0}
", context.getTriggeredMetric().getIdentifier())); + sb.append(MessageFormat.format("Cleared on Metric: {0}", context.getTriggeredMetric().getIdentifier())); } } @@ -331,6 +454,28 @@ public Properties getNotifierProperties() { return notifierProps; } + protected GusTransport getGusTransportInstance() { + if (gusTransport == null) { + synchronized (this) { + if (gusTransport == null) { + gusTransport = new GusTransport(_config.getValue(Property.GOC_PROXY_HOST.getName(), null), // no default since this is optional + _config.getValue(Property.GOC_PROXY_PORT.getName(), null), // no default since this is optional + _config.getValue(Property.GOC_PROXY_USERNAME.getName(), null), // no default since this is optional + _config.getValue(Property.GOC_PROXY_PASSWORD.getName(), null), // no default since this is optional + _config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()) + "/services/oauth2/token", + _config.getValue(Property.GOC_CLIENT_ID.getName(), Property.GOC_CLIENT_ID.getDefaultValue()), + _config.getValue(Property.GOC_CLIENT_SECRET.getName(), Property.GOC_CLIENT_SECRET.getDefaultValue()), + _config.getValue(Property.GOC_USER.getName(), Property.GOC_USER.getDefaultValue()), + _config.getValue(Property.GOC_PWD.getName(), Property.GOC_PWD.getDefaultValue()), + new GusTransport.EndpointInfo(_config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()), GusTransport.NO_TOKEN), + Integer.parseInt(_config.getValue(Property.GOC_CONNECTION_POOL_MAX_SIZE.getName(), Property.GOC_CONNECTION_POOL_MAX_SIZE.getDefaultValue())), + Integer.parseInt(_config.getValue(Property.GOC_CONNECTION_POOL_MAX_PER_ROUTE.getName(), Property.GOC_CONNECTION_POOL_MAX_PER_ROUTE.getDefaultValue()))); + } + } + } + return gusTransport; + } + private String urlEncode(String s) throws UnsupportedEncodingException{ return URLEncoder.encode(s,org.apache.commons.lang3.CharEncoding.UTF_8).replace("+", "%20"); } @@ -366,6 +511,10 @@ public enum Property { GOC_PROXY_HOST("notifier.property.proxy.host", ""), /** The GOC port. */ GOC_PROXY_PORT("notifier.property.proxy.port", ""), + /** The GOC proxy username. */ + GOC_PROXY_USERNAME("notifier.property.proxy.username", ""), + /** The GOC proxy password. */ + GOC_PROXY_PASSWORD("notifier.property.proxy.password", ""), /** The GOC client ID. */ GOC_CLIENT_ID("notifier.property.goc.client.id", "default_client_id"), /** The GOC client secret. */ @@ -373,7 +522,11 @@ public enum Property { /** The alert URL template to be included with GOC notifications. */ EMAIL_ALERT_URL_TEMPLATE("notifier.property.goc.alerturl.template", "http://localhost:8080/argus/alertId"), /** The metric URL template to be included with GOC notifications. 
*/ - EMAIL_METRIC_URL_TEMPLATE("notifier.property.goc.metricurl.template", "http://localhost:8080/argus/metrics"); + EMAIL_METRIC_URL_TEMPLATE("notifier.property.goc.metricurl.template", "http://localhost:8080/argus/metrics"), + /** The connection pool size for connecting to GOC */ + GOC_CONNECTION_POOL_MAX_SIZE("notifier.property.goc.connectionpool.maxsize", "55"), + /** The connection pool max per route for connecting to GOC */ + GOC_CONNECTION_POOL_MAX_PER_ROUTE("notifier.property.goc.connectionpool.maxperroute", "20"); private final String _name; private final String _defaultValue; @@ -404,6 +557,15 @@ public String getDefaultValue() { //~ Inner classes ******************************************************************************************************************************** + public class PatchMethod extends PostMethod { + public PatchMethod(String uri) { super(uri); } + + @Override + public String getName() { + return "PATCH"; + } + } + /** * GOCData object to generate JSON. * @@ -429,10 +591,12 @@ public class GOCData { private static final String SM_SEVERITY__C_FIELD = "SM_Severity__c"; private static final String SM_SOURCEDOMAIN__C_FIELD = "SM_SourceDomain__c"; private static final String SR_ACTIONABLE__C_FIELD = "SR_Actionable__c"; - private static final String SM_USERDEFINED2__C_FIELD = "SR_Userdefined2__c"; - private static final String SM_USERDEFINED3__C_FIELD = "SR_Userdefined3__c"; - private static final String SM_USERDEFINED10__C_FIELD = "SR_Userdefined10__c"; - private static final String SM_USERDEFINED12__C_FIELD = "SR_Userdefined12__c"; + private static final String SM_USERDEFINED2__C_FIELD = "SM_Userdefined2__c"; + private static final String SM_USERDEFINED3__C_FIELD = "SM_Userdefined3__c"; + private static final String SM_USERDEFINED10__C_FIELD = "SM_Userdefined10__c"; + private static final String SM_USERDEFINED12__C_FIELD = "SM_Userdefined12__c"; + private static final String SM_ARTICLE_NUMBER__C_FIELD = "SM_Article_Number__c"; + private static final String SM_PRODUCTTAG__C_FIELD = "SM_Product_Tag__c"; //~ Instance fields ****************************************************************************************************************************** @@ -448,17 +612,22 @@ public class GOCData { private final int smSeverityc; // Number(1, 0) (External ID) --> 0 through 5 private final String smSourceDomainc; private final boolean srActionablec; // Checkbox --> true if SR needs to respond to this alert + // Userdefined fields private final String smUserdefined2c; private final String smUserdefined3c; private final String smUserdefined10c; private final String smUserdefined12c; + private final String smArticleNumber; + private final String smProductTag; // Product Tag associated with the alert object. 
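// Illustrative usage of the extended builder (all values below are made up;
// withProductTag/withArticleNumber populate the new SM_Product_Tag__c and
// SM_Article_Number__c JSON fields declared above):
//
//     GOCData example = new GOCDataBuilder()
//             .withClassName("Argus")
//             .withElementName("myhost.example.com")
//             .withEventName("cpu-high")
//             .withSeverity(3)
//             .withSRActionable(false)
//             .withEventText("CPU above threshold")
//             .withProductTag("a1aB0000000XyZQAU")
//             .withArticleNumber("000123456")
//             .build();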
+ //~ Constructors ********************************************************************************************************************************* private GOCData(final boolean smActivec, final String smAlertIdc, final String smClassNamec, final long smClearedAtc, final long smCreatedAtc, final String smElementNamec, final String smEventNamec, final String smEventTextc, final long smLastNotifiedAtc, final int smSeverityc, - final String smSourceDomainc, final boolean srActionablec, final String smUserdefined2c, final String smUserdefined3c, final String smUserdefined10c, final String smUserdefined12c) { + final String smSourceDomainc, final boolean srActionablec, final String smUserdefined2c, final String smUserdefined3c, final String smUserdefined10c, final String smUserdefined12c, + final String smArticleNumber, final String smProductTag) { this.smActivec = smActivec; this.smAlertIdc = smAlertIdc; this.smClassNamec = smClassNamec; @@ -475,6 +644,8 @@ private GOCData(final boolean smActivec, final String smAlertIdc, final String s this.smUserdefined3c = smUserdefined3c; this.smUserdefined10c = smUserdefined10c; this.smUserdefined12c = smUserdefined12c; + this.smArticleNumber = smArticleNumber; + this.smProductTag = smProductTag; } @@ -528,6 +699,12 @@ public String toJSON() { if(smUserdefined12c != null) { gocData.addProperty(SM_USERDEFINED12__C_FIELD, smUserdefined12c); } + if(smArticleNumber != null) { + gocData.addProperty(SM_ARTICLE_NUMBER__C_FIELD, smArticleNumber); + } + if(smProductTag != null) { + gocData.addProperty(SM_PRODUCTTAG__C_FIELD, smProductTag); + } return gocData.toString(); } @@ -559,6 +736,8 @@ public class GOCDataBuilder { private String smUserdefined3c = null; private String smUserdefined10c = null; private String smUserdefined12c = null; + private String smArticleNumber; + private String smProductTag; /** Creates a new GOCDataBuilder object. */ public GOCDataBuilder() { } @@ -686,7 +865,7 @@ public GOCDataBuilder withSRActionable(final boolean sRActionablec) { /** * Specifies whether the userdefined2 field is defined. * - * @param smUserdefined2c True if actionable. + * @param smUserdefined2c user defined field. * * @return The updated builder object. */ @@ -696,9 +875,9 @@ public GOCDataBuilder withUserdefined2(final String smUserdefined2c) { } /** - * Specifies whether the userdefined2 field is defined. + * Specifies whether the userdefined3 field is defined. * - * @param smUserdefined3c True if actionable. + * @param smUserdefined3c user defined field. * * @return The updated builder object. */ @@ -708,9 +887,9 @@ public GOCDataBuilder withUserdefined3(final String smUserdefined3c) { } /** - * Specifies whether the userdefined2 field is defined. + * Specifies whether the userdefined10 field is defined. * - * @param smUserdefined10c True if actionable. + * @param smUserdefined10c user defined field. * * @return The updated builder object. */ @@ -720,9 +899,9 @@ public GOCDataBuilder withUserdefined10(final String smUserdefined10c) { } /** - * Specifies whether the userdefined2 field is defined. + * Specifies whether the userdefined12 field is defined. * - * @param smUserdefined12c True if actionable. + * @param smUserdefined12c user defined field. * * @return The updated builder object. */ @@ -732,172 +911,40 @@ public GOCDataBuilder withUserdefined12(final String smUserdefined12c) { } /** - * Create the GOCData object, use defaults where needed. 
- * - * @return GOCData created based on builder data - */ - public GOCData build() { - return new GOCData(smActivec, smElementNamec + ALERT_ID_SEPARATOR + smEventNamec, smClassNamec, smClearedAtc, smCreatedAtc, - smElementNamec, smEventNamec, smEventTextc, smLastNotifiedAtc, smSeverityc, SM_SOURCE_DOMAIN__C, srActionablec, smUserdefined2c, smUserdefined3c, smUserdefined10c, smUserdefined12c); - } - } - - - /** - * Manage GOC connections, oAuth and timeouts. - * - * @author Fiaz Hossain (fiaz.hossain@salesforce.com) - */ - public class GOCTransport { - - //~ Static fields/initializers ******************************************************************************************************************* - - private static final String UTF_8 = "UTF-8"; - private static final String NO_TOKEN = "NO_TOKEN"; - private static final long MIN_SESSION_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; // Wait at least 5 minutes between refresh attempts - private static final int CONNECTION_TIMEOUT_MILLIS = 10000; - private static final int READ_TIMEOUT_MILLIS = 10000; - private volatile EndpointInfo theEndpointInfo = null; - private volatile long lastRefresh = 0; - private final MultiThreadedHttpConnectionManager theConnectionManager; - { - theConnectionManager = new MultiThreadedHttpConnectionManager(); - - HttpConnectionManagerParams params = theConnectionManager.getParams(); - - params.setConnectionTimeout(CONNECTION_TIMEOUT_MILLIS); - params.setSoTimeout(READ_TIMEOUT_MILLIS); - } - - //~ Methods ************************************************************************************************************************************** - - /** - * Get authenticated endpoint and token. + * Specifies whether the userdefined2 field is defined. * - * @param config The system configuration. Cannot be null. - * @param logger The logger. Cannot be null. - * @param refresh - If true get a new token even if one exists. + * @param smArticleNumber user defined field. * - * @return EndpointInfo - with valid endpoint and token. The token can be a dummy or expired. + * @return The updated builder object. */ - public EndpointInfo getEndpointInfo(SystemConfiguration config, Logger logger, boolean refresh) { - if (theEndpointInfo == null || refresh) { - updateEndpoint(config, logger, lastRefresh); - } - return theEndpointInfo; + public GOCDataBuilder withArticleNumber(String smArticleNumber) { + this.smArticleNumber = smArticleNumber; + return this; } - /** - * Get HttpClient with proper proxy and timeout settings. - * - * @param config The system configuration. Cannot be null. - * - * @return HttpClient - */ - public HttpClient getHttpClient(SystemConfiguration config) { - HttpClient httpclient = new HttpClient(theConnectionManager); - - httpclient.getParams().setParameter("http.connection-manager.timeout", 2000L); // Wait for 2 seconds to get a connection from pool - - String host = config.getValue(Property.GOC_PROXY_HOST.getName(), Property.GOC_PROXY_HOST.getDefaultValue()); - - if (host != null && host.length() > 0) { - httpclient.getHostConfiguration().setProxy(host, - Integer.parseInt(config.getValue(Property.GOC_PROXY_PORT.getName(), Property.GOC_PROXY_PORT.getDefaultValue()))); - } - return httpclient; - } /** - * Update the global 'theEndpointInfo' state with a valid endpointInfo if login is successful or a dummy value if not successful. + * Specifies whether the userdefined2 field is defined. * - * @param config The system configuration. Cannot be null. - * @param logger The logger. Cannot be null. 
- * @param previousRefresh The last refresh time. - */ - private synchronized void updateEndpoint(SystemConfiguration config, Logger logger, long previousRefresh) { - long diff = System.currentTimeMillis() - previousRefresh; - - if (diff > MIN_SESSION_REFRESH_THRESHOLD_MILLIS) { - lastRefresh = System.currentTimeMillis(); - - PostMethod post = new PostMethod(config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()) + - "/services/oauth2/token"); - - try { - post.addParameter("grant_type", "password"); - post.addParameter("client_id", - URLEncoder.encode(config.getValue(Property.GOC_CLIENT_ID.getName(), Property.GOC_CLIENT_ID.getDefaultValue()), UTF_8)); - post.addParameter("client_secret", - URLEncoder.encode(config.getValue(Property.GOC_CLIENT_SECRET.getName(), Property.GOC_CLIENT_SECRET.getDefaultValue()), UTF_8)); - post.addParameter("username", config.getValue(Property.GOC_USER.getName(), Property.GOC_USER.getDefaultValue())); - post.addParameter("password", config.getValue(Property.GOC_PWD.getName(), Property.GOC_PWD.getDefaultValue())); - - HttpClient httpclient = getHttpClient(config); - int respCode = httpclient.executeMethod(post); - - // Check for success - if (respCode == 200) { - JsonObject authResponse = new Gson().fromJson(post.getResponseBodyAsString(), JsonObject.class); - String endpoint = authResponse.get("instance_url").getAsString(); - String token = authResponse.get("access_token").getAsString(); - - logger.info("Success - getting access_token for endpoint '{}'", endpoint); - logger.debug("access_token '{}'", token); - theEndpointInfo = new EndpointInfo(endpoint, token); - } - else { - logger.error("Failure - getting oauth2 token, check username/password: '{}'", post.getResponseBodyAsString()); - } - - } catch (Exception e) { - logger.error("Failure - exception getting access_token '{}'", e); - } finally { - if (theEndpointInfo == null) { - theEndpointInfo = new EndpointInfo(config.getValue(Property.GOC_ENDPOINT.getName(), Property.GOC_ENDPOINT.getDefaultValue()), - NO_TOKEN); - } - post.releaseConnection(); - } - } - } - - //~ Inner Classes ******************************************************************************************************************************** - - } - - /** - * Utility class for endpoint information. - * - * @author fiaz.hossain - */ - public class EndpointInfo { - - private final String endPoint; - private final String token; - - private EndpointInfo(final String endPoint, final String token) { - this.endPoint = endPoint; - this.token = token; - } - - /** - * Valid endpoint. Either from config or endpont after authentication + * @param smProductTag user defined field. * - * @return endpoint + * @return The updated builder object. */ - public String getEndPoint() { - return endPoint; + public GOCDataBuilder withProductTag(String smProductTag) { + this.smProductTag = smProductTag; + return this; } /** - * Token can be either active, expired or a dummy value. + * Create the GOCData object, use defaults where needed. 
* - * @return token + * @return GOCData created based on builder data */ - public String getToken() { - return token; + public GOCData build() { + return new GOCData(smActivec, smElementNamec + ALERT_ID_SEPARATOR + smEventNamec, smClassNamec, smClearedAtc, smCreatedAtc, + smElementNamec, smEventNamec, smEventTextc, smLastNotifiedAtc, smSeverityc, SM_SOURCE_DOMAIN__C, srActionablec, smUserdefined2c, smUserdefined3c, smUserdefined10c, smUserdefined12c, smArticleNumber, smProductTag); } } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifier.java index 14df6bfb7..84d835c24 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifier.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifier.java @@ -28,335 +28,372 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - package com.salesforce.dva.argus.service.alert.notifier; -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.io.IOException; -import java.net.URLEncoder; -import java.sql.Date; -import java.text.MessageFormat; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -import javax.persistence.EntityManager; - -import com.salesforce.dva.argus.entity.Alert; -import com.salesforce.dva.argus.util.AlertUtils; -import com.salesforce.dva.argus.util.TemplateReplacer; -import org.apache.commons.httpclient.HttpClient; -import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; -import org.apache.commons.httpclient.methods.PostMethod; -import org.apache.commons.httpclient.methods.StringRequestEntity; -import org.apache.commons.httpclient.params.HttpConnectionManagerParams; -import org.slf4j.Logger; - import com.google.gson.Gson; -import com.google.gson.JsonObject; import com.google.inject.Inject; import com.google.inject.Provider; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.Notification; import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AnnotationService; import com.salesforce.dva.argus.service.AuditService; -import com.salesforce.dva.argus.service.MailService; import com.salesforce.dva.argus.service.MetricService; -import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus; +import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.service.alert.notifier.GusTransport.EndpointInfo; +import com.salesforce.dva.argus.service.alert.notifier.GusTransport.GetAuthenticationTokenFailureException; import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.AlertUtils; +import com.salesforce.dva.argus.util.TemplateReplacer; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; 
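// Note: the import changes in this file track the migration from Commons
// HttpClient 3.x to Apache HttpClient 4.x. Rough 3.x -> 4.x equivalents as
// used throughout this patch:
//
//     new PostMethod(uri)                 ->  RequestBuilder.post().setUri(uri)
//     httpclient.executeMethod(post)      ->  httpClient.execute(requestBuilder.build())
//     post.getResponseBodyAsString()      ->  EntityUtils.toString(response.getEntity())
//     post.releaseConnection()            ->  response.close()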
+import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.io.IOException; +import java.net.URLEncoder; +import java.sql.Date; +import java.text.MessageFormat; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; -import joptsimple.internal.Strings; /** * Chatter Notifier: api user can only post alert to PUBLIC group * * @author Ruofan Zhang (rzhang@salesforce.com) */ +@Singleton public class GusNotifier extends AuditNotifier { - //~ Static fields/initializers ******************************************************************************************************************* - private static final int CONNECTION_TIMEOUT_MILLIS = 10000; - private static final int READ_TIMEOUT_MILLIS = 10000; - private static final String UTF_8 = "UTF-8"; - - //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; - private final MultiThreadedHttpConnectionManager theConnectionManager; - { - theConnectionManager = new MultiThreadedHttpConnectionManager(); - - HttpConnectionManagerParams params = theConnectionManager.getParams(); - - params.setConnectionTimeout(CONNECTION_TIMEOUT_MILLIS); - params.setSoTimeout(READ_TIMEOUT_MILLIS); - } - - //~ Constructors ********************************************************************************************************************************* - - /** - * Creates a new GusNotifier object. - * - * @param metricService The metric service to use. Cannot be null. - * @param annotationService The annotation service to use. Cannot be null. - * @param auditService The audit service to use. Cannot be null. - * @param mailService The mail service to use. Cannot be null. - * @param config The system configuration. Cannot be null. - * @param emf The entity manager factory to use. Cannot be null. - */ - @Inject - public GusNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, MailService mailService, - SystemConfiguration config, Provider emf) { - super(metricService, annotationService, auditService, config, emf); - requireArgument(mailService != null, "Mail service cannot be null."); - requireArgument(config != null, "The configuration cannot be null."); - } - - //~ Methods ************************************************************************************************************************************** - - @Override - public String getName() { - return GusNotifier.class.getName(); - } - - @Override - protected void sendAdditionalNotification(NotificationContext context) { - requireArgument(context != null, "Notification context cannot be null."); - super.sendAdditionalNotification(context); - sendGusNotification(context, NotificationStatus.TRIGGERED); - } - + private static final Logger _logger = LoggerFactory.getLogger(GusNotifier.class); + private static final int MAX_ATTEMPTS_GUS_POST = 3; + private final MonitorService monitorService; + private volatile GusTransport gusTransport = null; + + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new GusNotifier object. 
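+     * The class is now bound as a Singleton, so a single lazily created
+     * GusTransport (and its pooled HTTP client) is shared by all callers.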
+ * + * @param metricService The metric service to use. Cannot be null. + * @param annotationService The annotation service to use. Cannot be null. + * @param auditService The audit service to use. Cannot be null. + * @param config The system configuration. Cannot be null. + * @param emf The entity manager factory to use. Cannot be null. + */ + @Inject + public GusNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, + SystemConfiguration config, Provider emf, MonitorService monitorService) { + super(metricService, annotationService, auditService, config, emf); + requireArgument(config != null, "The configuration cannot be null."); + this.monitorService = monitorService; + } + + //~ Methods ************************************************************************************************************************************** + + @Override + public String getName() { + return GusNotifier.class.getName(); + } + + @Override + protected boolean sendAdditionalNotification(NotificationContext context) { + requireArgument(context != null, "Notification context cannot be null."); + if (!super.sendAdditionalNotification(context)){ + return false; + } + return sendGusNotification(context, NotificationStatus.TRIGGERED); + } + @Override - protected void clearAdditionalNotification(NotificationContext context) { + protected boolean clearAdditionalNotification(NotificationContext context) { requireArgument(context != null, "Notification context cannot be null."); super.clearAdditionalNotification(context); - sendGusNotification(context, NotificationStatus.CLEARED); + + boolean result = true; + if (context.getNotification().isEnableClearNotification()) { + result = sendGusNotification(context, NotificationStatus.CLEARED); + } + return result; + } + + protected boolean sendGusNotification(NotificationContext context, NotificationStatus status) { + Notification notification = null; + Trigger trigger = null; + + for (Notification tempNotification : context.getAlert().getNotifications()) { + if (tempNotification.getName().equalsIgnoreCase(context.getNotification().getName())) { + notification = tempNotification; + break; + } + } + requireArgument(notification != null, "Notification in notification context cannot be null."); + for (Trigger tempTrigger : context.getAlert().getTriggers()) { + if (tempTrigger.getName().equalsIgnoreCase(context.getTrigger().getName())) { + trigger = tempTrigger; + break; + } + } + requireArgument(trigger != null, "Trigger in notification context cannot be null."); + + Set to = new HashSet(notification.getSubscriptions()); + String feed = generateGusFeed(notification, trigger, context, status); + + return postToGus(context.getHistory(), + to, + feed, + _config, + context); + } + + private String generateGusFeed(Notification notification, Trigger trigger, NotificationContext context, NotificationStatus status) { + StringBuilder sb = new StringBuilder(); + Alert currentAlert = notification.getAlert(); + String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); + String notificationMessage = status == NotificationStatus.TRIGGERED ? 
"Triggered" : "Cleared"; + sb.append(MessageFormat.format("Alert {0} was {1} at {2}\n", TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()), notificationMessage, + DATE_FORMATTER.get().format(new Date(context.getTriggerFiredTime())))); + String customText = context.getNotification().getCustomText(); + if( customText != null && customText.length()>0 && status == NotificationStatus.TRIGGERED){ + sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("\n"); + } + + context.getAlertEvaluationTrackingID().ifPresent(trackingID -> { + sb.append("Tracking ID: " + trackingID + "\n"); + }); + + if(currentAlert.getNotifications().size() > 1) + sb.append(MessageFormat.format("Notification: {0}\n", TemplateReplacer.applyTemplateChanges(context, notification.getName()))); + if(currentAlert.getTriggers().size() > 1) + sb.append(MessageFormat.format("Triggered by: {0}\n", TemplateReplacer.applyTemplateChanges(context, trigger.getName()))); + if(status == NotificationStatus.TRIGGERED) { + sb.append(MessageFormat.format("Notification is on cooldown until: {0}\n", + DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration())))); + } + + if (context.getEvaluatedMetricSnapshotURL().isPresent() && !context.getEvaluatedMetricSnapshotURL().get().equals("")) { + sb.append(MessageFormat.format("Snapshot of the evaluated metric data: {0}\n", context.getEvaluatedMetricSnapshotURL().get())); + } else { + if(!expression.equals("")) { + sb.append(MessageFormat.format("URL for evaluated metric expression: {0}\n", getExpressionUrl(expression))); + } + } + + sb.append(MessageFormat.format("Current view of the metric expression: {0}\n", + getExpressionUrl(context.getAlert().getExpression()))); + + if(context.getTriggeredMetric()!=null) { + if(status == NotificationStatus.TRIGGERED){ + sb.append(MessageFormat.format("Triggered on Metric: {0}", context.getTriggeredMetric().getIdentifier())); + }else { + sb.append(MessageFormat.format("Cleared on Metric: {0}", context.getTriggeredMetric().getIdentifier())); + } + } + + sb.append(MessageFormat.format("Trigger details: {0}\n", getTriggerDetails(trigger, context))); + if(!trigger.getType().equals(TriggerType.NO_DATA) && status == NotificationStatus.TRIGGERED){ + sb.append(MessageFormat.format("Triggering event value: {0}\n", context.getTriggerEventValue())); + } + sb.append("\n"); + for (String metricToAnnotate : notification.getMetricsToAnnotate()) { + sb.append(MessageFormat.format("Annotated series for {0}: {1}\n", metricToAnnotate, + getMetricUrl(metricToAnnotate, context.getTriggerFiredTime()))); + } + sb.append("\n"); + sb.append(MessageFormat.format("Alert definition: {0}\n", getAlertUrl(notification.getAlert().getId()))); + return sb.toString(); + } + + public boolean postToGus(History history, + Set to, + String feed, + SystemConfiguration config, + NotificationContext context) { + boolean result = false; + String failureMsg = null; + int retries = 0; + + if (Boolean.valueOf(config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.GUS_ENABLED))) { + String postEndpoint = config.getValue(Property.POST_ENDPOINT.getName(), Property.POST_ENDPOINT.getDefaultValue()); + // So far works for only one group, will accept a set of string in future. 
+ String groupId = to.toArray(new String[to.size()])[0]; + + CloseableHttpClient httpClient = getGusTransportInstance().getHttpClient(); + boolean refresh = false; // get cached EndpointInfo by default + for (int i = 0; i < MAX_ATTEMPTS_GUS_POST; i++) { + retries = i; + CloseableHttpResponse response = null; + try { + EndpointInfo endpointInfo = getGusTransportInstance().getEndpointInfo(refresh); + String gusMessage = MessageFormat.format("{0}&subjectId={1}&text={2}", + postEndpoint, + groupId, + URLEncoder.encode(feed.toString(), "UTF-8")); + RequestBuilder rb = RequestBuilder.post() + .setHeader("Authorization", "Bearer " + endpointInfo.getToken()) + .setEntity(new StringEntity(gusMessage, ContentType.create("application/x-www-form-urlencoded"))) + .setUri(postEndpoint); + + response = httpClient.execute(rb.build()); + int respCode = response.getStatusLine().getStatusCode(); + + _logger.info("Gus message response code '{}'", respCode); + if (respCode == 201 || respCode == 204) { + String infoMsg = MessageFormat.format("Success - send to GUS group {0}", groupId); + _logger.info(infoMsg); + history.appendMessageNUpdateHistory(infoMsg, null, 0); + result = true; + break; + } else { + final String gusPostResponseBody = EntityUtils.toString(response.getEntity()); + failureMsg = MessageFormat.format("Failure - send to GUS group {0}. Cause {1}", groupId, gusPostResponseBody); + _logger.error(failureMsg); + history.appendMessageNUpdateHistory(failureMsg, null, 0); + + List> jsonResponseBody = new Gson().fromJson(gusPostResponseBody, List.class); + if (jsonResponseBody != null && jsonResponseBody.size() > 0) { + Map responseBodyMap = jsonResponseBody.get(0); + if (responseBodyMap != null && + ("INVALID_HEADER_TYPE".equals(responseBodyMap.get("message")) || + "INVALID_AUTH_HEADER".equals(responseBodyMap.get("errorCode")))) { + _logger.warn("Failed with invalid auth header, attempting to refresh token if possible"); + refresh = true; + continue; + } + } + refresh = false; + } + } catch (GetAuthenticationTokenFailureException e) { + failureMsg = MessageFormat.format("Caught GetAuthenticationTokenFailureException {0} when posting to gus group {1}, attempting to refresh token if possible", + e, groupId); + _logger.error(failureMsg); + history.appendMessageNUpdateHistory(failureMsg, null, 0); + refresh = true; // try forced refresh of token + } catch (Exception e) { + failureMsg = MessageFormat.format("Throws Exception {0} when posting to gus group {1}", e, groupId); + _logger.error(failureMsg); + history.appendMessageNUpdateHistory(failureMsg, null, 0); + refresh = false; + } finally { + try { + if (response != null) { + response.close(); + } + } catch (IOException e) { + _logger.error("Exception while attempting to close post to GUS response", e); + } + } + } + monitorService.modifyCounter(MonitorService.Counter.GUS_NOTIFICATIONS_RETRIES, retries, null); + monitorService.modifyCounter(MonitorService.Counter.GUS_NOTIFICATIONS_FAILED, result ? 0 : 1, null); + } else { + failureMsg = MessageFormat.format("Sending GUS notification is disabled. 
Not sending message to groups {0}.", to); + _logger.warn(failureMsg); + history.appendMessageNUpdateHistory(failureMsg, null, 0); + } + + context.setNotificationRetries(retries); + return result; } - - private void sendGusNotification(NotificationContext context, NotificationStatus status) { - Notification notification = null; - Trigger trigger = null; - - for (Notification tempNotification : context.getAlert().getNotifications()) { - if (tempNotification.getName().equalsIgnoreCase(context.getNotification().getName())) { - notification = tempNotification; - break; - } - } - requireArgument(notification != null, "Notification in notification context cannot be null."); - for (Trigger tempTrigger : context.getAlert().getTriggers()) { - if (tempTrigger.getName().equalsIgnoreCase(context.getTrigger().getName())) { - trigger = tempTrigger; - break; - } - } - requireArgument(trigger != null, "Trigger in notification context cannot be null."); - - Set to = new HashSet(notification.getSubscriptions()); - String feed = generateGusFeed(notification, trigger, context, status); - - postToGus(to, feed); + + @Override + public Properties getNotifierProperties() { + Properties result = super.getNotifierProperties(); + + for( Property property : Property.values()) { + result.put(property.getName(), property.getDefaultValue()); + } + return result; } - private String generateGusFeed(Notification notification, Trigger trigger, NotificationContext context, NotificationStatus status) { - StringBuilder sb = new StringBuilder(); - Alert currentAlert = notification.getAlert(); - String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); - String notificationMessage = status == NotificationStatus.TRIGGERED ? "Triggered" : "Cleared"; - sb.append(MessageFormat.format("Alert {0} was {1} at {2}\n", TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()), notificationMessage, - DATE_FORMATTER.get().format(new Date(context.getTriggerFiredTime())))); - String customText = context.getNotification().getCustomText(); - if( customText != null && customText.length()>0 && status == NotificationStatus.TRIGGERED){ - sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("\n"); - } - if(currentAlert.getNotifications().size() > 1) - sb.append(MessageFormat.format("Notification: {0}\n", TemplateReplacer.applyTemplateChanges(context, notification.getName()))); - if(currentAlert.getTriggers().size() > 1) - sb.append(MessageFormat.format("Triggered by: {0}\n", TemplateReplacer.applyTemplateChanges(context, trigger.getName()))); - if(status == NotificationStatus.TRIGGERED) { - sb.append(MessageFormat.format("Notification is on cooldown until: {0}\n", - DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration())))); - } - if(!expression.equals("")) { - sb.append(MessageFormat.format("URL for evaluated metric expression: {0}\n", getExpressionUrl(expression))); - } else { - sb.append(MessageFormat.format("Evaluated metric expression: {0}\n", context.getAlert().getExpression())); - } - - if(context.getTriggeredMetric()!=null) { - if(status == NotificationStatus.TRIGGERED){ - sb.append(MessageFormat.format("Triggered on Metric: {0}
", context.getTriggeredMetric().getIdentifier())); - }else { - sb.append(MessageFormat.format("Cleared on Metric: {0}
", context.getTriggeredMetric().getIdentifier())); - } - } - - sb.append(MessageFormat.format("Trigger details: {0}\n", getTriggerDetails(trigger, context))); - if(!trigger.getType().equals(TriggerType.NO_DATA) && status == NotificationStatus.TRIGGERED){ - sb.append(MessageFormat.format("Triggering event value: {0}\n", context.getTriggerEventValue())); - } - sb.append("\n"); - for (String metricToAnnotate : notification.getMetricsToAnnotate()) { - sb.append(MessageFormat.format("Annotated series for {0}: {1}\n", metricToAnnotate, - getMetricUrl(metricToAnnotate, context.getTriggerFiredTime()))); - } - sb.append("\n"); - sb.append(MessageFormat.format("Alert definition: {0}\n", getAlertUrl(notification.getAlert().getId()))); - return sb.toString(); - } - - private void postToGus(Set to, String feed) { - - if (Boolean.valueOf(_config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.GUS_ENABLED))) { - // So far works for only one group, will accept a set of string in future. - String groupId = to.toArray(new String[to.size()])[0]; - PostMethod gusPost = new PostMethod(_config.getValue(Property.POST_ENDPOINT.getName(), Property.POST_ENDPOINT.getDefaultValue())); - - try { - gusPost.setRequestHeader("Authorization", "Bearer " + generateAccessToken()); - String gusMessage = MessageFormat.format("{0}&subjectId={1}&text={2}", - _config.getValue(Property.POST_ENDPOINT.getName(), Property.POST_ENDPOINT.getDefaultValue()), groupId, - URLEncoder.encode(feed.toString(), "UTF-8")); - - gusPost.setRequestEntity(new StringRequestEntity(gusMessage, "application/x-www-form-urlencoded", null)); - HttpClient httpclient = getHttpClient(_config); - int respCode = httpclient.executeMethod(gusPost); - _logger.info("Gus message response code '{}'", respCode); - if (respCode == 201 || respCode == 204) { - _logger.info("Success - send to GUS group {}", groupId); - } else { - _logger.error("Failure - send to GUS group {}. Cause {}", groupId, gusPost.getResponseBodyAsString()); - } - } catch (Exception e) { - _logger.error("Throws Exception {} when posting to gus group {}", e, groupId); - } finally { - gusPost.releaseConnection(); - } - } else { - _logger.info("Sending GUS notification is disabled. Not sending message to groups '{}'.", to); - } - } - - private String generateAccessToken() { - // Set up an HTTP client that makes a connection to REST API. - HttpClient httpclient = getHttpClient(_config); - - // Send a post request to the OAuth URL. 
- PostMethod oauthPost = new PostMethod(_config.getValue(Property.GUS_ENDPOINT.getName(), Property.GUS_ENDPOINT.getDefaultValue())); - - try { - oauthPost.addParameter("grant_type", "password"); - oauthPost.addParameter("client_id", - URLEncoder.encode(_config.getValue(Property.GUS_CLIENT_ID.getName(), Property.GUS_CLIENT_ID.getDefaultValue()), UTF_8)); - oauthPost.addParameter("client_secret", - URLEncoder.encode(_config.getValue(Property.GUS_CLIENT_SECRET.getName(), Property.GUS_CLIENT_SECRET.getDefaultValue()), UTF_8)); - oauthPost.addParameter("username", _config.getValue(Property.ARGUS_GUS_USER.getName(), Property.ARGUS_GUS_USER.getDefaultValue())); - oauthPost.addParameter("password", _config.getValue(Property.ARGUS_GUS_PWD.getName(), Property.ARGUS_GUS_PWD.getDefaultValue())); - - int respCode = httpclient.executeMethod(oauthPost); - - _logger.info("Response code '{}'", respCode); - - // Check for success - if (respCode == 200) { - JsonObject authResponse = new Gson().fromJson(oauthPost.getResponseBodyAsString(), JsonObject.class); - String endpoint = authResponse.get("instance_url").getAsString(); - String token = authResponse.get("access_token").getAsString(); - - _logger.info("Success - getting access_token for endpoint '{}'", endpoint); - _logger.info("access_token '{}'", token); - return token; - } - else { - _logger.error("Failure - getting oauth2 token, check username/password: '{}'", oauthPost.getResponseBodyAsString()); - } - } catch (RuntimeException | IOException e) { - _logger.error("Failure - exception getting gus access_token {}", e); - } finally { - oauthPost.releaseConnection(); - } - return Strings.EMPTY; - } - - /** - * Get HttpClient with proper proxy and timeout settings. - * - * @param config The system configuration. Cannot be null. - * - * @return HttpClient - */ - public HttpClient getHttpClient(SystemConfiguration config) { - HttpClient httpclient = new HttpClient(theConnectionManager); - - // Wait for 2 seconds to get a connection from pool - httpclient.getParams().setParameter("http.connection-manager.timeout", 2000L); - - String host = config.getValue(Property.GUS_PROXY_HOST.getName(), Property.GUS_PROXY_HOST.getDefaultValue()); - - if (host != null && host.length() > 0) { - httpclient.getHostConfiguration().setProxy(host, - Integer.parseInt(config.getValue(Property.GUS_PROXY_PORT.getName(), Property.GUS_PROXY_PORT.getDefaultValue()))); - } - return httpclient; - } - - @Override - public Properties getNotifierProperties() { - Properties result = super.getNotifierProperties(); - - for( Property property : Property.values()) { - result.put(property.getName(), property.getDefaultValue()); - } - return result; - } - - public enum Property { - /** The GUS user name. */ - ARGUS_GUS_USER("notifier.property.alert.gus_user", "test@test.com"), - /** The GUS password. */ - ARGUS_GUS_PWD("notifier.property.alert.gus_pwd", "password"), - /** The GUS endpoint. */ - GUS_ENDPOINT("notifier.property.alert.gus_endpoint", "https://gus.test.com"), - /** The GUS client ID. */ - GUS_CLIENT_ID("notifier.property.alert.gus_client_id", "test123"), - /** The GUS client secret. */ - GUS_CLIENT_SECRET("notifier.property.alert.gus_client_secret", "password"), - /** The GUS post endpoint. */ - POST_ENDPOINT("notifier.property.alert.gus_post_endpoint", "https://gus.test.com"), - /** The GUS proxy host. */ - GUS_PROXY_HOST("notifier.property.proxy.host", ""), - /** The GUS port. 
*/ - GUS_PROXY_PORT("notifier.property.proxy.port", ""); - - private final String _name; - private final String _defaultValue; - - private Property(String name, String defaultValue) { - _name = name; - _defaultValue = defaultValue; - } - - /** - * Returns the property name. - * - * @return The property name. - */ - public String getName() { - return _name; - } - - /** - * Returns the default value. - * - * @return The default value. - */ - public String getDefaultValue() { - return _defaultValue; - } - } + protected GusTransport getGusTransportInstance() { + if (gusTransport == null) { + synchronized (this) { + if (gusTransport == null) { + gusTransport = new GusTransport(_config.getValue(Property.GUS_PROXY_HOST.getName(), null), // no default since this is optional + _config.getValue(Property.GUS_PROXY_PORT.getName(), null), // no default since this is optional + _config.getValue(Property.GUS_PROXY_USERNAME.getName(), null), // no default since this is optional + _config.getValue(Property.GUS_PROXY_PASSWORD.getName(), null), // no default since this is optional + _config.getValue(Property.GUS_ENDPOINT.getName(), Property.GUS_ENDPOINT.getDefaultValue()), + _config.getValue(Property.GUS_CLIENT_ID.getName(), Property.GUS_CLIENT_ID.getDefaultValue()), + _config.getValue(Property.GUS_CLIENT_SECRET.getName(), Property.GUS_CLIENT_SECRET.getDefaultValue()), + _config.getValue(Property.ARGUS_GUS_USER.getName(), Property.ARGUS_GUS_USER.getDefaultValue()), + _config.getValue(Property.ARGUS_GUS_PWD.getName(), Property.ARGUS_GUS_PWD.getDefaultValue()), + new GusTransport.EndpointInfo(_config.getValue(Property.GUS_ENDPOINT.getName(), Property.GUS_ENDPOINT.getDefaultValue()), GusTransport.NO_TOKEN), + Integer.parseInt(_config.getValue(Property.GUS_CONNECTION_POOL_MAX_SIZE.getName(), Property.GUS_CONNECTION_POOL_MAX_SIZE.getDefaultValue())), + Integer.parseInt(_config.getValue(Property.GUS_CONNECTION_POOL_MAX_PER_ROUTE.getName(), Property.GUS_CONNECTION_POOL_MAX_PER_ROUTE.getDefaultValue()))); + } + } + } + return gusTransport; + } + + public enum Property { + /** The GUS user name. */ + ARGUS_GUS_USER("notifier.property.alert.gus_user", "test@test.com"), + /** The GUS password. */ + ARGUS_GUS_PWD("notifier.property.alert.gus_pwd", "password"), + /** The GUS endpoint. */ + GUS_ENDPOINT("notifier.property.alert.gus_endpoint", "https://gus.test.com"), + /** The GUS client ID. */ + GUS_CLIENT_ID("notifier.property.alert.gus_client_id", "test123"), + /** The GUS client secret. */ + GUS_CLIENT_SECRET("notifier.property.alert.gus_client_secret", "password"), + /** The GUS post endpoint. */ + POST_ENDPOINT("notifier.property.alert.gus_post_endpoint", "https://gus.test.com"), + /** The GUS proxy host. */ + GUS_PROXY_HOST("notifier.property.proxy.host", ""), + /** The GUS port. */ + GUS_PROXY_PORT("notifier.property.proxy.port", ""), + /** The GUS proxy username. */ + GUS_PROXY_USERNAME("notifier.property.proxy.username", ""), + /** The GUS proxy password. 
*/ + GUS_PROXY_PASSWORD("notifier.property.proxy.password", ""), + /** The connection pool size for connecting to Gus */ + GUS_CONNECTION_POOL_MAX_SIZE("notifier.property.gus.connectionpool.maxsize", "55"), + /** The connection pool max per route for connecting to Gus */ + GUS_CONNECTION_POOL_MAX_PER_ROUTE("notifier.property.gus.connectionpool.maxperroute", "20"); + + private final String _name; + private final String _defaultValue; + + private Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusTransport.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusTransport.java new file mode 100644 index 000000000..4d2dbdd28 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/GusTransport.java @@ -0,0 +1,293 @@ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.common.base.Supplier; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.util.concurrent.ExecutionError; +import com.google.common.util.concurrent.UncheckedExecutionException; +import com.google.gson.Gson; +import com.google.gson.JsonObject; +import com.salesforce.dva.argus.service.ArgusTransport; +import org.apache.commons.lang.StringUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URLEncoder; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Manage Gus connections, oAuth and timeouts. + * + */ +public class GusTransport { + //~ Static fields/initializers ******************************************************************************************************************* + public static final String NO_TOKEN = "NO_TOKEN"; + private static final Logger LOGGER = LoggerFactory.getLogger(GusTransport.class); + private static final String HTTP_CONNECTION_MANAGER_TIMEOUT_PARAMETER = "http.connection-manager.timeout"; + private static final long HTTP_CONNECTION_MANAGER_TIMEOUT_MILLIS = 2000L; + private static final String UTF_8 = "UTF-8"; + private static final long MIN_SESSION_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; // Wait 5min b/w refresh attempts + private static final String DUMMY_CACHE_KEY = "endpoint"; // dummy key since we are only caching 1 value + + /* + * Using a single entry cache to hold the current EndpointInfo and manage refreshes. Since the CacheLoader ignores + * the key, there will only ever be 1 value. Therefore, a dummy key is used. 
If a dummy key is not used, every new + * key will require a get() call from the Supplier instead of accessing the cached value under a diff key if it + * already exists. + * + * LoadingCache was chosen over Supplier with expiry since refresh is supported in LoadingCache. + * If LoadingCache.refresh() fails, the old value will continue to get used. + */ + private final LoadingCache endpointInfoCache; + private final ArgusTransport transport; + + public GusTransport(Optional proxyHost, Optional proxyPort, Optional proxyUsername, + Optional proxyPassword, String authEndpoint, String authClientId, String authClientSecret, + String authUsername, String authPassword, EndpointInfo defaultEndpointInfo, + long tokenCacheRefreshPeriodMillis, int connectionPoolMaxSize, int connectionPoolMaxPerRoute) { + requireArgument(StringUtils.isNotBlank(authEndpoint), + String.format("authEndpoint(%s) must not be blank", authEndpoint)); + requireArgument(StringUtils.isNotBlank(authClientId), + String.format("authClientId(%s) must not be blank", authClientId)); + requireArgument(StringUtils.isNotBlank(authClientSecret), + String.format("authClientSecret(%s) must not be blank", authClientSecret)); + requireArgument(StringUtils.isNotBlank(authUsername), + String.format("authUsername(%s) must not be blank", authUsername)); + requireArgument(StringUtils.isNotBlank(authPassword), + String.format("authPassword(%s) must not be blank", authPassword)); + requireArgument(defaultEndpointInfo != null, "defaultEndpointInfo must not be null"); + requireArgument(StringUtils.isNotBlank(defaultEndpointInfo.getEndPoint()), + String.format("defaultEndpointInfo.endpoint(%s) must not be blank", defaultEndpointInfo.getEndPoint())); + requireArgument(StringUtils.isNotBlank(defaultEndpointInfo.getToken()), + String.format("defaultEndpointInfo.token(%s) must not be blank", defaultEndpointInfo.getToken())); + requireArgument(tokenCacheRefreshPeriodMillis > 0, + String.format("cacheRefreshPeriodMillis(%d) must be > 0", tokenCacheRefreshPeriodMillis)); + + this.transport = new ArgusTransport(proxyHost, proxyPort, proxyUsername, proxyPassword, connectionPoolMaxSize, connectionPoolMaxPerRoute); + + EndpointInfoSupplier supplier = new EndpointInfoSupplier(authEndpoint, authClientId, + authClientSecret, authUsername, authPassword); + this.endpointInfoCache = CacheBuilder.newBuilder() + .refreshAfterWrite(tokenCacheRefreshPeriodMillis, TimeUnit.MILLISECONDS) + .initialCapacity(1) + .build(CacheLoader.from(supplier)); + // init cache + try { + this.endpointInfoCache.get(DUMMY_CACHE_KEY); + } catch (ExecutionException | UncheckedExecutionException | ExecutionError e) { + LOGGER.error("Failed to get auth token. 
Exception: {}", e.getMessage()); + // put default if there is a failure on init + this.endpointInfoCache.put(DUMMY_CACHE_KEY, defaultEndpointInfo); + } + } + + public GusTransport(Optional proxyHost, Optional proxyPort, Optional proxyUsername, Optional proxyPassword, String authEndpoint, + String authClientId, String authClientSecret, String authUsername, String authPassword, + EndpointInfo defaultEndpointInfo, int connectionPoolMaxSize, int connectionPoolMaxPerRoute) { + this(proxyHost, proxyPort, proxyUsername, proxyPassword, authEndpoint, authClientId, authClientSecret, authUsername, authPassword, + defaultEndpointInfo, MIN_SESSION_REFRESH_THRESHOLD_MILLIS, connectionPoolMaxSize, connectionPoolMaxPerRoute); + } + + public GusTransport(String proxyHost, String proxyPort, String proxyUsername, String proxyPassword, String authEndpoint, + String authClientId, String authClientSecret, String authUsername, String authPassword, + EndpointInfo defaultEndpointInfo, int connectionPoolMaxSize, int connectionPoolMaxPerRoute) { + this(ArgusTransport.validateProxyHostAndPortStrings(proxyHost, proxyPort) ? Optional.of(proxyHost) : Optional.empty(), + ArgusTransport.validateProxyHostAndPortStrings(proxyHost, proxyPort) ? Optional.of(Integer.parseInt(proxyPort)) : Optional.empty(), + ArgusTransport.validateProxyUsernameAndPassword(proxyUsername, proxyPassword) ? Optional.of(proxyUsername) : Optional.empty(), + ArgusTransport.validateProxyUsernameAndPassword(proxyUsername, proxyPassword) ? Optional.of(proxyPassword) : Optional.empty(), + authEndpoint, authClientId, authClientSecret, authUsername, authPassword, defaultEndpointInfo, + connectionPoolMaxSize, connectionPoolMaxPerRoute); + } + + //~ Methods ************************************************************************************************************************************** + + /** + * Get HttpClient. + * + * @return HttpClient + */ + public CloseableHttpClient getHttpClient() { + return transport.getHttpClient(); + } + + public EndpointInfo getEndpointInfo() throws GetAuthenticationTokenFailureException { + try { + return endpointInfoCache.get(DUMMY_CACHE_KEY); + } catch (ExecutionException | UncheckedExecutionException | ExecutionError e) { + Throwable cause = e.getCause(); + if (cause instanceof GetAuthenticationTokenFailureRuntimeException) { + throw new GetAuthenticationTokenFailureException(cause.getMessage(), cause.getCause()); + } else { + throw new GetAuthenticationTokenFailureException("Getting auth token failed", e); + } + } + } + + /** + * Get authenticated endpoint and token. + * + * @param refresh - If true get a new token even if one exists. + * + * @return EndpointInfo - with valid endpoint and token. The token can be a dummy or expired. + */ + public EndpointInfo getEndpointInfo(boolean refresh) throws GetAuthenticationTokenFailureException { + if (refresh) { + endpointInfoCache.refresh(DUMMY_CACHE_KEY); + } + return getEndpointInfo(); + } + + /** + * Supplier for loading EndpointInfo (endpoint and token). 
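The single-entry LoadingCache idiom described in the comment above generalizes beyond GusTransport. A minimal, self-contained sketch of the same pattern, using illustrative names rather than the Argus classes, could look like this:

    import com.google.common.base.Supplier;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.TimeUnit;

    public class SingleEntryTokenCache {
        private static final String KEY = "token"; // dummy key; only one value is ever cached

        private final LoadingCache<String, String> cache;

        public SingleEntryTokenCache(Supplier<String> tokenSupplier, long refreshMillis) {
            // refreshAfterWrite keeps serving the old token when a refresh attempt fails
            this.cache = CacheBuilder.newBuilder()
                    .refreshAfterWrite(refreshMillis, TimeUnit.MILLISECONDS)
                    .initialCapacity(1)
                    .build(CacheLoader.from(tokenSupplier));
        }

        public String get() throws ExecutionException {
            return cache.get(KEY);
        }
    }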
+ */ + public class EndpointInfoSupplier implements Supplier { + private final String authEndpoint; + private final String authClientId; + private final String authClientSecret; + private final String authUsername; + private final String authPassword; + + public EndpointInfoSupplier(String authEndpoint, String authClientId, String authClientSecret, String authUsername, String authPassword) { + this.authEndpoint = authEndpoint; + this.authClientId = authClientId; + this.authClientSecret = authClientSecret; + this.authUsername = authUsername; + this.authPassword = authPassword; + } + + @Override + public EndpointInfo get() { + CloseableHttpResponse response = null; + try { + RequestBuilder rb = RequestBuilder.post() + .setUri(authEndpoint) + .addParameter("grant_type", "password") + .addParameter("client_id", URLEncoder.encode(authClientId, UTF_8)) + .addParameter("client_secret", URLEncoder.encode(authClientSecret, UTF_8)) + .addParameter("username", authUsername) + .addParameter("password", authPassword); + + response = transport.getHttpClient().execute(rb.build()); + int respCode = response.getStatusLine().getStatusCode(); + String responseBodyAsString = EntityUtils.toString(response.getEntity()); + + // Check for success + if (respCode == 200) { + JsonObject authResponse = new Gson().fromJson(responseBodyAsString, JsonObject.class); + String endpoint = authResponse.get("instance_url").getAsString(); + String token = authResponse.get("access_token").getAsString(); + + LOGGER.info("Success - getting access_token for endpoint '{}'", endpoint); + LOGGER.debug("access_token '{}'", token); + return new EndpointInfo(endpoint, token); + } else { + String errorMessage = String.format("Failure - getting oauth2 token (responseCode=%d), check username/password: '%s'", + respCode, + responseBodyAsString); + LOGGER.error(errorMessage); + throw new GetAuthenticationTokenFailureRuntimeException(errorMessage); + } + } catch (GetAuthenticationTokenFailureRuntimeException e) { + throw e; + } catch (Exception e) { + LOGGER.error("Failure - exception getting access_token '{}'", e); + throw new GetAuthenticationTokenFailureRuntimeException("Failure - exception getting access_token", e); + } finally { + if (response != null) { + try { + response.close(); + } catch (IOException e) { + LOGGER.error("Exception while attempting to close response", e); + } + } + } + } + } + + /** + * Utility class for endpoint information. + * + * @author fiaz.hossain + */ + public static class EndpointInfo { + + private final String endPoint; + private final String token; + + protected EndpointInfo(final String endPoint, final String token) { + this.endPoint = endPoint; + this.token = token; + } + + /** + * Valid endpoint. Either from config or endpont after authentication + * + * @return endpoint + */ + public String getEndPoint() { + return endPoint; + } + + /** + * Token can be either active, expired or a dummy value. + * + * @return token + */ + public String getToken() { + return token; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + EndpointInfo that = (EndpointInfo) o; + return Objects.equals(endPoint, that.endPoint) && + Objects.equals(token, that.token); + } + + @Override + public int hashCode() { + return Objects.hash(endPoint, token); + } + } + + /** + * Exception (checked) for failures when attempting to get a new auth token. 
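Since the exception declared below is checked, callers of getEndpointInfo() must handle the case where no token could be loaded at all. A hypothetical call site (gusTransport and LOGGER are assumed names, not from this diff) might read:

    try {
        GusTransport.EndpointInfo info = gusTransport.getEndpointInfo(true); // true forces a cache refresh
        // ... build the request using info.getEndPoint() and info.getToken() ...
    } catch (GusTransport.GetAuthenticationTokenFailureException e) {
        // A failed refresh alone keeps serving the old cached value; this only
        // fires when no usable EndpointInfo could be loaded at all.
        LOGGER.warn("Could not obtain an auth token; skipping this attempt", e);
    }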
+ */ + public static class GetAuthenticationTokenFailureException extends Exception { + public GetAuthenticationTokenFailureException(String message) { + super(message); + } + public GetAuthenticationTokenFailureException(String message, Throwable cause) { super(message, cause); } + public GetAuthenticationTokenFailureException(Throwable cause) { + super(cause); + } + } + + /** + * RuntimeException for failures when attempting to get a new auth token (meant to be used internally). + */ + private static class GetAuthenticationTokenFailureRuntimeException extends RuntimeException { + public GetAuthenticationTokenFailureRuntimeException(String message) { + super(message); + } + public GetAuthenticationTokenFailureRuntimeException(String message, Throwable cause) { + super(message, cause); + } + public GetAuthenticationTokenFailureRuntimeException(Throwable cause) { + super(cause); + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NoOpNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NoOpNotifier.java new file mode 100644 index 000000000..c449e1045 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NoOpNotifier.java @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2019, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.service.AlertService.Notifier; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.util.Properties; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Implementation of NoOpNotifier. Used by Alert Testing. 
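Because both sendNotification and clearNotification below unconditionally return true, this notifier can stand in for any real notifier while alert evaluation is exercised. A hypothetical test wiring (systemConfiguration and context are assumed to exist):

    // No external side effects: every notification is accepted and discarded.
    Notifier notifier = new NoOpNotifier(systemConfiguration);
    boolean accepted = notifier.sendNotification(context); // always true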
+ * + * @author Ian Keck (ikeck@salesforce.com) + */ +public class NoOpNotifier implements Notifier { + + private Logger _logger = LoggerFactory.getLogger(NoOpNotifier.class); + private SystemConfiguration config; + /** + * Creates a new NoOp Notifier. + * + * @param config The system configuration. Cannot be null. + */ + @Inject + public NoOpNotifier(SystemConfiguration config) { + this.config = config; + requireArgument(config != null, "The configuration cannot be null."); + } + + @Override + public boolean sendNotification(NotificationContext context) { return true; } + + @Override + public boolean clearNotification(NotificationContext context) { return true; } + + @Override + public String getName() { + return NoOpNotifier.class.getName(); + } + + @Override + public Properties getNotifierProperties(){ + return new Properties(); + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MutableGauge.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NotificationForwarder.java similarity index 62% rename from ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MutableGauge.java rename to ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NotificationForwarder.java index b0e6ae4b5..59a9096bc 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/MutableGauge.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/NotificationForwarder.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2018, Salesforce.com, Inc. + * Copyright (c) 2016, Salesforce.com, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without @@ -29,49 +29,20 @@ * POSSIBILITY OF SUCH DAMAGE. */ -package com.salesforce.dva.argus.entity; +package com.salesforce.dva.argus.service.alert.notifier; /** - * This the implementation the JMX-exported internal metric object, a standard - * MXBean. It is called MutableGauge because it need to read internal data - * and set its value. The JMX-facing interface is read-only to prevent others - * manipulate the value after the fact - * - * @author taozhang * + * @author Ian Keck (ikeck@salesforce.com) */ -public class MutableGauge implements GaugeMXBean { +public interface NotificationForwarder { - /* - * object name of the gauge, also the key to the object - */ - private String _objName; - - /** - * value of the gauge - */ - private Double _val; - - public MutableGauge(String objName) { - _objName = objName; - _val = 0.0; - } + public int forwardNotifications() throws InterruptedException; + public int getNotificationsDelivered(); + public int getNotificationsEnqueued(); + public int getNotificationsDiscarded(); + public int getMaxQueueLength(); + public int getNotificationsProcessed(); - @Override - public String getObjectName() { - return _objName; - } - - @Override - public Double getValue() { - return _val; - } - - /** - * set value to gauge - * @param val the value for the gauge - */ - public void setValue(Double val) { - _val = val; - } } +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifier.java new file mode 100644 index 000000000..04c4fec89 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifier.java @@ -0,0 +1,765 @@ +/* + * Copyright (c) 2019, Salesforce.com, Inc. 
+ * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.ArgusTransport; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.AlertUtils; +import com.salesforce.dva.argus.util.TemplateReplacer; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.HttpStatus; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.RequestBuilder; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import java.util.TimeZone; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + + +/** + * PagerDuty 
notifier + * + * @author Phil Liew (pliew@salesforce.com) + */ +@Singleton +public class PagerDutyNotifier extends AuditNotifier { + + private static final Logger LOGGER = LoggerFactory.getLogger(PagerDutyNotifier.class); + private static final long DEFAULT_HTTP_RESPONSE_CODE_429_RETRY_DELAY_TIME = 1000; + + public static final ThreadLocal DATE_FORMAT = new ThreadLocal() { + + @Override + protected DateFormat initialValue() { + DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'"); + sdf.setTimeZone(TimeZone.getTimeZone("UTC")); + return sdf; + } + }; + + private final MonitorService monitorService; + private final String endpoint; + private final String token; + private final ArgusTransport transport; + private final long httpResponseCode429RetryDelayTime; + + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new PagerDutyNotifier object. + * + * @param metricService The metric service to use. Cannot be null. + * @param annotationService The annotation service to use. Cannot be null. + * @param auditService The audit service to use. Cannot be null. + * @param config The system configuration. Cannot be null. + * @param emf The entity manager factory to use. Cannot be null. + */ + @Inject + public PagerDutyNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, SystemConfiguration config, Provider emf, + MonitorService monitorService) { + this(metricService, + annotationService, + auditService, + config, + emf, + monitorService, + config.getValue(Property.PAGERDUTY_ENDPOINT.getName(), Property.PAGERDUTY_ENDPOINT.getDefaultValue()), + config.getValue(Property.PAGERDUTY_TOKEN.getName(), Property.PAGERDUTY_TOKEN.getDefaultValue()), + DEFAULT_HTTP_RESPONSE_CODE_429_RETRY_DELAY_TIME); + } + + protected PagerDutyNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, + SystemConfiguration config, Provider emf, + MonitorService monitorService, String endpoint, String token, long httpResponseCode429RetryDelayTime) { + super(metricService, annotationService, auditService, config, emf); + requireArgument(config != null, "The configuration cannot be null."); + requireArgument(monitorService != null, "The monitorService cannot be null."); + requireArgument(!StringUtils.isBlank(endpoint), "The endpoint cannot be blank."); + requireArgument(!StringUtils.isBlank(token), "The token cannot be blank."); + this.monitorService = monitorService; + this.endpoint = endpoint; + this.token = token; + + String proxyHostString = config.getValue(Property.PAGERDUTY_PROXY_HOST.getName(), Property.PAGERDUTY_PROXY_HOST.getDefaultValue()); + String proxyPortString = config.getValue(Property.PAGERDUTY_PROXY_PORT.getName(), Property.PAGERDUTY_PROXY_PORT.getDefaultValue()); + String proxyUsernameString = config.getValue(Property.PAGERDUTY_PROXY_USERNAME.getName(), Property.PAGERDUTY_PROXY_USERNAME.getDefaultValue()); + String proxyPasswordString = config.getValue(Property.PAGERDUTY_PROXY_PASSWORD.getName(), Property.PAGERDUTY_PROXY_PASSWORD.getDefaultValue()); + + boolean isValidProxy = ArgusTransport.validateProxyHostAndPortStrings(proxyHostString, proxyPortString); + boolean isValidProxyCredentials = ArgusTransport.validateProxyUsernameAndPassword(proxyUsernameString, proxyPasswordString); + + this.transport = new ArgusTransport(isValidProxy ? Optional.of(proxyHostString) : Optional.empty(), + isValidProxy ?
Optional.of(Integer.parseInt(proxyPortString)) : Optional.empty(), + isValidProxyCredentials ? Optional.of(proxyUsernameString) : Optional.empty(), + isValidProxyCredentials ? Optional.of(proxyPasswordString) : Optional.empty(), + Integer.parseInt(config.getValue(Property.PAGERDUTY_CONNECTION_POOL_MAX_SIZE.getName(), Property.PAGERDUTY_CONNECTION_POOL_MAX_SIZE.getDefaultValue())), + Integer.parseInt(config.getValue(Property.PAGERDUTY_CONNECTION_POOL_MAX_PER_ROUTE.getName(), Property.PAGERDUTY_CONNECTION_POOL_MAX_PER_ROUTE.getDefaultValue()))); + this.httpResponseCode429RetryDelayTime = httpResponseCode429RetryDelayTime; + } + + //~ Methods ************************************************************************************************************************************** + + @Override + public String getName() { + return PagerDutyNotifier.class.getName(); + } + + @Override + protected boolean sendAdditionalNotification(NotificationContext context) { + requireArgument(context != null, "Notification context cannot be null."); + super.sendAdditionalNotification(context); + return sendPagerDutyNotification(context, NotificationStatus.TRIGGERED); + } + + @Override + protected boolean clearAdditionalNotification(NotificationContext context) { + requireArgument(context != null, "Notification context cannot be null."); + super.clearAdditionalNotification(context); + return sendPagerDutyNotification(context, NotificationStatus.CLEARED); + } + + protected boolean sendPagerDutyNotification(NotificationContext context, NotificationStatus status) { + Notification notification = context.getAlertNotification(); + requireArgument(notification != null, "Notification in notification context cannot be null."); + Trigger trigger = context.getAlertTrigger(); + requireArgument(trigger != null, "Trigger in notification context cannot be null."); + + List routingKeys = context.getNotification().getSubscriptions(); + requireArgument(routingKeys != null && !routingKeys.isEmpty(), "PagerDuty routing keys (subscriptions) cannot be empty."); + + if (routingKeys != null && routingKeys.size() > 1) { + // Only support one PagerDuty integration, there is no point for customers to get duplicate pager duty incidents for one notification + String warnMsg = MessageFormat.format("Only one PagerDuty integration key is supported, thus only {0} will be used", routingKeys.get(0)); + LOGGER.warn(warnMsg); + context.getHistory().appendMessageNUpdateHistory(warnMsg, null, 0); + } + String routingKey = routingKeys.get(0); + + String dedupKey = hashNotificationTriggerAndMetric(notification, trigger, context.getTriggeredMetric()); + PagerDutyMessage message = new PagerDutyMessage(routingKey, + NotificationStatus.TRIGGERED == status ? 
PagerDutyMessage.EventAction.TRIGGER : PagerDutyMessage.EventAction.RESOLVE, + dedupKey); + message.setClient("Argus Alert"); + message.setClientUrl(getAlertUrl(context.getAlert().getId())); + //message.addLink("Argus alert definition", getAlertUrl(context.getAlert().getId())); + message.setSummary( + TemplateReplacer.applyTemplateChanges(context, + "[Argus] Notification for Alert: " + context.getAlert().getName() + + " Notification: " + context.getNotification().getName() + + " Trigger: " + context.getTrigger().getName())); + String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); + message.setEvaluatedMetricExpression(expression); + + if (context.getEvaluatedMetricSnapshotURL().isPresent() && !context.getEvaluatedMetricSnapshotURL().get().equals("")) { + message.addLink("Snapshot of the evaluated metric: ", context.getEvaluatedMetricSnapshotURL().get()); + } + + message.addLink("Argus metric expression", getExpressionUrl(expression)); + for (String metricToAnnotate : notification.getMetricsToAnnotate()) { + message.addLink("Argus triggered metrics", getMetricUrl(metricToAnnotate, context.getTriggerFiredTime())); + } + message.setTriggerEvaluationTime(new Date(context.getTriggerFiredTime())); + message.setCooldown(new Date(context.getCoolDownExpiration())); + message.setTriggeredMetric(context.getTriggeredMetric().getIdentifier()); + message.setTriggerDetails(getTriggerDetails(context.getTrigger(), context)); + message.setTriggeringEventValue(Double.toString(context.getTriggerEventValue())); + context.getAlertEvaluationTrackingID().ifPresent(trackingID -> message.setTrackingID(trackingID)); + if (null != context.getNotification().getCustomText()) { + message.setCustomerText(TemplateReplacer.applyTemplateChanges(context, context.getNotification().getCustomText())); + } + if (null != context.getNotification().getMetricsToAnnotate() && !context.getNotification().getMetricsToAnnotate().isEmpty()) { + message.setMetricsToAnnotate(context.getNotification().getMetricsToAnnotate()); + } + message.setSource("Argus"); + message.setSeverity(PagerDutyMessage.Severity.ofLevel(notification.getSeverityLevel())); + message.setTimestamp(new Date()); + //message.setComponent(); + //message.setGroup(); + //message.setEventClass(); + + return sendMessage(context.getHistory(), message); + } + + protected String hashNotificationTriggerAndMetric(Notification n, Trigger t, Metric m) { + requireArgument(n != null && n.getId() != null, + "Notification cannot be null and notification id cannot be null."); + requireArgument(t != null, "Trigger cannot be null."); + requireArgument(m != null && m.getIdentifier() != null, + "Metric cannot be null and metric id cannot be null."); + + String notificationId = n.getId().toString(); + String triggerId = t.getId() != null ? 
t.getId().toString() : "0"; + int metricId = m.getIdentifier().hashCode(); + return String.format("%s$$%s$$%d", notificationId, triggerId, metricId); + } + + protected boolean sendMessage(History history, PagerDutyMessage message) { + boolean result = false; + String loggerMsg = null; + int retries = 0; + + String routingKey = message.getRoutingKey(); + PagerDutyMessage.EventAction eventAction = message.getEventAction(); + String dedupKey = message.getDedupKey(); + + if (Boolean.valueOf(_config.getValue(SystemConfiguration.Property.PAGERDUTY_ENABLED))) { + int maxPostAttempts = Integer.parseInt(_config.getValue(Property.PAGERDUTY_POST_MAX_ATTEMPTS.getName(), Property.PAGERDUTY_POST_MAX_ATTEMPTS.getDefaultValue())); + + CloseableHttpClient httpClient = transport.getHttpClient(); + CloseableHttpResponse response = null; + + String messageJson = generateJson(message); + + LOGGER.debug("PagerDuty request=" + messageJson); + RequestBuilder rb = RequestBuilder.post() + .setHeader("Authorization", "Token token=" + token) + .setEntity(new StringEntity(messageJson, ContentType.create("application/json"))) + .setUri(String.format("%s/v2/enqueue", endpoint)); + + for (int i = 0; i < maxPostAttempts; i++) { + try { + retries = i; + response = httpClient.execute(rb.build()); + LOGGER.debug("PagerDuty response=" + response); + int respCode = response.getStatusLine().getStatusCode(); + + // Check for success + if (respCode == HttpStatus.SC_ACCEPTED) { + loggerMsg = MessageFormat.format("Success - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2})", + routingKey, eventAction.getEventActionString(), dedupKey); + LOGGER.info(loggerMsg); + result = true; + break; + } else if (respCode == HttpStatus.SC_BAD_REQUEST) { + // Bad request, no need to retry + final String gusPostResponseBody = EntityUtils.toString(response.getEntity()); + loggerMsg = MessageFormat.format("Failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}) due to bad request, response {3}.", + routingKey, eventAction.getEventActionString(), dedupKey, gusPostResponseBody); + LOGGER.error(loggerMsg); + + break; + } else if (respCode == 429) { + // Too many requests, try again if possible + loggerMsg = MessageFormat.format("Failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}) due to too many requests.", + routingKey, eventAction.getEventActionString(), dedupKey); + LOGGER.warn(loggerMsg); + + /* From PagerDuty documentation: + "If your client is throttled, its rate limit will be reset after a minute interval. + Your client should expect and be able to handle this error code by waiting a minute before + making additional requests." + https://v2.developer.pagerduty.com/docs/rate-limiting + */ + Thread.sleep(httpResponseCode429RetryDelayTime); + + continue; + } else if (respCode == HttpStatus.SC_INTERNAL_SERVER_ERROR || + respCode == HttpStatus.SC_BAD_GATEWAY || + respCode == HttpStatus.SC_SERVICE_UNAVAILABLE || + respCode == HttpStatus.SC_GATEWAY_TIMEOUT) { + /* From PagerDuty documentation: + Retry on 500 or 5XX. 
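The branches above and below implement a small status-code policy: 202 succeeds, 400 fails fast, 429 sleeps and retries, and the listed 5xx codes retry. Distilled into one predicate (a sketch, not part of the Argus code):

    // Which PagerDuty response codes are worth retrying, per the branches here.
    static boolean isRetryable(int status) {
        return status == 429                   // rate limited: wait, then retry
            || status == 500 || status == 502  // transient server-side failures
            || status == 503 || status == 504;
    }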
+ https://v2.developer.pagerduty.com/docs/events-api-v2#api-response-codes--retry-logic + */ + loggerMsg = MessageFormat.format("Failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}) due to a server-side error.", + routingKey, eventAction.getEventActionString(), dedupKey); + LOGGER.warn(loggerMsg); + + continue; + } else { + final String postResponseBody = EntityUtils.toString(response.getEntity()); + loggerMsg = MessageFormat.format("Failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}). Response code {3} response {4}", + routingKey, eventAction.getEventActionString(), dedupKey, respCode, postResponseBody); + LOGGER.error(loggerMsg); + + break; // don't retry + } + } catch (InterruptedIOException e) { + loggerMsg = MessageFormat.format("Interruption failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}). Exception {3}", + routingKey, eventAction.getEventActionString(), dedupKey, e.getMessage()); + LOGGER.warn(loggerMsg, e); + + continue; // retry + } catch (Exception e) { + loggerMsg = MessageFormat.format("Failure - send PagerDuty Message for PD routingKey {0} to {1} incident (dedupKey: {2}). Exception {3}", + routingKey, eventAction.getEventActionString(), dedupKey, e.getMessage()); + LOGGER.error(loggerMsg, e); + + break; // don't retry + } finally { + try { + if (response != null) { + response.close(); + } + } catch (IOException e) { + LOGGER.error("Exception while attempting to close post to PagerDuty response", e); + } + } + } + monitorService.modifyCounter(MonitorService.Counter.PAGERDUTY_NOTIFICATIONS_RETRIES, retries, null); + monitorService.modifyCounter(MonitorService.Counter.PAGERDUTY_NOTIFICATIONS_FAILED, result ? 0 : 1, null); + } else { + loggerMsg = MessageFormat.format("Sending PagerDuty notification is disabled. 
Not sending message for PD integration {0} to {1} incident (dedupKey: {2}).", + routingKey, eventAction.getEventActionString(), dedupKey); + LOGGER.warn(loggerMsg); + } + + if (StringUtils.isNotBlank(loggerMsg)) { + history.appendMessageNUpdateHistory(loggerMsg, null, 0); + } + return result; + } + + protected String generateJson(PagerDutyMessage message) { + JsonObject messageJson = new JsonObject(); + + messageJson.addProperty(PagerDutyMessage.ROUTING_KEY_FIELD, message.getRoutingKey()); + messageJson.addProperty(PagerDutyMessage.EVENT_ACTION_FIELD, message.getEventAction().getEventActionString()); + messageJson.addProperty(PagerDutyMessage.DEDUP_KEY_FIELD, message.getDedupKey()); + messageJson.addProperty(PagerDutyMessage.CLIENT_FIELD, message.getClient()); + messageJson.addProperty(PagerDutyMessage.CLIENT_URL_FIELD, message.getClientUrl()); + + JsonObject payloadJson = new JsonObject(); + payloadJson.addProperty(PagerDutyMessage.SUMMARY_FIELD, message.getSummary()); + if (message.getTimestamp() != null) { + payloadJson.addProperty(PagerDutyMessage.TIMESTAMP_FIELD, message.getTimestamp()); + } + payloadJson.addProperty(PagerDutyMessage.SOURCE_FIELD, message.getSource()); + if (message.getSeverity() != null) { + payloadJson.addProperty(PagerDutyMessage.SEVERITY_FIELD, message.getSeverity().getSeverityString()); + } + if (message.getComponent() != null) { + payloadJson.addProperty(PagerDutyMessage.COMPONENT_FIELD, message.getComponent()); + } + if (message.getGroup() != null) { + payloadJson.addProperty(PagerDutyMessage.GROUP_FIELD, message.getGroup()); + } + if (message.getEventClass() != null) { + payloadJson.addProperty(PagerDutyMessage.CLASS_FIELD, message.getEventClass()); + } + + Map customDetailsMap = message.getCustomDetailsMap(); + if (customDetailsMap != null && customDetailsMap.size() > 0) { + JsonObject customDetailsJson = new JsonObject(); + for (Map.Entry e : customDetailsMap.entrySet()) { + customDetailsJson.addProperty(e.getKey(), e.getValue()); + } + payloadJson.add(PagerDutyMessage.CUSTOM_DETAILS_FIELD, customDetailsJson); + } + messageJson.add(PagerDutyMessage.PAYLOAD_FIELD, payloadJson); + + List> links = message.getLinksList(); + if (links != null && links.size() > 0) { + JsonArray linksArrayJson = new JsonArray(); + for (Map l : links) { + JsonObject linkJson = new JsonObject(); + for (Map.Entry e : l.entrySet()) { + linkJson.addProperty(e.getKey(), e.getValue()); + } + linksArrayJson.add(linkJson); + } + messageJson.add(PagerDutyMessage.LINKS_FIELD, linksArrayJson); + } + return messageJson.toString(); + } + + @Override + public Properties getNotifierProperties() { + Properties result = super.getNotifierProperties(); + + for (Property property : Property.values()) { + result.put(property.getName(), property.getDefaultValue()); + } + return result; + } + + public enum Property { + + /** + * The PagerDuty endpoint. + */ + PAGERDUTY_ENDPOINT("notifier.property.pagerduty.endpoint", "https://events.pagerduty.com"), + /** + * The PagerDuty access token. + */ + PAGERDUTY_TOKEN("notifier.property.pagerduty.token", "TestToken"), + /** + * The PagerDuty proxy host. + */ + PAGERDUTY_PROXY_HOST("notifier.property.pagerduty.proxy.host", ""), + /** + * The PagerDuty proxy port. + */ + PAGERDUTY_PROXY_PORT("notifier.property.pagerduty.proxy.port", ""), + /** + * The PagerDuty proxy username. + */ + PAGERDUTY_PROXY_USERNAME("notifier.property.pagerduty.proxy.username", ""), + /** + * The PagerDuty proxy password. 
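Taken together with the entries that follow, the Property enum defines the whole configuration surface of this notifier. A sketch of overriding the defaults programmatically (keys come from the enum; values are illustrative placeholders):

    Properties props = new Properties();
    props.setProperty("notifier.property.pagerduty.endpoint", "https://events.pagerduty.com");
    props.setProperty("notifier.property.pagerduty.token", "my-integration-token"); // placeholder
    props.setProperty("notifier.property.pagerduty.maxPostAttempts", "3");
    props.setProperty("notifier.property.pagerduty.connectionpool.maxsize", "55");
    props.setProperty("notifier.property.pagerduty.connectionpool.maxperroute", "20");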
+ */ + PAGERDUTY_PROXY_PASSWORD("notifier.property.pagerduty.proxy.password", ""), + /** + * The PagerDuty connection max attempts to post notification. + */ + PAGERDUTY_POST_MAX_ATTEMPTS("notifier.property.pagerduty.maxPostAttempts", "3"), + /** + * The connection pool size for connecting to PagerDuty + */ + PAGERDUTY_CONNECTION_POOL_MAX_SIZE("notifier.property.pagerduty.connectionpool.maxsize", "55"), + /** + * The connection pool max per route for connecting to PagerDuty + */ + PAGERDUTY_CONNECTION_POOL_MAX_PER_ROUTE("notifier.property.pagerduty.connectionpool.maxperroute", "20"); + + private final String _name; + private final String _defaultValue; + + private Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default property value. + * + * @return The default property value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } + + public static class PagerDutyMessage { + public static final String DEFAULT_CLIENT = "Argus Alert"; + + public static final String ROUTING_KEY_FIELD = "routing_key"; + public static final String EVENT_ACTION_FIELD = "event_action"; + public static final String DEDUP_KEY_FIELD = "dedup_key"; + public static final String PAYLOAD_FIELD = "payload"; + public static final String CLIENT_FIELD = "client"; + public static final String CLIENT_URL_FIELD = "client_url"; + + // subfields for payload + public static final String SUMMARY_FIELD = "summary"; + public static final String SOURCE_FIELD = "source"; + public static final String SEVERITY_FIELD = "severity"; + public static final String TIMESTAMP_FIELD = "timestamp"; + public static final String COMPONENT_FIELD = "component"; + public static final String GROUP_FIELD = "group"; + public static final String CLASS_FIELD = "class"; + public static final String CUSTOM_DETAILS_FIELD = "custom_details"; + public static final String IMAGES_FIELD = "images"; + public static final String LINKS_FIELD = "links"; + + // subfields for custom_details + public static final String TRACKING_ID_SUB_FIELD = "TrackingID"; + public static final String METRIC_EXPRESSION_SUB_FIELD = "Evaluated Metric Expression"; + public static final String EVALUATION_TIME_SUB_FIELD = "Trigger Evaluation Time"; + public static final String COOLDOWN_SUB_FIELD = "Cooldown till"; + public static final String TRIGGERRED_METRIC_SUB_FIELD = "Triggered on Metric"; + public static final String TRIGGERED_DETAILS_SUB_FIELD = "Triggered Details"; + public static final String TRIGGERING_EVENT_VALUE_SUB_FIELD = "Triggering Event Value"; + public static final String CUSTOMER_TEXT_SUB_FIELD = "Customer Text"; + public static final String METRICS_TO_ANNOTATE_SUB_FIELD = "Metrics to annotate"; + + //subfields for links + public static final String LINK_HREF_SUB_FIELD = "href"; + public static final String LINK_TEXT_SUB_FIELD = "text"; + + private final String routingKey; + private final EventAction eventAction; + private final String dedupKey; + private String summary; + private String source; + private Severity severity; + private String timestamp; + private String component; + private String group; + private String eventClass; + private String client; + private String clientUrl; + + private final Map> payload = new HashMap<>(); + private final Map customDetailsMap = new HashMap<>(); + private final List> linksList = new LinkedList>(); + + protected 
PagerDutyMessage(String routingKey, EventAction eventAction, String dedupKey) { + this.routingKey = routingKey; + this.eventAction = eventAction; + this.dedupKey = dedupKey; + this.client = DEFAULT_CLIENT; + payload.put(CUSTOM_DETAILS_FIELD, customDetailsMap); + } + + protected void setEvaluatedMetricExpression(String metricExpression) { + customDetailsMap.put(METRIC_EXPRESSION_SUB_FIELD, metricExpression); + } + + protected void setTriggerEvaluationTime(Date evaluationTime) { + customDetailsMap.put(EVALUATION_TIME_SUB_FIELD, DATE_FORMAT.get().format(evaluationTime)); + } + + protected void setCooldown(Date cooldown) { + customDetailsMap.put(COOLDOWN_SUB_FIELD, DATE_FORMAT.get().format(cooldown)); + } + + protected void setTriggeredMetric(String metric) { + customDetailsMap.put(TRIGGERRED_METRIC_SUB_FIELD, metric); + } + + protected void setTriggerDetails(String triggerDetails) { + customDetailsMap.put(TRIGGERED_DETAILS_SUB_FIELD, triggerDetails); + } + + protected void setTriggeringEventValue(String eventValue) { + customDetailsMap.put(TRIGGERING_EVENT_VALUE_SUB_FIELD, eventValue); + } + + protected void setCustomerText(String customerText) { + customDetailsMap.put(CUSTOMER_TEXT_SUB_FIELD, customerText); + } + + protected void setMetricsToAnnotate(List metrics) { + customDetailsMap.put(METRICS_TO_ANNOTATE_SUB_FIELD, StringUtils.join(metrics)); + } + + protected void setTrackingID(String trackingID) { + customDetailsMap.put(TRACKING_ID_SUB_FIELD, trackingID); + } + + protected void addLink(String label, String link) { + Map linkMap = new HashMap<>(); + linkMap.put(LINK_TEXT_SUB_FIELD, label); + linkMap.put(LINK_HREF_SUB_FIELD, link); + linksList.add(linkMap); + } + + protected String getRoutingKey() { + return routingKey; + } + + protected EventAction getEventAction() { + return eventAction; + } + + protected String getDedupKey() { + return dedupKey; + } + + protected String getSummary() { + return summary; + } + + protected void setSummary(String summary) { + + this.summary = StringUtils.left(summary, 1024); // summary max length is 1024 + } + + protected String getSource() { + return source; + } + + protected void setSource(String source) { + this.source = source; + } + + protected Severity getSeverity() { + return severity; + } + + protected void setSeverity(Severity severity) { + this.severity = severity; + } + + protected String getTimestamp() { + return timestamp; + } + + protected void setTimestamp(Date timestamp) { + this.timestamp = DATE_FORMAT.get().format(timestamp); + } + + protected String getComponent() { + return component; + } + + protected void setComponent(String component) { + this.component = component; + } + + protected String getGroup() { + return group; + } + + protected void setGroup(String group) { + this.group = group; + } + + protected String getEventClass() { + return eventClass; + } + + protected void setEventClass(String eventClass) { + this.eventClass = eventClass; + } + + protected String getClient() { + return client; + } + + protected void setClient(String client) { + this.client = client; + } + + protected String getClientUrl() { + return clientUrl; + } + + protected void setClientUrl(String clientUrl) { + this.clientUrl = clientUrl; + } + + protected Map> getPayload() { + return payload; + } + + protected Map getCustomDetailsMap() { + return customDetailsMap; + } + + protected List> getLinksList() { + return linksList; + } + + protected enum EventAction { + TRIGGER("trigger"), + ACKNOWLEDGE("acknowledge"), + RESOLVE("resolve"); + + private final String 
eventActionString; + + EventAction(String eventActionString) { + this.eventActionString = eventActionString; + } + + String getEventActionString() { + return eventActionString; + } + } + + protected enum Severity { + CRITICAL("critical", 1), + ERROR("error", 2), + WARNING("warning", 3), + INFO("info", 4); + + private static final Map levelMap = generateLevelMap(); + private final String severityString; + private final int severityLevel; + + Severity(String severityString, int severityLevel) { + this.severityString = severityString; + this.severityLevel = severityLevel; + } + + String getSeverityString() { + return severityString; + } + + int getSeverityLevel() { + return severityLevel; + } + + /** + * Get Severity enum based on integer severity level; defaults to INFO. + * @param severityLevel int severity level + * @return severity enum + */ + static Severity ofLevel(int severityLevel) { + return levelMap.getOrDefault(severityLevel, INFO); + } + + private static Map generateLevelMap() { + Map map = new HashMap<>(); + for (Severity s : Severity.values()) { + map.put(s.getSeverityLevel(), s); + } + return ImmutableMap.copyOf(map); + } + } + } + +} +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusBooleanNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusBooleanNotifier.java new file mode 100644 index 000000000..573cba621 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusBooleanNotifier.java @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
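The Severity enum in PagerDutyMessage above is a plain level-to-enum lookup backed by an immutable map. For instance (a sketch; ofLevel is package-visible, so this assumes same-package access):

    Severity a = Severity.ofLevel(1); // CRITICAL
    Severity b = Severity.ofLevel(3); // WARNING
    Severity c = Severity.ofLevel(9); // unknown levels fall back to INFO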
+ */ + +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.RefocusService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.util.List; +import java.util.Properties; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Implementation of notifier interface for notifying Refocus. + * + * @author Janine Zou (yzou@salesforce.com), Ian Keck (ikeck@salesforce.com) + */ +public class RefocusBooleanNotifier extends AuditNotifier { + + private Logger _logger = LoggerFactory.getLogger(RefocusBooleanNotifier.class); + private RefocusService _refocusService; + + + /** + * Creates a new Refocus notifier. + * + * @param metricService The metric service. Cannot be null. + * @param annotationService The annotation service. Cannot be null. + * @param auditService The audit service. Cannot be null. + * @param config The system configuration. Cannot be null. + * @param emf The entity manager factory. Cannot be null. + */ + @Inject + public RefocusBooleanNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService, + RefocusService refocusService, SystemConfiguration config, Provider emf) { + super(metricService, annotationService, auditService, config, emf); + requireArgument(config != null, "The configuration cannot be null."); + this._refocusService = refocusService; + } + + @Override + protected boolean sendAdditionalNotification(NotificationContext context) { + return _sendRefocusNotification(context, true); + } + + @Override + protected boolean clearAdditionalNotification(NotificationContext context) { + return _sendRefocusNotification(context, false); + } + + // Future - TODO - userID from principal owner of alert, token_id from user preference. + private boolean _sendRefocusNotification(NotificationContext context, boolean isTriggerActive) { + requireArgument(context != null, "Notification context cannot be null."); + List aspectPaths = context.getNotification().getSubscriptions(); + + requireArgument(aspectPaths!=null && !aspectPaths.isEmpty(), "aspect paths (subscriptions) cannot be empty."); + + if(isTriggerActive) { + super.sendAdditionalNotification(context); + } else { + super.clearAdditionalNotification(context); + } + + // IMPORTANT - should interrupted exception be handled? + boolean result = true; + History history = context.getHistory(); + for (String aspect : aspectPaths) { + boolean tmp = _refocusService.sendRefocusNotification(aspect, + isTriggerActive ? 
"1" : "0", + "user_id", + "token_id", + history); + result = result && tmp; + } + + return result; + } + + + @Override + public String getName() { + return RefocusBooleanNotifier.class.getName(); + } + + @Override + public Properties getNotifierProperties() { + Properties notifierProps= super.getNotifierProperties(); + + for(Property property: Property.values()){ + notifierProps.put(property.getName(), property.getDefaultValue()); + } + + return notifierProps; + } + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarder.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarder.java new file mode 100644 index 000000000..16bd72ac7 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarder.java @@ -0,0 +1,797 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.RefocusService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.commons.lang.StringUtils; +import org.apache.http.HttpStatus; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.BasicResponseHandler; +import org.apache.http.impl.client.CloseableHttpClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.io.IOException; +import java.io.InterruptedIOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Properties; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Consumer; +import java.util.function.Supplier; + +/** + * Implementation of notifier interface for notifying Refocus. + * + * @author Ian Keck (ikeck@salesforce.com) + */ +// Future - rename to DefaultRefocusService +@Singleton +public class RefocusForwarder extends DefaultService implements RefocusService, NotificationForwarder { + + private static final Logger _logger = LoggerFactory.getLogger(RefocusForwarder.class); + private static final Logger _logger2 = LoggerFactory.getLogger(RefocusService.class); + + private String endpoint; + private String token; + protected SystemConfiguration config; + private boolean detailedLogging = true; // was false + private boolean detailedHistory = true; // was false // TODO - deploy with true or false? + private boolean mapToErrorLogging = false; // should be false! + + private LinkedBlockingQueue sampleQueue; + + private long max_send_interval_ms; + private int max_samples_to_send; + private long last_send_time; + private long last_forwarder_status_time; + private long forwarder_status_interval_ms; + private long interval_for_rate_limit_ms; + + private AtomicInteger queuedCounter; + private AtomicInteger deliveredCounter; + private AtomicInteger discardedCounter; + private int maxQueueLength; + + private Supplier stub_sender; + + + /** + * Creates a new Refocus Forwarder. + * + * @param config The system configuration. Cannot be null. + * @param emf The entity manager factory. Cannot be null. 
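The forwarder is a producer/consumer around a LinkedBlockingQueue: notifier threads put() samples and a single forwarding loop drains them in batches. Stripped of the Argus types, the pattern is (a sketch):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.LinkedBlockingQueue;

    class BatchingQueue<T> {
        private final LinkedBlockingQueue<T> queue = new LinkedBlockingQueue<>();

        void enqueue(T item) throws InterruptedException {
            queue.put(item); // producer side: notifier threads
        }

        List<T> drainBatch(int maxItems) {
            List<T> batch = new ArrayList<>();
            queue.drainTo(batch, maxItems); // consumer side: the forwarder loop
            return batch;
        }
    }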
+ */ + @Inject + public RefocusForwarder(SystemConfiguration config, Provider emf) { + super(config); + this.config = config; + this.stub_sender = null; + + endpoint = this.config.getValue(RefocusProperty.REFOCUS_ENDPOINT.getName(), RefocusProperty.REFOCUS_ENDPOINT.getDefaultValue()); + token = this.config.getValue(RefocusProperty.REFOCUS_TOKEN.getName(), RefocusProperty.REFOCUS_TOKEN.getDefaultValue()); + + // TODO - catch parse errors and log + max_samples_to_send = Integer.parseInt(this.config.getValue(RefocusProperty.REFOCUS_MAX_BULK_ITEMS.getName(), RefocusProperty.REFOCUS_MAX_BULK_ITEMS.getDefaultValue())); + max_send_interval_ms = Integer.parseInt(this.config.getValue(RefocusProperty.REFOCUS_SEND_INTERVAL_MS.getName(), RefocusProperty.REFOCUS_SEND_INTERVAL_MS.getDefaultValue())); + forwarder_status_interval_ms = Integer.parseInt(this.config.getValue(RefocusProperty.REFOCUS_FORWARDER_STATUS_INTERVAL_MS.getName(), RefocusProperty.REFOCUS_FORWARDER_STATUS_INTERVAL_MS.getDefaultValue())); + detailedHistory = Boolean.parseBoolean(this.config.getValue(RefocusProperty.REFOCUS_FORWARDING_HISTORY.getName(), RefocusProperty.REFOCUS_FORWARDING_HISTORY.getDefaultValue())); + detailedLogging = Boolean.parseBoolean(this.config.getValue(RefocusProperty.REFOCUS_PER_NOTIFICATION_LOGGING.getName(), RefocusProperty.REFOCUS_PER_NOTIFICATION_LOGGING.getDefaultValue())); + interval_for_rate_limit_ms = -1; + + + + + this.queuedCounter = new AtomicInteger(0); + this.deliveredCounter = new AtomicInteger(0); + this.discardedCounter = new AtomicInteger( 0); + this.maxQueueLength = 0; + this.sampleQueue = new LinkedBlockingQueue(); + this.last_send_time = System.currentTimeMillis(); + this.last_forwarder_status_time = this.last_send_time; + } + + + // -------------------------------------------------------------------- + // GetServiceProperties + // -------------------------------------------------------------------- + + // TODO - shouldn't this return current values? + @Override + public Properties getServiceProperties() { + Properties properties = new Properties(); + + for(RefocusProperty property: RefocusProperty.values()){ + properties.put(property.getName(), property.getDefaultValue()); + } + return properties; + } + + // -------------------------------------------------------------------- + // RefocusService Interface + // -------------------------------------------------------------------- + + @Override + public boolean sendRefocusNotification(String subject_aspect, String value, String user_id, String token_id, History history) { + + return sendRefocusNotification( subject_aspect, value, user_id, token_id, history, System.currentTimeMillis()); + } + + // FUTURE - pass in next fire time to handle expiration, and deprecate the previous method. + @Override + public boolean sendRefocusNotification(String subject_aspect, String value, String user_id, String token_id, History history, long nextFireTime) + { + if (!Boolean.valueOf(config.getValue(SystemConfiguration.Property.REFOCUS_ENABLED))) { + _logger2.info("Refocus notification is disabled."); // was _info() // TODO - log sample? 
+ return false; + } + + RefocusSample sample = new RefocusSample(subject_aspect, value, history, user_id, token_id, nextFireTime); + try + { + this.sampleQueue.put(sample); + this.queuedCounter.incrementAndGet(); + + // String infoMsg = MessageFormat.format("Refocus Sample {0} enqueued.", sample.toJSON()); + String infoMsg = MessageFormat.format("Refocus Sample {0} enqueued by {1}.", sample.toJSON(), this.hashCode()); + _logger2.debug(infoMsg); // was _debug(infoMsg) + history.appendMessageNUpdateHistory(infoMsg, null, 0); + } + catch (InterruptedException e) + { + String errMsg = MessageFormat.format("Refocus Sample {0} not enqueued. {1}", sample.toJSON(), e.getMessage()); + _logger2.error(errMsg); // _error(errMsg); + } + return true; + } + + + // -------------------------------------------------------------------- + // Notification Forwarder Interface + // -------------------------------------------------------------------- + + @Override + public int forwardNotifications() throws InterruptedException { + + int count = 0; + + long forwardDuration = 0; + long curDuration = System.currentTimeMillis() - this.last_send_time; + int queueSize = this.sampleQueue.isEmpty() ? 0 : this.sampleQueue.size(); // probably redundant + + if (queueSize > maxQueueLength) + { + maxQueueLength = queueSize; + } + + if (((queueSize > 0) && (curDuration >= this.max_send_interval_ms)) || (queueSize > this.max_samples_to_send)) + { + _info(MessageFormat.format("RefocusForwarder: forwarding {0} samples queued in {1}ms", queueSize, curDuration)); // DEBUG + + // FUTURE - write function to pull samples and filter expired notifications. (expired = nextFireTime + 1.5mins) + Duration send_duration = new Duration(); + ArrayList samples = new ArrayList(); + count = this.sampleQueue.drainTo(samples, this.max_samples_to_send); + + _info(MessageFormat.format("RefocusForwarder: got {0} samples to forward.", samples.size())); // DEBUG + + if (count > 0) + { + last_send_time = System.currentTimeMillis(); + Long jobId = sendMessage(samples); + + if (jobId != null) + { + this.deliveredCounter.addAndGet(count); + String infoMsg = MessageFormat.format("RefocusForwarder: {0} samples forwarded. {1} total samples forwarded.", count, this.deliveredCounter.get()); + _info(infoMsg); + + // Future, with jobId, get the upsert status and log any errors! + } else + { + this.discardedCounter.addAndGet(count); // TODO - don't do this until we really have discarded them. + count = 0; + + // IMPORTANT - to handle. If we aren't successful. We can hold on to the notifications and retry later. (Need a lastRequest) + String warnMsg = MessageFormat.format("RefocusForwarder: {0} samples dropped. {1} total samples dropped", count, this.discardedCounter.get()); + _warn(warnMsg); + + } + + // Future - improve - for now, use this extremely crude method to rate limit Refocus requests to 500/second. 
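forwardNotifications() above flushes either on age or on size. The condition distills to a single predicate (sketch):

    // Flush when samples have waited long enough, or when the queue exceeds the batch size.
    boolean timeToFlush(int queueSize, long msSinceLastSend, long maxSendIntervalMs, int maxSamplesToSend) {
        return (queueSize > 0 && msSinceLastSend >= maxSendIntervalMs) || queueSize > maxSamplesToSend;
    }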
+
+    @Override
+    public int getNotificationsEnqueued()
+    {
+        return this.queuedCounter.get();
+    }
+
+    @Override
+    public int getNotificationsDelivered()
+    {
+        return this.deliveredCounter.get();
+    }
+
+    @Override
+    public int getNotificationsDiscarded()
+    {
+        return this.discardedCounter.get();
+    }
+
+    @Override
+    public int getNotificationsProcessed()
+    {
+        return getNotificationsDelivered() + getNotificationsDiscarded();
+    }
+
+    @Override
+    public int getMaxQueueLength()
+    {
+        return this.maxQueueLength;
+    }
+
+    // --------------------------------------------------------------------
+    // Implementation
+    // --------------------------------------------------------------------
+
+    // Future - remove. this is a crude hack to rate limit. OK for now.
+    private void _limitTo500PerSecond(long dur) throws InterruptedException
+    {
+        Duration d = new Duration();
+        long sleep_ms = 0;
+
+        if (interval_for_rate_limit_ms == -1)
+        {
+            // Sleep period for rate limiting requests per minute. // FUTURE - this is a crude approach and should be removed.
+            int max_refocus_requests_per_minute = Integer.parseInt(this.config.getValue(RefocusProperty.REFOCUS_MAX_REQUESTS_PER_MINUTE.getName(), RefocusProperty.REFOCUS_MAX_REQUESTS_PER_MINUTE.getDefaultValue()));
+            if (max_refocus_requests_per_minute <= 0) {
+                max_refocus_requests_per_minute = Integer.parseInt(RefocusProperty.REFOCUS_MAX_REQUESTS_PER_MINUTE.getDefaultValue());
+            }
+            // assumes sleep is quantized to 10ms intervals in practice. (verify!)
+            interval_for_rate_limit_ms = (max_refocus_requests_per_minute > 6000) ? 0 : 60000 / max_refocus_requests_per_minute;
+        }
+
+        // Rate limit to REFOCUS_MAX_REQUESTS_PER_MINUTE sends per minute (default=500)
+        if (interval_for_rate_limit_ms > 0 && dur < interval_for_rate_limit_ms)
+        {
+            sleep_ms = interval_for_rate_limit_ms - dur;
+            Thread.sleep(sleep_ms);
+        }
+    }
+
+    private void _logForwarderStats() throws InterruptedException
+    {
+        if (System.currentTimeMillis() - this.last_forwarder_status_time > forwarder_status_interval_ms)
+        {
+            _logger.info("RefocusForwarder: enqueued={} delivered={}, discarded={}, maxQueueLength={}",
+                    getNotificationsEnqueued(), getNotificationsDelivered(), getNotificationsDiscarded(), getMaxQueueLength());
+            this.last_forwarder_status_time = System.currentTimeMillis();
+        }
+    }
+
+    private String notificationsToJSON(ArrayList<RefocusSample> notifications)
+    {
+        JsonArray items = new JsonArray();
+        for (RefocusSample s : notifications)
+        {
+            JsonObject sample = new JsonObject();
+            sample.addProperty("name", s.name);
+            sample.addProperty("value", s.value);
+            items.add(sample);
+        }
+        return items.toString();
+    }
+
+    private void addBody(HttpPost post, ArrayList<RefocusSample> notifications) throws Exception
+    {
+        String body = notificationsToJSON(notifications);
+        StringEntity bodyEntity = new StringEntity(body);
+        post.setEntity(bodyEntity);
+        post.setHeader("Content-type", "application/json");
+    }
+
+    public void setStubSender(Supplier<RefocusResponse> stub)
+    {
+        this.stub_sender = stub;
+    }
+
+
+// private void perItemLogging( ArrayList<RefocusSample> notifications, Consumer<String> f, String itemFormat)
+// {
+//     // TODO - unwrap and rethrow InterruptedException from all functional iterators.
+//     if (detailedLogging)
+//     {
+//         assert(StringUtils.isNotBlank(itemFormat));
+//         notifications.forEach( x -> { f.accept(MessageFormat.format(itemFormat, x.toJSON())); });
+//     }
+// }
+//
+// private void perItemHistory( ArrayList<RefocusSample> notifications, String itemFormat)
+// {
+//     // TODO - unwrap and rethrow InterruptedException from all functional iterators.
+//     if (detailedHistory)
+//     {
+//         assert(StringUtils.isNotBlank(itemFormat));
+//         String formatStr = itemFormat.replace("{0}", "%s");
+//         notifications.forEach( x -> { x.history.appendMessageNUpdateHistory(String.format(formatStr, x.toJSON()), null, 0); } );
+//     }
+// }
+
+
+    private void perItemLoggingAndHistory(ArrayList<RefocusSample> notifications, Consumer<String> f, String itemFormat)
+    {
+        // TODO - unwrap and rethrow InterruptedException from all functional iterators.
+        assert(StringUtils.isNotBlank(itemFormat));
+        String formatStr = itemFormat.replace("{0}", "%s");
+
+        notifications.forEach( x -> {
+            if (detailedLogging) {
+                f.accept(MessageFormat.format(itemFormat, x.toJSON()));
+            }
+            if (detailedHistory) {
+                x.history.appendMessageNUpdateHistory(String.format(formatStr, x.toJSON()), null, 0);
+            }
+        });
+    }
+
+
+    private void closeResponse(CloseableHttpResponse response)
+    {
+        try {
+            if (response != null)
+            {
+                response.close();
+            }
+        }
+        catch (Exception e)
+        {}
+    }
+
+    // This method exists to enable mocking of the client for unit testing.
+    // It is public because mocking seems to require it.
+    public CloseableHttpClient getHttpClient()
+    {
+        RefocusTransport refocusTransport = RefocusTransport.getInstance();
+        CloseableHttpClient httpclient = refocusTransport.getHttpClient(config);
+        return httpclient;
+    }
+
+    // FUTURE - IMPORTANT - the items are dequeued. We don't want to lose them; we need logic to sleep and retry.
+    // FUTURE - instead of the retry loop, add a retry handler as documented near the bottom of this page:
+    // future - https://hc.apache.org/httpcomponents-client-4.5.x/tutorial/html/fundamentals.html
+    // NOTE - An oddity of this code is that String.format() is used to generate a perItemMsgFormat with {N} notation used by MessageFormat.
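The NOTE above is worth a worked example: String.format() fills the %-style holes first and deliberately leaves a literal {0} behind for MessageFormat to fill per item later.

    String perItemMsgFormat = String.format("Failed to forward %s due to http error %d.", "{0}", 502);
    // -> "Failed to forward {0} due to http error 502."
    String perItemMsg = MessageFormat.format(perItemMsgFormat, sample.toJSON());
    // -> "Failed to forward {"name":...} due to http error 502."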
+
+    private Long sendMessage(ArrayList<RefocusSample> notifications) throws InterruptedException {
+        String failureMsg = null;
+        String perItemMsgFormat = null;
+        int count = notifications.size();
+        boolean success = false;
+        CloseableHttpClient httpclient = null;
+        Long refocusJobId = null;
+
+        if (Boolean.valueOf(config.getValue(SystemConfiguration.Property.REFOCUS_ENABLED)) && count > 0) {
+
+            int refreshMaxTimes = Integer.parseInt(config.getValue(RefocusProperty.REFOCUS_CONNECTION_REFRESH_MAX_TIMES.getName(), RefocusProperty.REFOCUS_CONNECTION_REFRESH_MAX_TIMES.getDefaultValue()));
+            try {
+                httpclient = getHttpClient();
+
+                Duration duration = new Duration();
+
+                HttpPost post = null;
+                CloseableHttpResponse response = null;
+                String responseBody = "";
+                try {
+                    post = new HttpPost(String.format("%s/v1/samples/upsert/bulk", endpoint));
+                    post.addHeader("Authorization", token);
+                    post.addHeader("Accept", "application/json");
+                    addBody(post, notifications);
+
+                    for (int i = 0; !success && (i <= refreshMaxTimes); i++)
+                    {
+                        failureMsg = ""; // clear for retry
+                        perItemMsgFormat = "";
+
+                        if (i > 0)
+                        {
+                            _info("RefocusForwarder: Retrying Bulk Upsert");
+                        }
+
+                        int respCode = 0;
+                        JsonObject parsedResponse = null;
+                        Duration post_duration = new Duration();
+
+                        try
+                        {
+                            // The stub sender is used to instrument the unit test
+                            if (this.stub_sender != null)
+                            {
+                                try
+                                {
+                                    response = null; // There is no HTTP response to release
+                                    RefocusResponse resp = this.stub_sender.get();
+                                    respCode = resp.resp_code;
+                                    parsedResponse = resp.result;
+                                    if (parsedResponse != null)
+                                    {
+                                        responseBody = resp.result.toString();
+                                    }
+                                }
+                                catch (RuntimeException ex)
+                                {
+                                    Throwable cause = ex.getCause();
+                                    if (cause == null)
+                                    {
+                                        throw new IOException("unknown");
+                                    }
+                                    else if (cause instanceof InterruptedIOException)
+                                    {
+                                        throw (InterruptedIOException) cause;
+                                    }
+                                    else if (cause instanceof IOException)
+                                    {
+                                        throw (IOException) cause;
+                                    }
+                                    else
+                                    {
+                                        throw new IOException("unknown");
+                                    }
+                                }
+                            } else
+                            {
+                                response = httpclient.execute(post);
+                                respCode = response.getStatusLine().getStatusCode();
+                                responseBody = new BasicResponseHandler().handleResponse(response);
+                                parsedResponse = (new JsonParser()).parse(responseBody).getAsJsonObject(); // TODO exceptions?
+                            }
+                        }
+                        catch (InterruptedIOException ex)
+                        {
+                            respCode = HttpStatus.SC_REQUEST_TIMEOUT;
+                            responseBody = "";
+                            parsedResponse = new JsonObject();
+                        }
+                        catch (IOException ex)
+                        {
+                            respCode = -1;
+                            responseBody = String.format("{ \"respCode\": \"%d\", \"message\": \"%s: %s\" }", respCode, ex.getClass().getName(), ex.getMessage());
+                            parsedResponse = (new JsonParser()).parse(responseBody).getAsJsonObject();
+                        }
+                        finally
+                        {
+                            post_duration.duration();
+                        }
+
+                        duration.duration();
+
+                        // Check for success
+                        // TODO - allow all 2XXs to be success?
+                        if (respCode == HttpStatus.SC_OK || respCode == HttpStatus.SC_CREATED || respCode == HttpStatus.SC_NO_CONTENT)
+                        {
+                            String upsertStatus = parsedResponse.get("status").getAsString();
+                            refocusJobId = parsedResponse.get("jobId").getAsLong();
+
+                            String infoMsg = MessageFormat.format("Refocus Forwarder: Refocus Job ID: {0}. {1} samples sent in {2}ms (post = {3}ms).",
+                                    refocusJobId, count, duration.duration(), post_duration.duration());
+                            _info(infoMsg);
+                            perItemMsgFormat = "Refocus sample {0} sent.";
+
+                            success = true;
+                            break;
+
+                        } else if (respCode == HttpStatus.SC_REQUEST_TIMEOUT)
+                        {
+                            // Indication that the session timed out; need to refresh and retry.
+                            failureMsg = MessageFormat.format("Refocus Forwarder: Failed to forward {0} samples due to session time out.", count);
+                            _warn(failureMsg);
+                            perItemMsgFormat = "Failed to forward {0} due to session time out.";
+
+                        } else if (respCode == 429)
+                        {
+                            // Indication that Refocus is throttling. Need to wait and retry.
+                            String warnMsg = MessageFormat.format("Refocus Forwarder: Refocus is Throttling ({0}) at {1}", respCode, System.currentTimeMillis());
+                            _error(warnMsg);
+                            perItemMsgFormat = "Failed to forward {0} due to refocus throttling.";
+
+                            // FUTURE - get the time to retry at from the 429 response body.
+                            if (i <= refreshMaxTimes - 1)
+                            {
+                                if (interval_for_rate_limit_ms > 0)
+                                {
+                                    Thread.sleep(interval_for_rate_limit_ms);
+                                }
+                            }
+
+                        } else if (respCode == HttpStatus.SC_BAD_GATEWAY || respCode == HttpStatus.SC_SERVICE_UNAVAILABLE || respCode == HttpStatus.SC_GATEWAY_TIMEOUT)
+                        {
+                            // Transient gateway errors; retry.
+                            failureMsg = MessageFormat.format("Refocus Forwarder: Failed to forward {0} samples due to http error {1}", count, respCode);
+                            _error(failureMsg);
+                            perItemMsgFormat = String.format("Failed to forward %s due to http error %d.", "{0}", respCode);
+
+                        } else {
+                            failureMsg = MessageFormat.format("Refocus Forwarder: Failed to forward {0} samples. Response code {1} response:\n{2}",
+                                    count, respCode, responseBody);
+                            _error(failureMsg);
+                            perItemMsgFormat = String.format("Failed to forward %s Refocus samples. Response code %d response: %s", "{0}", respCode, responseBody);
+                            break; // no retry
+                        }
+
+                        closeResponse(response);
+                        response = null;
+                    } // for
+
+                } catch (RuntimeException e)
+                {
+                    throw e;
+                }
+                catch (Exception e) {
+                    failureMsg = MessageFormat.format("Refocus Forwarder: Failed to forward {0} samples. Exception {1}\n{2}",
+                            count, e.getMessage(), stackTraceToString(e));
+                    _error(failureMsg);
+                    perItemMsgFormat = String.format("Failed to forward %s. Exception: %s", "{0}", e.getMessage());
+
+                } finally {
+
+                    closeResponse(response);
+                    response = null;
+
+                    if (post != null) {
+                        post.releaseConnection();
+                    }
+                }
+
+            } catch (RuntimeException ex) {
+
+                failureMsg = MessageFormat.format("Refocus Forwarder: Failed to forward {0} samples. Exception {1}\n{2}",
+                        count, ex.getMessage(), stackTraceToString(ex));
+                _error(failureMsg);
+
+                perItemMsgFormat = String.format("Failed to forward %s. Runtime exception: %s", "{0}", ex.getMessage());
+                perItemLoggingAndHistory(notifications, (x) -> _warn(x), perItemMsgFormat);
+
+                // Unwrap and throw InterruptedException if necessary
+                Throwable cause = ex.getCause();
+                if (cause != null && cause instanceof InterruptedException)
+                {
+                    throw (InterruptedException) cause;
+                }
+
+                throw new SystemException("Failed to forward Refocus notification.", ex);
+            }
+            // NOTE - don't close the client here.
+
+        } else {
+            failureMsg = "RefocusForwarder: Refocus notification is disabled.";
+            perItemMsgFormat = String.format("Unable to send %s. Refocus notification is disabled.", "{0}");
+            _info(failureMsg);
+            refocusJobId = null;
+        }
+
+        // Per item logging if appropriate
+        perItemLoggingAndHistory(notifications, success ? (x) -> _info(x) : (x) -> _warn(x), perItemMsgFormat);
+
+        return refocusJobId;
+    }
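The FUTURE comments above sendMessage() point at HttpClient's built-in retry support. A sketch of what that could look like with HttpClient 4.5's ServiceUnavailableRetryStrategy; the retry count and back-off value are illustrative, not taken from this change:

    import org.apache.http.HttpResponse;
    import org.apache.http.HttpStatus;
    import org.apache.http.client.ServiceUnavailableRetryStrategy;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.http.protocol.HttpContext;

    CloseableHttpClient client = HttpClients.custom()
        .setServiceUnavailableRetryStrategy(new ServiceUnavailableRetryStrategy() {
            @Override
            public boolean retryRequest(HttpResponse response, int executionCount, HttpContext context) {
                int code = response.getStatusLine().getStatusCode();
                // Retry the same transient codes the hand-rolled loop above handles.
                return executionCount <= 3
                        && (code == 429
                            || code == HttpStatus.SC_REQUEST_TIMEOUT
                            || code == HttpStatus.SC_BAD_GATEWAY
                            || code == HttpStatus.SC_SERVICE_UNAVAILABLE
                            || code == HttpStatus.SC_GATEWAY_TIMEOUT);
            }
            @Override
            public long getRetryInterval() {
                return 1000L; // fixed back-off; a 429 Retry-After header could refine this
            }
        })
        .build();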
+
+    /**
+     * RefocusSample object to generate JSON.
+     */
+    class RefocusSample {
+
+        public static final String ASPECT_NAME_FIELD = "name";
+        public static final String ASPECT_VALUE_FIELD = "value";
+        public static final String USER_FIELD = "user";
+        public static final String TOKEN_ID_FIELD = "token_id";
+
+        public final String name;
+        public final String value;
+
+        public final String user_name;
+        public final String token_id;
+        public final History history;
+        public final long next_fire_time;
+
+        public RefocusSample(final String name, final String value, final History history, final String user_name, final String token_id, final long next_fire_time) {
+            this.name = name;
+            this.value = value;
+            this.user_name = user_name;
+            this.token_id = token_id;
+            this.history = history;
+            this.next_fire_time = next_fire_time;
+        }
+
+        /**
+         * Convert data to a JSON string.
+         *
+         * @return JSON string
+         */
+        public String toJSON(boolean terse) {
+            JsonObject sampleData = new JsonObject();
+
+            sampleData.addProperty(ASPECT_NAME_FIELD, name);
+            sampleData.addProperty(ASPECT_VALUE_FIELD, value);
+
+            if (!terse) {
+                sampleData.addProperty(USER_FIELD, user_name);
+                sampleData.addProperty(TOKEN_ID_FIELD, token_id);
+            }
+            return sampleData.toString();
+        }
+
+        public String toJSON() {
+            return toJSON(false);
+        }
+    }
+
+
+    // =================================================================================
+    // Support for injecting responses
+    // =================================================================================
+
+    public RefocusResponse makeResponse(int resp, JsonObject result)
+    {
+        return new RefocusResponse(resp, result);
+    }
+
+    public static class RefocusResponse {
+        public int resp_code;
+        public JsonObject result;
+
+        RefocusResponse(int resp_code, JsonObject result)
+        {
+            this.resp_code = resp_code;
+            this.result = result;
+        }
+    }
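The stub hook above is how unit tests can inject canned Refocus responses without a live endpoint. A sketch of typical usage; the values and the way the forwarder instance is obtained are hypothetical:

    RefocusForwarder forwarder = ...; // obtained via injection in a test
    JsonObject ok = new JsonObject();
    ok.addProperty("status", "OK");
    ok.addProperty("jobId", 42L);
    RefocusForwarder.RefocusResponse canned = forwarder.makeResponse(HttpStatus.SC_OK, ok);
    forwarder.setStubSender(() -> canned); // sendMessage() now sees status=OK, jobId=42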
+
+    // =================================================================================
+    // Utility Stuff
+    // =================================================================================
+
+    // TODO - move to an Argus-wide utility class.
+    public static class Duration {
+        long start;
+        long end;
+
+        public Duration() {
+            start = System.currentTimeMillis();
+            end = 0;
+        }
+
+        public Duration(long start_time)
+        {
+            start = start_time;
+            end = 0;
+        }
+
+        public Duration(Duration d)
+        {
+            this.start = d.start;
+            this.end = d.end;
+        }
+
+        public void start() {
+            start = System.currentTimeMillis();
+            end = 0;
+        }
+
+        public long duration() {
+            if (end == 0)
+            {
+                end = System.currentTimeMillis();
+            }
+            return end - start;
+        }
+
+        public long cur_duration() {
+            long now = System.currentTimeMillis();
+            return now - start;
+        }
+    }
+
+
+    // =========================================================================================
+    // Syntactic sugar
+    // Note: these exist because I haven't found a mechanism for setting the log level programmatically.
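On the Note above: when Logback is the SLF4J binding (an assumption about this deployment), levels can be changed at runtime by casting to the Logback logger type, which would make the mapToErrorLogging wrappers below unnecessary:

    import ch.qos.logback.classic.Level;
    import org.slf4j.LoggerFactory;

    ((ch.qos.logback.classic.Logger) LoggerFactory.getLogger(RefocusForwarder.class))
            .setLevel(Level.DEBUG); // or Level.ERROR to quiet the forwarder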
+ + private void _info(String msg) + { + if (mapToErrorLogging) { + _logger.error(msg); + } + else { + _logger.info(msg); + } + } + + private void _error(String msg) + { + _logger.error(msg); + } + + private void _warn(String msg) + { + if (mapToErrorLogging) { + _logger.error(msg); + } + else { + _logger.warn(msg); + } + } + + private void _debug(String msg) + { + if (mapToErrorLogging) { + _logger.error(msg); + } + else { + _logger.debug(msg); + } + } + + public static String stackTraceToString(Throwable e) + { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + e.printStackTrace(pw); + String sStackTrace = sw.toString(); // stack trace as a string + return sStackTrace; + } + +} +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusNotifier.java index 271cc94ac..651c634c5 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusNotifier.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusNotifier.java @@ -31,41 +31,23 @@ package com.salesforce.dva.argus.service.alert.notifier; -import com.google.gson.JsonObject; import com.google.inject.Inject; import com.google.inject.Provider; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AnnotationService; import com.salesforce.dva.argus.service.AuditService; import com.salesforce.dva.argus.service.MetricService; -import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.service.RefocusService; import com.salesforce.dva.argus.system.SystemConfiguration; -import com.salesforce.dva.argus.system.SystemException; -import org.apache.commons.httpclient.HttpClient; -import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager; -import org.apache.commons.httpclient.methods.PostMethod; -import org.apache.commons.httpclient.methods.StringRequestEntity; -import org.apache.commons.httpclient.params.HttpConnectionManagerParams; -import org.slf4j.Logger; - import javax.persistence.EntityManager; -import java.util.List; -import java.util.Properties; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; /** * Implementation of notifier interface for notifying Refocus. * - * @author Janine Zou (yzou@salesforce.com) + * @author Janine Zou (yzou@salesforce.com), Ian Keck (ikeck@salesforce.com) */ -public class RefocusNotifier extends AuditNotifier { - - @SLF4JTypeListener.InjectLogger - private Logger _logger; - private final String endpoint; - private final String token; - +public class RefocusNotifier extends RefocusBooleanNotifier { /** * Creates a new Refocus notifier. @@ -73,243 +55,15 @@ public class RefocusNotifier extends AuditNotifier { * @param metricService The metric service. Cannot be null. * @param annotationService The annotation service. Cannot be null. * @param auditService The audit service. Cannot be null. + * @param refocusService The refocus service. Cannot be null. * @param config The system configuration. Cannot be null. * @param emf The entity manager factory. Cannot be null. 
     */
    @Inject
    public RefocusNotifier(MetricService metricService, AnnotationService annotationService, AuditService auditService,
-                          SystemConfiguration config, Provider<EntityManager> emf) {
-        super(metricService, annotationService, auditService, config, emf);
+                          RefocusService refocusService, SystemConfiguration config,
+                          Provider<EntityManager> emf) {
+        super(metricService, annotationService, auditService, refocusService, config, emf);
         requireArgument(config != null, "The configuration cannot be null.");
-        endpoint = _config.getValue(Property.REFOCUS_ENDPOINT.getName(), Property.REFOCUS_ENDPOINT.getDefaultValue());
-        token = _config.getValue(Property.REFOCUS_TOKEN.getName(), Property.REFOCUS_TOKEN.getDefaultValue());
-    }
-
-    @Override
-    protected void sendAdditionalNotification(NotificationContext context) {
-        _sendRefocusNotification(context, true);
-    }
-
-    @Override
-    protected void clearAdditionalNotification(NotificationContext context) {
-        _sendRefocusNotification(context, false);
-    }
-
-    private void _sendRefocusNotification(NotificationContext context, boolean isTriggerActive) {
-        List<String> aspectPaths = context.getNotification().getSubscriptions();
-
-        //TODO: get customer specified refocus sample values when UI is ready, currently use 1 for active trigger and 0 for non-active trigger
-
-        requireArgument(aspectPaths!=null && !aspectPaths.isEmpty(), "aspect paths (subscriptions) cannot be empty.");
-
-        for (String aspect : aspectPaths) {
-            sendMessage(aspect, isTriggerActive);
-        }
-    }
-
-    /**
-     * Sends an Refocus sample.
-     *
-     * @param aspectPath The Refocus aspect path.
-     * @param fired If the trigger is fired or not.
-     */
-    private void sendMessage(String aspectPath, boolean fired) {
-        if (Boolean.valueOf(_config.getValue(SystemConfiguration.Property.REFOCUS_ENABLED))) {
-            int refreshMaxTimes = Integer.parseInt(_config.getValue(Property.REFOCUS_CONNECTION_REFRESH_MAX_TIMES.getName(), Property.REFOCUS_CONNECTION_REFRESH_MAX_TIMES.getDefaultValue()));
-            try {
-
-                //TODO: get customer specified refocus sample values when UI is ready, currently use '1' for active trigger and '0' for non-active trigger
-
-                RefocusSample refocusSample = new RefocusSample(aspectPath, fired ? "1" : "0");
-                RefocusTransport refocusTransport = RefocusTransport.getInstance();
-                HttpClient httpclient = refocusTransport.getHttpClient(_config);
-
-                PostMethod post = null;
-                try {
-                    post = new PostMethod(String.format("%s/v1/samples/upsert", endpoint));
-                    post.setRequestHeader("Authorization", token);
-                    post.setRequestEntity(new StringRequestEntity(refocusSample.toJSON(), "application/json", null));
-
-                    for (int i = 0; i < 1 + refreshMaxTimes; i++) {
-
-                        int respCode = httpclient.executeMethod(post);
-
-                        // Check for success
-                        if (respCode == 200 || respCode == 201 || respCode == 204) {
-                            _logger.info("Success - send Refocus sample '{}'.", refocusSample.toJSON());
-                            break;
-                        } else if (respCode == 401) {
-                            // Indication that the session timedout, Need to refresh and retry
-                            continue;
-                        } else {
-                            _logger.error("Failure - send Refocus sample '{}'. Response code '{}' response '{}'",
-                                refocusSample.toJSON(), respCode, post.getResponseBodyAsString());
-                            break;
-                        }
-                    }
-                } catch (Exception e) {
-                    _logger.error("Failure - send Refocus sample '{}'. Exception '{}'", refocusSample.toJSON(), e);
-                } finally {
-                    if (post != null) {
-                        post.releaseConnection();
-                    }
-                }
-
-            } catch (RuntimeException ex) {
-                throw new SystemException("Failed to send an Refocus notification.", ex);
-            }
-        } else {
-            _logger.info("Sending Refocus notification is disabled. Not sending message for aspect '{}'.", aspectPath);
-        }
-    }
-
-    @Override
-    public String getName() {
-        return RefocusNotifier.class.getName();
-    }
-
-    @Override
-    public Properties getNotifierProperties() {
-        Properties notifierProps= super.getNotifierProperties();
-
-        for(Property property: Property.values()){
-            notifierProps.put(property.getName(), property.getDefaultValue());
-        }
-        return notifierProps;
-    }
-
-    /**
-     * Enumerates implementation specific configuration properties.
-     *
-     * @author Janine Zou (yzou@salesforce.com)
-     */
-    public enum Property {
-
-        /** The Refocus endpoint. */
-        REFOCUS_ENDPOINT("notifier.property.refocus.endpoint", "https://test.refocus.com"),
-        /** The Refocus access token. */
-        REFOCUS_TOKEN("notifier.property.refocus.token", "test-token"),
-        /** The Refocus proxy host. */
-        REFOCUS_PROXY_HOST("notifier.property.proxy.host", ""),
-        /** The Refocus port. */
-        REFOCUS_PROXY_PORT("notifier.property.proxy.port", ""),
-        /** The Refocus connection refresh max times. */
-        REFOCUS_CONNECTION_REFRESH_MAX_TIMES("notifier.property.refocus.refreshMaxTimes", "0");
-
-        private final String _name;
-        private final String _defaultValue;
-
-        private Property(String name, String defaultValue) {
-            _name = name;
-            _defaultValue = defaultValue;
-        }
-
-        /**
-         * Returns the property name.
-         *
-         * @return The property name.
-         */
-        public String getName() {
-            return _name;
-        }
-
-        /**
-         * Returns the default property value.
-         *
-         * @return The default property value.
-         */
-        public String getDefaultValue() {
-            return _defaultValue;
-        }
-    }
-
-    /**
-     * RefocusSample object to generate JSON.
-     *
-     * @author Janine Zou (yzou@salesforce.com)
-     */
-    public class RefocusSample {
-
-        public static final String ASPECT_NAME_FIELD = "name";
-        public static final String ASPECT_VALUE_FIELD = "value";
-        private final String name;
-        private final String value;
-
-        private RefocusSample(final String name, final String value) {
-            this.name = name;
-            this.value = value;
-        }
-
-        /**
-         * Convert data to a JSON string.
-         *
-         * @return JSON string
-         */
-        public String toJSON() {
-            JsonObject sampleData = new JsonObject();
-
-            sampleData.addProperty(ASPECT_NAME_FIELD, name);
-            sampleData.addProperty(ASPECT_VALUE_FIELD, value);
-            return sampleData.toString();
-        }
-
-    }
-
-    /**
-     * Manage Refocus connection, proxy and timeouts.
-     *
-     * @author Janine Zou (yzou@salesforce.com)
-     */
-    public static class RefocusTransport {
-
-        private static final int CONNECTION_TIMEOUT_MILLIS = 10000;
-        private static final int READ_TIMEOUT_MILLIS = 10000;
-        private final MultiThreadedHttpConnectionManager theConnectionManager;
-        {
-            theConnectionManager = new MultiThreadedHttpConnectionManager();
-
-            HttpConnectionManagerParams params = theConnectionManager.getParams();
-
-            params.setConnectionTimeout(CONNECTION_TIMEOUT_MILLIS);
-            params.setSoTimeout(READ_TIMEOUT_MILLIS);
-        }
-
-        // make the class singleton
-        private RefocusTransport() {
-
-        }
-
-        public static RefocusTransport getInstance() {
-            return RefocusTransportHolder.INSTANCE;
-        }
-
-        private static class RefocusTransportHolder {
-            private final static RefocusTransport INSTANCE = new RefocusTransport();
-        }
-
-        /**
-         * Get HttpClient with proper proxy and timeout settings.
-         *
-         * @param config The system configuration. Cannot be null.
- * - * @return HttpClient - */ - public HttpClient getHttpClient(SystemConfiguration config) { - HttpClient httpclient = new HttpClient(theConnectionManager); - - httpclient.getParams().setParameter("http.connection-manager.timeout", 2000L); // Wait for 2 seconds to get a connection from pool - - String host = config.getValue(Property.REFOCUS_PROXY_HOST.getName(), Property.REFOCUS_PROXY_HOST.getDefaultValue()); - - if (host != null && host.length() > 0) { - httpclient.getHostConfiguration().setProxy(host, - Integer.parseInt(config.getValue(Property.REFOCUS_PROXY_PORT.getName(), Property.REFOCUS_PROXY_PORT.getDefaultValue()))); - } - return httpclient; - } - - } - - } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusProperty.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusProperty.java new file mode 100644 index 000000000..c698e897b --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusProperty.java @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.alert.notifier; + +/** + * Enumerates implementation specific configuration properties. + * + * @author Ian Keck (ikeck@salesforce.com) + */ +public enum RefocusProperty { + + /** The Refocus endpoint. */ + REFOCUS_ENDPOINT("notifier.property.refocus.endpoint", "https://test.refocus.com"), + /** The Refocus access token. */ + REFOCUS_TOKEN("notifier.property.refocus.token", "test-token"), + /** The Refocus proxy host. */ + // QUESTION - should these be refocus specific? + REFOCUS_PROXY_HOST("notifier.property.proxy.host", ""), + /** The Refocus port. */ + REFOCUS_PROXY_PORT("notifier.property.proxy.port", ""), + /** The Refocus connection refresh max times. 
*/ + REFOCUS_CONNECTION_REFRESH_MAX_TIMES("notifier.property.refocus.refreshMaxTimes", "3"), + /** The Refocus forwarder bulk size **/ + REFOCUS_MAX_BULK_ITEMS("notifier.property.refocus.bulk.max_samples", "2000"), + /** The Refocus forwarder push interval **/ + REFOCUS_SEND_INTERVAL_MS("notifier.property.refocus.bulk.send_interval_ms", "10000"), + /** The Limit on requests per minute **/ + REFOCUS_MAX_REQUESTS_PER_MINUTE("notifier.property.refocus.maxRequestsPerMinute", "500"), + /** Configuration for a custom keystore for dev box testing **/ + // TODO - document how to use this! + REFOCUS_CUSTOM_KEYSTORE_PATH("notifier.property.refocus.keystorePath", ""), + REFOCUS_CUSTOM_KEYSTORE_PASSWORD("notifier.property.refocus.keystorePassword", ""), + REFOCUS_CUSTOM_KEYSTORE_KEY_PASSWORD("notifier.property.refocus.keyPassword", ""), + + /** Logging Levels **/ + REFOCUS_FORWARDING_HISTORY("notifier.property.refocus.forwardingHistory", "false"), + REFOCUS_PER_NOTIFICATION_LOGGING("notifier.property.refocus.detailedNotificationLogging", "false"), + REFOCUS_FORWARDER_STATUS_INTERVAL_MS("notifier.property.refocus.statusIntervalMs", "10000"); + + + + private final String _name; + private final String _defaultValue; + + private RefocusProperty(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default property value. + * + * @return The default property value. + */ + public String getDefaultValue() { + return _defaultValue; + } +} + +/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusTransport.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusTransport.java new file mode 100644 index 000000000..683f1cc55 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusTransport.java @@ -0,0 +1,278 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.alert.notifier; + +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.http.HttpHost; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.DefaultProxyRoutePlanner; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.ssl.SSLContexts; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import java.io.InputStream; +import java.security.KeyStore; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.text.MessageFormat; + +/** + * Manage Refocus connection, proxy and timeouts. + * + * @author Ian Keck (ikeck@salesforce.com) + */ +public class RefocusTransport +{ + + private static final Logger _logger = LoggerFactory.getLogger(RefocusTransport.class); + + private static final int CONNECTION_TIMEOUT_MILLIS = 10000; + private static final int READ_TIMEOUT_MILLIS = 10000; + + private SSLContext theSslContext = null; + private boolean loadedSslContext = false; + private PoolingHttpClientConnectionManager theConnectionManager = null; + + // make the class singleton + private RefocusTransport() + { + } + + public static RefocusTransport getInstance() + { + return RefocusTransportHolder.INSTANCE; + } + + private static class RefocusTransportHolder + { + private final static RefocusTransport INSTANCE = new RefocusTransport(); + } + + + private SSLContext getSslContext(SystemConfiguration config) + { + if (!loadedSslContext ) + { + loadedSslContext = true; + try + { + KeyStore ks = readStore(config); + if (ks != null) + { + theSslContext = getCustomSSLContext(config, ks); + } + else // Default to TrustAll manager + { + SSLContext sslContext = SSLContext.getInstance("TLS"); + TrustManager[] temp = new TrustManager[] { new TrustAllManager() }; + sslContext.init(null, temp, null); + theSslContext = sslContext; + } + } catch (Exception e) // TODO - more correct exception capturing + { + // TODO - throw or log + } + } + return theSslContext; + } + + /** + * Construct the connection manager from the system config params. 
+ * + * @param config + * @return the connection manager + */ + private PoolingHttpClientConnectionManager getTheConnectionManager(SystemConfiguration config) + { + if (theConnectionManager == null) + { + SSLContext sslContext = getSslContext(config); + + RegistryBuilder rb = RegistryBuilder.create(); + rb.register("http", PlainConnectionSocketFactory.getSocketFactory()); // register 2x? + if (sslContext != null) + { + rb.register("https", new SSLConnectionSocketFactory(sslContext)); + } + rb.register("http", new PlainConnectionSocketFactory()); // IMPORTANT - DEBUG - added - register http socket factory. + Registry rsf = rb.build(); + + theConnectionManager = new PoolingHttpClientConnectionManager(rsf); + theConnectionManager.setMaxTotal(200); + } + return theConnectionManager; + } + + /** + * Get HttpClient with proper proxy and timeout settings. + * + * @param config The system configuration. Cannot be null. + * @return HttpClient + */ + public CloseableHttpClient getHttpClient(SystemConfiguration config) + { + + CloseableHttpClient httpClient = null; + try + { + SSLContext sslContext = getSslContext(config); + PoolingHttpClientConnectionManager cm = getTheConnectionManager(config); + + RequestConfig requestConfig = RequestConfig.custom() + .setConnectTimeout(CONNECTION_TIMEOUT_MILLIS) + .setConnectionRequestTimeout(CONNECTION_TIMEOUT_MILLIS) + .setSocketTimeout(READ_TIMEOUT_MILLIS).build(); + + DefaultProxyRoutePlanner routePlanner = getRoutePlanner(config); + + HttpClientBuilder builder = HttpClients.custom() + .setDefaultRequestConfig(requestConfig) + .setConnectionManager(cm); + + if (sslContext != null) + { + // TODO - add support for verification, read from config, which cert should be used? + builder = builder + .setSSLContext(sslContext) + .setSSLHostnameVerifier(new NoopHostnameVerifier()); + } + if (routePlanner != null) + { + builder = builder.setRoutePlanner(routePlanner); + } + httpClient = builder.build(); + + } catch (Exception e) + { + // TODO - how to handle + } + return httpClient; + } + + + private KeyStore readStore(SystemConfiguration config) throws Exception + { + // TODO - replace with read key and cert file names and construct keystore (in memory) using fixed passwords. + // See example in infra-security + + String keystorePath = config.getValue(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PATH.getName(), RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PATH.getDefaultValue()); + String keystorePassword = config.getValue(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PASSWORD.getName(), RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PASSWORD.getDefaultValue()); + + if (keystorePath.isEmpty() || keystorePassword.isEmpty()) + { + return null; + } + + _logger.info(MessageFormat.format("Refocus: Keystore={0} KeystorePW={1}", keystorePath, keystorePassword)); // DEBUG - IMPORTANT _ REMOVE + + try (InputStream keyStoreStream = this.getClass().getResourceAsStream(keystorePath)) + { + KeyStore keyStore = KeyStore.getInstance("JKS"); // or "PKCS12" // TODO - what? 
+            keyStore.load(keyStoreStream, keystorePassword.toCharArray());
+            return keyStore;
+        }
+    }
+
+    private SSLContext getCustomSSLContext(SystemConfiguration config, KeyStore ks) throws Exception
+    {
+        String keyPassword = config.getValue(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_KEY_PASSWORD.getName(), RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_KEY_PASSWORD.getDefaultValue());
+
+        if (ks == null || keyPassword == null || keyPassword.isEmpty())
+        {
+            return null;
+        }
+        _logger.info(MessageFormat.format("Refocus: KeyPassword={0}", keyPassword)); // DEBUG - IMPORTANT - REMOVE
+
+        theSslContext = SSLContexts.custom()
+                .loadKeyMaterial(ks, keyPassword.toCharArray()) // use null as the second param if there is no separate key password
+                .build();
+        // build() returns an initialized SSLContext; re-calling init() here would discard the loaded key material.
+        return theSslContext;
+    }
+
+    private DefaultProxyRoutePlanner getRoutePlanner(SystemConfiguration config) throws Exception
+    {
+        DefaultProxyRoutePlanner routePlanner = null;
+        try
+        {
+            String proxyHost = config.getValue(RefocusProperty.REFOCUS_PROXY_HOST.getName(), RefocusProperty.REFOCUS_PROXY_HOST.getDefaultValue());
+            String proxyPort = config.getValue(RefocusProperty.REFOCUS_PROXY_PORT.getName(), RefocusProperty.REFOCUS_PROXY_PORT.getDefaultValue());
+            int port = proxyPort.isEmpty() ? -1 : Integer.parseInt(proxyPort);
+
+            _logger.info(MessageFormat.format("Refocus: Proxy={0} Port={1}", proxyHost, port)); // DEBUG - IMPORTANT - REMOVE
+
+            if (proxyHost != null && !proxyHost.isEmpty() && port != -1)
+            {
+                HttpHost proxy = new HttpHost(proxyHost, port);
+                routePlanner = new DefaultProxyRoutePlanner(proxy);
+            }
+        }
+        catch (Exception e)
+        {}
+
+        return routePlanner;
+    }
+
+
+    // IMPORTANT - replace with a better trust manager! Necessary in PRD to Proxy?
+
+    public class TrustAllManager implements X509TrustManager {
+        public void checkClientTrusted(X509Certificate[] cert, String authType) throws CertificateException {
+        }
+
+        public void checkServerTrusted(X509Certificate[] cert, String authType) throws CertificateException {
+        }
+
+        public X509Certificate[] getAcceptedIssuers() {
+            return null;
+        }
+    }
+
+}
+
+
+/* Copyright (c) 2019, Salesforce.com, Inc. All rights reserved. */
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusValueNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusValueNotifier.java
new file mode 100644
index 000000000..691881d11
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/notifier/RefocusValueNotifier.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright (c) 2016, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ *    this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ *    this list of conditions and the following disclaimer in the documentation
+ *    and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ *    be used to endorse or promote products derived from this software without
+ *    specific prior written permission.
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.inject.Inject; +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.RefocusService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.system.SystemConfiguration; + +import com.salesforce.dva.argus.util.AlertUtils; +import com.salesforce.dva.argus.util.TemplateReplacer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.sql.Date; +import java.text.MessageFormat; +import java.util.List; +import java.util.Properties; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Implementation of notifier interface for notifying Refocus. + * + * @author Ian Keck (ikeck@salesforce.com) + */ +public class RefocusValueNotifier extends AuditNotifier { + + private static final Logger _logger = LoggerFactory.getLogger(RefocusValueNotifier.class); + + private RefocusService _refocusService; + + /** + * Creates a new Refocus notifier. + * + * @param metricService The metric service. Cannot be null. + * @param annotationService The annotation service. Cannot be null. + * @param auditService The audit service. Cannot be null. + * @param refocusService The refocus service. Cannot be null. + * @param config The system configuration. Cannot be null. + * @param emf The entity manager factory. Cannot be null. + */ + @Inject + public RefocusValueNotifier(MetricService metricService, + AnnotationService annotationService, + AuditService auditService, + RefocusService refocusService, + SystemConfiguration config, + Provider emf) { + super(metricService, annotationService, auditService, config, emf); + requireArgument(config != null, "The configuration cannot be null."); + this._refocusService = refocusService; + } + + @Override + protected boolean sendAdditionalNotification(NotificationContext context) { + + _logger.info("In RefocusValueNotifier::sendAdditionalNotification()"); // IMPORTANT - DEBUG + double value = context.getTriggerEventValue(); + boolean rv = _sendRefocusNotification(context, _valueToString(context.getTriggerEventValue())); + _logger.info("Returning from RefocusValueNotifier::sendAdditionalNotification()"); // IMPORTANT - DEBUG + return rv; + } + + /** + * Refocus Value Notifiers are not Stateful. This method implementation is empty. 
+     *
+     * @param notificationContext The notification context.
+     */
+    @Override
+    public boolean clearNotification(NotificationContext notificationContext) { return true; }
+
+    @Override
+    protected boolean clearAdditionalNotification(NotificationContext context) { return true; }
+
+    private String _valueToString(double value)
+    {
+        // IMPORTANT - TODO - verify whether Refocus takes double precision values or just ints
+        return String.format("%f", value); // alternatively for int: String.format("%d", Math.round(value));
+    }
+
+    // TODO - userID from principal owner of alert, token_id from user preference.
+    private boolean _sendRefocusNotification(NotificationContext context, String value) {
+        requireArgument(context != null, "Notification context cannot be null.");
+        List<String> aspectPaths = context.getNotification().getSubscriptions();
+
+        requireArgument(aspectPaths != null && !aspectPaths.isEmpty(), "aspect paths (subscriptions) cannot be empty.");
+        super.sendAdditionalNotification(context);
+
+        boolean result = true;
+        History history = context.getHistory();
+        for (String aspect : aspectPaths) {
+            // IMPORTANT - TODO - should interrupted exception be handled?
+            _logger.info(MessageFormat.format("In RefocusValueNotifier::sendAdditionalNotification() sending {0} to aspect {1}", value, aspect)); // IMPORTANT - DEBUG
+            boolean tmp = _refocusService.sendRefocusNotification(aspect,
+                    value,
+                    "user_id",
+                    "token_id",
+                    history);
+            result = result && tmp;
+        }
+
+        return result;
+    }
+
+    /**
+     * Returns the audit entry body containing the alert information.
+     * The audit entry is different for RefocusValueNotifications.
+     *
+     * @param context The notification context.
+     * @param notificationStatus The source notification.
+     *
+     * @return The audit entry body to persist.
+     */
+    @Override
+    protected String getAuditBody(NotificationContext context, NotificationStatus notificationStatus) {
+        String notificationMessage = MessageFormat.format("Alert {0} was sent to Refocus at {1}\n", TemplateReplacer.applyTemplateChanges(context, context.getAlert().getName()),
+                DATE_FORMATTER.get().format(new Date(context.getTriggerFiredTime())));
+        Notification notification = null;
+        String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context);
+
+        for (Notification tempNotification : context.getAlert().getNotifications()) {
+            if (tempNotification.getName().equalsIgnoreCase(context.getNotification().getName())) {
+                notification = tempNotification;
+                break;
+            }
+        }
+
+        // IMPORTANT - review this with Sudhanshu
+        requireArgument(notification != null, "Notification in notification context cannot be null.");
+
+        StringBuilder sb = new StringBuilder();
+
+        sb.append(notificationMessage);
+        String customText = context.getNotification().getCustomText();
+        if (customText != null && customText.length() > 0) {
+            sb.append(TemplateReplacer.applyTemplateChanges(context, customText)).append("\n");
+        }
+        context.getAlertEvaluationTrackingID().ifPresent(trackingID -> {
+            sb.append("Tracking ID: " + trackingID + "\n");
+        });
+
+        sb.append(MessageFormat.format("Notification: {0}\n", TemplateReplacer.applyTemplateChanges(context, notification.getName())));
+
+        if (!expression.equals("")) sb.append(MessageFormat.format("Evaluated metric expression: {0}\n", expression));
+        else sb.append(MessageFormat.format("Evaluated metric expression: {0}\n", context.getAlert().getExpression()));
+
+        if (context.getEvaluatedMetricSnapshotURL().isPresent() && !context.getEvaluatedMetricSnapshotURL().get().equals("")) {
+            sb.append("\n\nSnapshot of the current view the evaluated metric data.\n");
+        } else {
+            if (!expression.equals("")) {
+                sb.append("\n\nClick here to view the evaluated metric data.\n");
+            }
+        }
+
+        sb.append("\n\nClick here for the current view of the metric data.\n\n");
+        sb.append("\n\nDisclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. ");
+        sb.append("If the data source has inherent lag or a large aggregation window is used during data collection, it is possible ");
+        sb.append("for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ");
+        sb.append("ensuring the time window used in alert expression is outside the range of the datasource lag.");
+        return sb.toString();
+    }
+
+    @Override
+    public String getName() {
+        return RefocusValueNotifier.class.getName();
+    }
+
+    @Override
+    public Properties getNotifierProperties() {
+        Properties notifierProps = super.getNotifierProperties();
+
+        for (Property property : Property.values()) {
+            notifierProps.put(property.getName(), property.getDefaultValue());
+        }
+
+        return notifierProps;
+    }
+
+}
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrievalContext.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrievalContext.java
new file mode 100644
index 000000000..3e3d1696b
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrievalContext.java
@@ -0,0 +1,54 @@
+package com.salesforce.dva.argus.service.alert.retriever;
+
+import com.salesforce.dva.argus.entity.Alert;
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.entity.Trigger;
+
+import java.util.Optional;
+
+import static com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus;
+
+public class ImageDataRetrievalContext {
+
+    private Alert alert;
+    private Trigger trigger;
+    private Long triggerFiredTime;
+    private Metric triggeredMetric;
+
+    private NotificationStatus notificationStatus;
+
+    public ImageDataRetrievalContext(Alert alert, Trigger trigger, Long triggerFiredTime, Metric triggeredMetric, NotificationStatus notificationStatus) {
+        this.alert = alert;
+        this.trigger = trigger;
+        this.triggerFiredTime = triggerFiredTime;
+        this.triggeredMetric = triggeredMetric;
+        this.notificationStatus = notificationStatus;
+    }
+
+    public ImageDataRetrievalContext(Alert alert, Trigger trigger, Metric triggeredMetric, NotificationStatus notificationStatus) {
+        this.alert = alert;
+        this.trigger = trigger;
+        this.triggeredMetric = triggeredMetric;
+        this.notificationStatus = notificationStatus;
+    }
+
+    public Alert getAlert() {
+        return alert;
+    }
+
+    public Trigger getTrigger() {
+        return trigger;
+    }
+
+    public Optional<Long> getTriggerFiredTime() {
+        return Optional.ofNullable(triggerFiredTime);
+    }
+
+    public Metric getTriggeredMetric() {
+        return triggeredMetric;
+    }
+
+    public NotificationStatus getNotificationStatus() {
+        return notificationStatus;
+    }
+}
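ImageDataRetrievalContext deliberately exposes triggerFiredTime as an Optional, since the second constructor omits it. A usage sketch; the alert, trigger, and metric values and the CLEARED status are assumed for illustration:

    ImageDataRetrievalContext ctx =
            new ImageDataRetrievalContext(alert, trigger, triggeredMetric, NotificationStatus.CLEARED);
    long shadeEnd = ctx.getTriggerFiredTime().orElse(System.currentTimeMillis());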
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetriever.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetriever.java
new file mode 100644
index 000000000..8c7633102
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetriever.java
@@ -0,0 +1,138 @@
+package com.salesforce.dva.argus.service.alert.retriever;
+
+import com.google.common.base.Strings;
+import com.google.common.collect.ImmutableList;
+import com.google.inject.Inject;
+import com.salesforce.dva.argus.entity.ImagePoints;
+import com.salesforce.dva.argus.entity.ImageProperties;
+import com.salesforce.dva.argus.entity.Trigger;
+import com.salesforce.dva.argus.service.ImageService;
+import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import com.salesforce.dva.argus.system.SystemException;
+import org.apache.commons.lang3.tuple.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+import static com.salesforce.dva.argus.entity.ImageProperties.ImageColors;
+import static com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus;
+
+public class ImageDataRetriever {
+    private static final Logger logger = LoggerFactory.getLogger(ImageDataRetriever.class);
+
+    private ImageService imageService;
+    protected final SystemConfiguration config;
+
+    @Inject
+    ImageDataRetriever(ImageService imageService, SystemConfiguration systemConfiguration) {
+        this.imageService = imageService;
+        this.config = systemConfiguration;
+    }
+
+    public String getImageURL(final Pair<String, byte[]> imageDetails) {
+        if (imageDetails == null) {
+            logger.error("Cannot fetch the Image URL if the details of the image are not present");
+            return null;
+        }
+        String imageID = imageDetails.getLeft();
+        if (imageID == null) {
+            logger.error("Cannot fetch the Image URL if the image ID is not present");
+            return null;
+        }
+        String template = config.getValue(AuditNotifier.Property.AUDIT_METRIC_IMAGE_URL_TEMPLATE.getName(),
+                AuditNotifier.Property.AUDIT_METRIC_IMAGE_URL_TEMPLATE.getDefaultValue());
+        return template.replaceAll("\\$imageID\\$", imageID);
+    }
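getImageURL() does a simple token substitution on the configured template; the $ characters must be escaped because replaceAll() treats its first argument as a regex. A worked example with an assumed template value:

    String template = "https://argus.example.com/images/$imageID$"; // assumed config value
    String url = template.replaceAll("\\$imageID\\$", "abc123");
    // -> "https://argus.example.com/images/abc123"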
+    public Pair<String, byte[]> getAnnotatedImage(final ImageDataRetrievalContext context) {
+        if (context == null) {
+            throw new IllegalArgumentException("The image data retrieval context cannot be null; the " +
+                    "parameters are required to fetch the image.");
+        }
+
+        ImageProperties imageProperties = new ImageProperties();
+        String chartName = context.getTriggeredMetric().getDisplayName();
+        if (!Strings.isNullOrEmpty(chartName)) {
+            imageProperties.setChartName(chartName);
+        }
+
+        if (context.getNotificationStatus() == NotificationStatus.TRIGGERED) {
+            Long triggerFiredTime = context.getTriggerFiredTime()
+                    .orElseGet(() -> {
+                        logger.error("Trigger fired time is required to generate the image for alert ID " +
+                                context.getAlert().getId() + ". Defaulting to use the current timestamp");
+                        return System.currentTimeMillis();
+                    });
+            imageProperties.setShadeYAxisArea(getYAxisCoordinates(triggerFiredTime, context.getTrigger().getInertia()));
+            imageProperties.setShadeXAxisArea(getXAxisCoordinates(context.getTrigger()));
+        }
+
+        return imageService.generateAndStoreImage(ImmutableList.of(context.getTriggeredMetric()), imageProperties, true);
+    }
+
+    private List<ImagePoints> getXAxisCoordinates(final Trigger trigger) {
+        ImagePoints dataPoints1, dataPoints2;
+        Double threshold = trigger.getThreshold();
+        String label;
+        Double maxPoint, minPoint, secondaryThreshold;
+
+        switch (trigger.getType()) {
+            case GREATER_THAN:
+                Double boundary = Double.sum(threshold, 1);
+                label = "value > " + threshold;
+                dataPoints1 = new ImagePoints(boundary, Double.MAX_VALUE, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case GREATER_THAN_OR_EQ:
+                label = "value >= " + threshold;
+                dataPoints1 = new ImagePoints(threshold, Double.MAX_VALUE, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case LESS_THAN:
+                label = "value < " + threshold;
+                dataPoints1 = new ImagePoints(Double.MIN_VALUE, threshold - 1, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case LESS_THAN_OR_EQ:
+                label = "value <= " + threshold;
+                dataPoints1 = new ImagePoints(Double.MIN_VALUE, threshold, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case EQUAL:
+                label = "value = " + threshold;
+                dataPoints1 = new ImagePoints(threshold, threshold, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case NOT_EQUAL:
+                label = "value != " + threshold;
+                dataPoints1 = new ImagePoints(Double.MIN_VALUE, threshold - 1, label, ImageColors.VERY_LIGHT_PINK);
+                dataPoints2 = new ImagePoints(Double.sum(threshold, 1), Double.MAX_VALUE, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1, dataPoints2);
+            case BETWEEN:
+                secondaryThreshold = trigger.getSecondaryThreshold();
+                maxPoint = Math.max(threshold, secondaryThreshold);
+                minPoint = Math.min(threshold, secondaryThreshold);
+                label = minPoint + " <= value <= " + maxPoint;
+                dataPoints1 = new ImagePoints(minPoint, maxPoint, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1);
+            case NOT_BETWEEN:
+                secondaryThreshold = trigger.getSecondaryThreshold();
+                maxPoint = Math.max(threshold, secondaryThreshold);
+                minPoint = Math.min(threshold, secondaryThreshold);
+                label = "value >= " + maxPoint + " or value <= " + minPoint;
+                dataPoints1 = new ImagePoints(Double.MIN_VALUE, minPoint, ImageColors.VERY_LIGHT_PINK);
+                dataPoints2 = new ImagePoints(maxPoint, Double.MAX_VALUE, label, ImageColors.VERY_LIGHT_PINK);
+                return ImmutableList.of(dataPoints1, dataPoints2);
+            case NO_DATA:
+                return ImmutableList.of();
+            default:
+                throw new SystemException("Unsupported trigger type " + trigger.getType());
+        }
+    }
+
+    private List<ImagePoints> getYAxisCoordinates(final Long triggerFiredTime, final Long inertia) {
+        Long shadeStartPoint = triggerFiredTime - inertia;
+
+        String label = "Inertia(seconds)=" + ((double) inertia / 1000L);
+        ImagePoints dataPoints = new ImagePoints(shadeStartPoint, triggerFiredTime, label, ImageColors.VERY_LIGHT_PINK);
+        return ImmutableList.of(dataPoints);
+    }
+
+}
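The Y-axis shading in getYAxisCoordinates() marks the inertia window that ends at the trigger fired time. A worked example with hypothetical values, using the corrected milliseconds-to-seconds label:

    long triggerFiredTime = 1_700_000_000_000L; // ms since epoch
    long inertia          = 300_000L;           // 5 minutes, in ms
    long shadeStart       = triggerFiredTime - inertia;
    String label = "Inertia(seconds)=" + ((double) inertia / 1000L); // "Inertia(seconds)=300.0"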
000000000..bb82e753d --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/alert/testing/AlertTestResults.java @@ -0,0 +1,106 @@ +package com.salesforce.dva.argus.service.alert.testing; + +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Metric; + +import java.math.BigInteger; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class AlertTestResults +{ + private String testUuid; + private Alert alert; + private String serializedAlert; + private String expression; + private String cronEntry; + private Long evaluationTime; + private List<Metric> metrics; + private Map<BigInteger, Map<Metric, Long>> triggerFirings; + private Set<BigInteger> evaluatedTriggers; + private Set<BigInteger> nonEvaluatedTriggers; + private List<String> messages; + private boolean bIsSkipped; + private boolean bIsFailed; + private boolean bIsNoData; + private boolean bIsValid; + private Map<String, String> tags; + private long latency; + private long evaluateOnlyLatency; + + + public AlertTestResults() {} + public AlertTestResults(String testUuid) { this.testUuid = testUuid; } + + public void setTestUuid(String testUuid) { this.testUuid = testUuid; } + public String getTestUuid() { return this.testUuid; } + + public void setAlert(Alert a) { this.alert = a; } + public Alert getAlert() { return this.alert; } + + public void setSerializedAlert(String serializedAlert) { this.serializedAlert = serializedAlert; } // NOTE - redundant + public String getSerializedAlert() { return this.serializedAlert; } + + public void setExpression(String expression) { this.expression = expression; } // NOTE - redundant + public String getExpression() { return this.expression; } + + public void setCronEntry(String cronEntry) { this.cronEntry = cronEntry; } // NOTE - redundant + public String getCronEntry() { return this.cronEntry; } + + public void setEvaluationTime(Long evaluationTime) { this.evaluationTime = evaluationTime; } + public Long getEvaluationTime() { return this.evaluationTime; } + + public void setMetrics(List<Metric> metrics) { this.metrics = metrics; } + public List<Metric> getMetrics() { return this.metrics; } + + // NOTE - we are assuming that triggers are uniquely named in the serialized alerts. + // TODO - better result format: e.g.
triggerName -> (map: metric -> firingTime) + // todo - better yet firingTime(s) & name -> (map: metric -> firingTime(s)) + // map: triggerId -> (map: metric -> firingTime) + // TODO - replace with trigger names + public void setTriggerFirings(Map<BigInteger, Map<Metric, Long>> triggerFirings) { this.triggerFirings = triggerFirings; } + public Map<BigInteger, Map<Metric, Long>> getTriggerFirings() { return this.triggerFirings; } + + public void setEvaluatedTriggers(Set<BigInteger> triggerIds) { this.evaluatedTriggers = triggerIds; } + public Set<BigInteger> getEvaluatedTriggers() { return this.evaluatedTriggers; } + + public void setNonEvaluatedTriggers(Set<BigInteger> triggerIds) { this.nonEvaluatedTriggers = triggerIds; } + public Set<BigInteger> getNonEvaluatedTriggers() { return this.nonEvaluatedTriggers; } + + // TODO - add support for recording notifications fired + + public void setMessages(List<String> messages) { this.messages = messages; } + public List<String> getMessages() { return this.messages; } + + public void setIsSkipped(boolean skipped) { this.bIsSkipped = skipped; } + public boolean getIsSkipped() { return this.bIsSkipped; } + + public void setIsFailed(boolean failed) { this.bIsFailed = failed; } + public boolean getIsFailed() { return this.bIsFailed; } + + public void setIsNoData(boolean noData) { this.bIsNoData = noData; } + public boolean getIsNoData() { return this.bIsNoData; } + + public void setIsValid(boolean valid) { this.bIsValid = valid; } + public boolean getIsValid() { return this.bIsValid; } + + public void setTags(Map<String, String> tags) { this.tags = tags; } + public Map<String, String> getTags() { return this.tags; } + + public void setLatency(long latency) { this.latency = latency; } + public long getLatency() { return this.latency; } + + public void setEvaluateOnlyLatency(long latency) { this.evaluateOnlyLatency = latency; } + public long getEvaluateOnlyLatency() { return this.evaluateOnlyLatency; } + + // TODO - void addMessage(String msg); + // TODO - void setTriggerFirings(firings, notEvaluated); + // TODO - void setTestRequest(Alert a, Long evalTime); + // TODO - void setSummary(boolean failed, boolean skipped, boolean noData, Map<String, String> tags, long latency, long evaluateOnlyLatency, List<String> messages); +} + + + + + diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/AnnotationRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/AnnotationRecordList.java new file mode 100644 index 000000000..642e83ebf --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/AnnotationRecordList.java @@ -0,0 +1,300 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission.
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.annotation; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; +import com.salesforce.dva.argus.entity.Annotation; + +import com.salesforce.dva.argus.service.schema.RecordFinder; +import net.openhft.hashing.LongHashFunction; + +import java.io.IOException; +import java.nio.charset.Charset; + +import org.apache.commons.codec.digest.DigestUtils; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Stores map of annotation identifier to annotation. 
Used to serialize and deserialize the annotation map. + * + */ +public class AnnotationRecordList implements RecordFinder<Annotation> { + + private Map<String, Annotation> _idToAnnotationMap = new HashMap<>(); + private String _scrollID; + private static ObjectMapper mapper = new ObjectMapper(); + + public AnnotationRecordList(List<Annotation> annotations, String scrollID) { + int count = 0; + for(Annotation annotation : annotations) { + _idToAnnotationMap.put(String.valueOf(count++), annotation); + } + setScrollID(scrollID); + } + + public AnnotationRecordList(List<Annotation> annotations, HashAlgorithm algorithm) { + for(Annotation annotation : annotations) { + String id = null; + + // Convert all timestamps to millis + long timestamp = annotation.getTimestamp(); + if (timestamp < 100000000000L) { + annotation.setTimestamp(timestamp * 1000); + } + + String annotationKey = Annotation.getIdentifierFieldsAsString(annotation); + if(HashAlgorithm.MD5.equals(algorithm)) { + id = DigestUtils.md5Hex(annotationKey); + } else if(HashAlgorithm.XXHASH.equals(algorithm)) { + id = String.valueOf(LongHashFunction.xx().hashChars(annotationKey)); + } else { + // Defaulting to md5 + id = DigestUtils.md5Hex(annotationKey); + } + _idToAnnotationMap.put(id, annotation); + } + } + + @Override + public List<Annotation> getRecords() { + return new ArrayList<>(_idToAnnotationMap.values()); + } + + @Override + public Set<String> getIdSet() { + return _idToAnnotationMap.keySet(); + } + + @Override + public String getScrollID() { + return _scrollID; + } + + @Override + public void setScrollID(String scrollID) { + this._scrollID = scrollID; + } + + @Override + public Annotation getRecord(String id) { + return _idToAnnotationMap.get(id); + } + + @VisibleForTesting + static String getHashedSearchIdentifier(Annotation annotation) { + HashFunction hf = Hashing.murmur3_128(); + String searchIdentifier = new StringBuilder().append(annotation.getScope()).append(annotation.getMetric()) + .append(annotation.getTags().toString()).append(annotation.getType()).toString(); + return hf.newHasher().putString(searchIdentifier, Charset.defaultCharset()).hash().toString(); + }
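A quick aside on the id generation above: because the document id is a deterministic hash of the annotation's identifier fields, re-indexing the same annotation overwrites the same Elasticsearch document instead of duplicating it. A minimal sketch with a hypothetical identifier string (the real format is whatever Annotation.getIdentifierFieldsAsString() produces):

import org.apache.commons.codec.digest.DigestUtils;

public class AnnotationIdSketch {
    public static void main(String[] args) {
        // Hypothetical identifier string; the real one comes from
        // Annotation.getIdentifierFieldsAsString(annotation).
        String annotationKey = "myscope:mymetric:{host=web1}:deploy:1559153225000";
        // Same input -> same id, so re-submitting an annotation overwrites
        // the existing document rather than creating a duplicate.
        System.out.println(DigestUtils.md5Hex(annotationKey));
    }
}

+ public static class IndexSerializer extends JsonSerializer<AnnotationRecordList> { + + public static final long MILLIS_IN_A_DAY = 86400000L; + public static final long MAX_ANNOTATION_AGE_MS = 400 * MILLIS_IN_A_DAY; + + @Override + public void serialize(AnnotationRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + + for(Map.Entry<String, Annotation> entry : list._idToAnnotationMap.entrySet()) { + Annotation annotation = entry.getValue(); + + if(isAnnotationTimestampOld(annotation.getTimestamp())) continue; + + jgen.writeRaw("{ \"index\" : {\"_index\" : \"" + getAnnotationIndex(annotation.getTimestamp()) + "\",\"_type\": \"_doc" + "\",\"_id\" : \"" + entry.getKey() + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + Map<String, String> fieldsData = new HashMap<>(); + fieldsData.put(AnnotationRecordType.SCOPE.getName(), annotation.getScope()); + fieldsData.put(AnnotationRecordType.METRIC.getName(), annotation.getMetric()); + fieldsData.put(AnnotationRecordType.TAGS.getName(), mapper.writeValueAsString(annotation.getTags())); + fieldsData.put(AnnotationRecordType.SOURCE.getName(), annotation.getSource()); + fieldsData.put(AnnotationRecordType.TYPE.getName(), annotation.getType()); + fieldsData.put(AnnotationRecordType.ID.getName(), annotation.getId()); + fieldsData.put(AnnotationRecordType.FIELDS.getName(), mapper.writeValueAsString(annotation.getFields())); +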
fieldsData.put(AnnotationRecordType.SEARCH_ID.getName(), getHashedSearchIdentifier(annotation)); + fieldsData.put(AnnotationRecordType.TIMESTAMP.getName(), Long.toString(annotation.getTimestamp())); + jgen.writeRaw(mapper.writeValueAsString(fieldsData)); + jgen.writeRaw(System.lineSeparator()); + } + } + + private boolean isAnnotationTimestampOld(Long timestampMillis) { + return System.currentTimeMillis() - timestampMillis > MAX_ANNOTATION_AGE_MS; + } + + private String getAnnotationIndex(Long epochTimestamp) { + Date annotationDate = new Date(epochTimestamp); + SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM"); + return String.format("%s-%s", ElasticSearchAnnotationService.ANNOTATION_INDEX_TEMPLATE_PATTERN_START, formatter.format(annotationDate)); + } + } + + public static class Deserializer extends JsonDeserializer<AnnotationRecordList> { + + @Override + public AnnotationRecordList deserialize(JsonParser jp, DeserializationContext context) + throws IOException { + + String scrollID = null; + Annotation annotation; + List<Annotation> records = Collections.emptyList(); + + JsonNode rootNode = jp.getCodec().readTree(jp); + if(rootNode.has("_scroll_id")) { + scrollID = rootNode.get("_scroll_id").asText(); + } + JsonNode hits = rootNode.get("hits").get("hits"); + + if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { + records = new ArrayList<>(hits.size()); + Iterator<JsonNode> iter = hits.elements(); + while(iter.hasNext()) { + JsonNode hit = iter.next(); + JsonNode source = hit.get("_source"); + + JsonNode scopeNode = source.get(AnnotationRecordType.SCOPE.getName()); + JsonNode metricNode = source.get(AnnotationRecordType.METRIC.getName()); + JsonNode sourceNode = source.get(AnnotationRecordType.SOURCE.getName()); + JsonNode idNode = source.get(AnnotationRecordType.ID.getName()); + JsonNode typeNode = source.get(AnnotationRecordType.TYPE.getName()); + JsonNode timestampNode = source.get(AnnotationRecordType.TIMESTAMP.getName()); + JsonNode tagsNode = source.get(AnnotationRecordType.TAGS.getName()); + JsonNode fieldsNode = source.get(AnnotationRecordType.FIELDS.getName()); + @SuppressWarnings("unchecked") + Map<String, String> fieldsMap = mapper.readValue(fieldsNode.asText(), HashMap.class); + annotation = new Annotation(sourceNode.asText(), idNode.asText(), typeNode.asText(), scopeNode.asText(), metricNode.asText(), timestampNode.asLong()); + annotation.setFields(fieldsMap); + @SuppressWarnings("unchecked") + Map<String, String> tags = mapper.readValue(tagsNode.asText(), HashMap.class); + annotation.setTags(tags); + records.add(annotation); + } + } + + return new AnnotationRecordList(records, scrollID); + } + } + + public enum HashAlgorithm { + MD5, + XXHASH; + + public static AnnotationRecordList.HashAlgorithm fromString(String str) throws IllegalArgumentException { + for(AnnotationRecordList.HashAlgorithm algo : AnnotationRecordList.HashAlgorithm.values()) { + if(algo.name().equalsIgnoreCase(str)) { + return algo; + } + } + + throw new IllegalArgumentException(str + " does not match any of the available algorithms."); + } + }
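To make the serializer's output concrete: for each annotation it emits an action line followed by a source line in Elasticsearch bulk (NDJSON) format. A rough sketch of one such pair, with a hypothetical index name, document id, and field values:

public class BulkPayloadSketch {
    public static void main(String[] args) {
        // Approximation of what IndexSerializer writes for one annotation;
        // all concrete values below are invented for illustration.
        String actionLine =
            "{ \"index\" : {\"_index\" : \"argus-annotation-2019-06\",\"_type\": \"_doc\",\"_id\" : \"0f8fad5b\"}}";
        String sourceLine =
            "{\"scope\":\"myscope\",\"metric\":\"mymetric\",\"sid\":\"...\",\"ts\":\"1559153225000\"}";
        System.out.println(actionLine);
        System.out.println(sourceLine);
    }
}

+ + /** + * Indicates the Annotation record field to be used for matching. + * + */ + public enum AnnotationRecordType { + + /** Match against the scope field. */ + SCOPE("scope"), + /** Match against the metric field.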
*/ + METRIC("metric"), + TAGS("tags"), + TIMESTAMP("ts"), + SOURCE("source"), + ID("id"), + TYPE("type"), + FIELDS("fields"), + SEARCH_ID("sid"); + + private String _name; + + private AnnotationRecordType(String name) { + _name = name; + } + + /** + * Returns the record type corresponding to the given name. + * + * @param name The case insensitive name to match against. Cannot be null. + * + * @return The corresponding record type. + * + * @throws IllegalArgumentException If no record type matches the given name. + */ + @JsonCreator + public static AnnotationRecordType fromName(String name) { + for (AnnotationRecordType type : AnnotationRecordType.values()) { + if (type.getName().equalsIgnoreCase(name)) { + return type; + } + } + + throw new IllegalArgumentException("Illegal record type: " + name); + } + + /** + * Returns the record type name. + * + * @return The record type name. + */ + public String getName() { + return _name; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationService.java index 0d853ff1d..3702eff08 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationService.java @@ -34,17 +34,19 @@ import com.google.inject.Inject; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.PrincipalUser; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AnnotationService; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.MonitorService.Counter; +import com.salesforce.dva.argus.service.NamedBinding; +import com.salesforce.dva.argus.service.AnnotationStorageService; import com.salesforce.dva.argus.service.TSDBService; import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; import org.slf4j.Logger; -import java.util.ArrayList; +import org.slf4j.LoggerFactory; + import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -63,12 +65,12 @@ public class DefaultAnnotationService extends DefaultService implements Annotati //~ Static fields/initializers ******************************************************************************************************************* private static final String USER_FIELD_NAME = "user"; + protected static final int MAX_ANNOTATION_SIZE_BYTES = 2000; //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; - private final TSDBService _tsdbService; + private Logger _logger = LoggerFactory.getLogger(getClass()); + private final AnnotationStorageService _annotationStorageService; private final MonitorService _monitorService; //~ Constructors ********************************************************************************************************************************* @@ -76,14 +78,14 @@ public class DefaultAnnotationService extends DefaultService implements Annotati /** * Creates a new DefaultAnnotationService object. * - * @param tsdbService The TSDB service used to perform annotation operations. Cannot be null.
- * @param monitorService The monitor service instance to use. Cannot be null. + * @param annotationStorageService The storage service used to perform annotation operations. Cannot be null. + * @param monitorService The monitor service instance to use. Cannot be null. */ @Inject - DefaultAnnotationService(TSDBService tsdbService, MonitorService monitorService, SystemConfiguration config) { + DefaultAnnotationService(AnnotationStorageService annotationStorageService, MonitorService monitorService, SystemConfiguration config) { super(config); - requireArgument(tsdbService != null, "The TSDB service cannot be null."); - _tsdbService = tsdbService; + requireArgument(annotationStorageService != null, "The annotation storage service cannot be null."); + _annotationStorageService = annotationStorageService; _monitorService = monitorService; } @@ -94,15 +96,16 @@ public List getAnnotations(String expression) { requireNotDisposed(); requireArgument(AnnotationReader.isValid(expression), "Invalid annotation expression: " + expression); - AnnotationReader reader = new AnnotationReader(_tsdbService); + AnnotationReader reader = new AnnotationReader(_annotationStorageService); List annotations = new LinkedList<>(); try { - _logger.debug("Retrieving annotations using {}.", expression); + _logger.info("Retrieving annotations using {}.", expression); annotations.addAll(reader.parse(expression, Annotation.class)); } catch (ParseException ex) { throw new SystemException("Failed to parse the given expression", ex); } + _logger.info("Number of annotations read={}", annotations.size()); _monitorService.modifyCounter(Counter.ANNOTATION_READS, annotations.size(), null); return annotations; } @@ -112,7 +115,7 @@ public List getQueries(String expression) { requireNotDisposed(); requireArgument(AnnotationReader.isValid(expression), "Invalid annotation expression: " + expression); - AnnotationReader reader = new AnnotationReader(_tsdbService); + AnnotationReader reader = new AnnotationReader(_annotationStorageService); List queries = new LinkedList<>(); try { @@ -128,11 +131,22 @@ public List getQueries(String expression) { public void updateAnnotations(Map annotations) { requireNotDisposed(); requireArgument(annotations != null, "The set of annotations cannot be null."); + List putAnnotationList = new LinkedList<>(); for (Entry entry : annotations.entrySet()) { PrincipalUser user = entry.getValue(); Annotation annotation = entry.getKey(); requireArgument(annotation != null, "The annotation cannot be null."); + if (annotation.computeSizeBytes() > MAX_ANNOTATION_SIZE_BYTES) { + _logger.debug("Annotation size of {} bytes exceeded max size {} allowed for annotation {}.", + annotation.computeSizeBytes(), + MAX_ANNOTATION_SIZE_BYTES, + annotation); + Map tags = new HashMap<>(); + tags.put("source", annotation.getSource()); + _monitorService.modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, tags); + continue; + } Map fields = new HashMap<>(annotation.getFields()); String userName; @@ -143,9 +157,12 @@ public void updateAnnotations(Map annotations) { fields.put(USER_FIELD_NAME, userName); } annotation.setFields(fields); + putAnnotationList.add(annotation); + } + _monitorService.modifyCounter(Counter.ANNOTATION_WRITES, putAnnotationList.size(), null); + if (!putAnnotationList.isEmpty()) { + _annotationStorageService.putAnnotations(putAnnotationList); } - _monitorService.modifyCounter(Counter.ANNOTATION_WRITES, annotations.size(), null); - _tsdbService.putAnnotations(new ArrayList<>(annotations.keySet())); } 
@Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DualAnnotationService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DualAnnotationService.java new file mode 100644 index 000000000..338554190 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/DualAnnotationService.java @@ -0,0 +1,206 @@ +package com.salesforce.dva.argus.service.annotation; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.service.AnnotationStorageService; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.NamedBinding; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; + +/* + * Annotation service that reads annotations from both TSDB and ElasticSearch. + * It defaults to writing annotations to ElasticSearch. + */ +@Singleton +public class DualAnnotationService extends DefaultService implements AnnotationStorageService { + private static Logger logger = LoggerFactory.getLogger(DualAnnotationService.class); + + private final AnnotationStorageService _elasticSearchAnnotationService; + private final TSDBService _tsdbService; + private final long annotationTransitionEpochMs; + private final ExecutorService _executorService; + + @Inject + protected DualAnnotationService(SystemConfiguration config, + @NamedBinding AnnotationStorageService elasticSearchAnnotationService, + TSDBService tsdbService) { + super(config); + requireArgument(elasticSearchAnnotationService != null, "ElasticSearchAnnotationService cannot be null."); + requireArgument(tsdbService != null, "TSDBService cannot be null."); + _elasticSearchAnnotationService = elasticSearchAnnotationService; + _tsdbService = tsdbService; + this.annotationTransitionEpochMs = Long.parseLong(config.getValue(Property.ANNOTATION_TRANSITION_EPOCH_MS.getName(), + Property.ANNOTATION_TRANSITION_EPOCH_MS.getDefaultValue())); + int connCount = Integer.parseInt(config.getValue(Property.ANNOTATION_THREADPOOL_CONNECTION_COUNT.getName(), + Property.ANNOTATION_THREADPOOL_CONNECTION_COUNT.getDefaultValue())); + requireArgument(connCount >= 2, "Connection count should be >= 2"); + _executorService = Executors.newFixedThreadPool(connCount); + } + + @Override + public void dispose() { + super.dispose(); + _elasticSearchAnnotationService.dispose(); + _tsdbService.dispose(); + _executorService.shutdownNow(); + try { + _executorService.awaitTermination(10, TimeUnit.SECONDS); + } catch (InterruptedException e) { + logger.debug("Await Termination Interrupted", e); + } + } + + @Override + public void putAnnotations(List<Annotation> annotations) { + _elasticSearchAnnotationService.putAnnotations(annotations); + } + + @Override + public List<Annotation> getAnnotations(List<AnnotationQuery> queries) { + List<Annotation> annotations = new ArrayList<>(); + for (AnnotationQuery query : queries) { + convertTimestampToMillis(query); + + if(isQueryHavingEpochCutOff(query)){ + // split the annotation query between TSDB and ES + annotations.addAll(runSplitQueries(query)); + } else if (query.getEndTimestamp() < annotationTransitionEpochMs){ + // annotation query goes to TSDB + annotations.addAll(_tsdbService.getAnnotations(Arrays.asList(query))); + } else { + // annotation query goes to ES + annotations.addAll(_elasticSearchAnnotationService.getAnnotations(Arrays.asList(query))); + } + } + return annotations; + } + + protected boolean isQueryHavingEpochCutOff(AnnotationQuery query){ + return query.getStartTimestamp() < annotationTransitionEpochMs && query.getEndTimestamp() >= annotationTransitionEpochMs; + } + + protected List<Annotation> runSplitQueries(AnnotationQuery original) { + logger.info("Reading annotations from TSDB and ES"); + Map<AnnotationQuery, Future<List<Annotation>>> queryFutureMap = new HashMap<>(); + List<Annotation> annotations = new ArrayList<>(); + List<AnnotationQuery> queries = splitQuery(original); + + queryFutureMap.put(queries.get(0), _executorService.submit(new QueryWorker(AnnotationServiceType.TSDB, queries.get(0)))); + queryFutureMap.put(queries.get(1), _executorService.submit(new QueryWorker(AnnotationServiceType.ES, queries.get(1)))); + + for (Entry<AnnotationQuery, Future<List<Annotation>>> entry : queryFutureMap.entrySet()) { + try { + annotations.addAll(entry.getValue().get()); + } catch (InterruptedException | ExecutionException e) { + logger.warn("Failed to get annotations. Reason: " + e.getMessage()); + } + } + return annotations; + } + + protected List<AnnotationQuery> splitQuery(AnnotationQuery original) { + List<AnnotationQuery> queries = new ArrayList<>(); + queries.add(new AnnotationQuery(original.getScope(), + original.getMetric(), + original.getTags(), + original.getType(), + original.getStartTimestamp(), + annotationTransitionEpochMs)); + + queries.add(new AnnotationQuery(original.getScope(), + original.getMetric(), + original.getTags(), + original.getType(), + annotationTransitionEpochMs, + original.getEndTimestamp())); + return queries; + } + + protected void convertTimestampToMillis(AnnotationQuery query) { + long queryStart = query.getStartTimestamp(); + long queryEnd = query.getEndTimestamp(); + if (queryStart < 100000000000L) query.setStartTimestamp(queryStart * 1000); + if (queryEnd < 100000000000L) query.setEndTimestamp(queryEnd * 1000); + } + + public enum Property { + ANNOTATION_THREADPOOL_CONNECTION_COUNT("service.property.annotation.threadpool.connection.count", "2"), + ANNOTATION_TRANSITION_EPOCH_MS("service.property.annotation.transition.epoch.ms", "1559153225000"); + + private final String _name; + private final String _defaultValue; + + private Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + private String getDefaultValue() { + return _defaultValue; + } + + private String getName() { + return _name; + } + } + + private enum AnnotationServiceType { + TSDB, + ES; + }
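A small, self-contained sketch of the transition-epoch split implemented by isQueryHavingEpochCutOff() and splitQuery() above; the epoch value is the class's documented default, the query window is hypothetical:

public class EpochSplitSketch {
    public static void main(String[] args) {
        long transitionEpochMs = 1559153225000L; // default ANNOTATION_TRANSITION_EPOCH_MS
        long start = transitionEpochMs - 3_600_000L; // one hour before cutover (hypothetical)
        long end = transitionEpochMs + 3_600_000L;   // one hour after cutover (hypothetical)

        if (start < transitionEpochMs && end >= transitionEpochMs) {
            // The window straddles the cutover, so DualAnnotationService issues two queries:
            System.out.println("TSDB window: [" + start + ", " + transitionEpochMs + ")");
            System.out.println("ES window:   [" + transitionEpochMs + ", " + end + "]");
        }
    }
}

+ + /** + * Helper class used to parallelize query execution. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ + class QueryWorker implements Callable<List<Annotation>> { + private final AnnotationServiceType _annotationServiceType; + private final AnnotationQuery _annotationQuery; + + /** + * Creates a new QueryWorker object.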
+ * + * @param annotationServiceType ES or TSDB annotation endpoint type + * @param annotationQuery The annotation query issued + */ + public QueryWorker(AnnotationServiceType annotationServiceType, AnnotationQuery annotationQuery) { + this._annotationServiceType = annotationServiceType; + this._annotationQuery = annotationQuery; + } + + @Override + public List call() { + List annotations; + if(_annotationServiceType.equals(AnnotationServiceType.TSDB)){ + annotations = _tsdbService.getAnnotations(Arrays.asList(_annotationQuery)); + logger.info("Read {} annotations from TSDB", annotations.size()); + } else{ + annotations = _elasticSearchAnnotationService.getAnnotations(Arrays.asList(_annotationQuery)); + logger.info("Read {} annotations from ES", annotations.size()); + } + return annotations; + } + } +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationService.java new file mode 100644 index 000000000..cc196e7d3 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationService.java @@ -0,0 +1,449 @@ +package com.salesforce.dva.argus.service.annotation; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.Charset; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; +import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.annotation.AnnotationRecordList.HashAlgorithm; +import com.salesforce.dva.argus.service.AnnotationStorageService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; + +/** + * The Elastic Search implementation of the annotation storage service. 
+ * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +@Singleton +public class ElasticSearchAnnotationService extends DefaultService implements AnnotationStorageService { + private static Logger logger = LoggerFactory.getLogger(ElasticSearchAnnotationService.class); + private static ObjectMapper genericObjectMapper = new ObjectMapper(); + + protected final MonitorService monitorService; + + /** Global ES properties */ + private static final int ANNOTATION_MAX_RETRY_TIMEOUT = 300 * 1000; + private static final String FIELD_TYPE_TEXT = "text"; + private static final String FIELD_TYPE_DATE = "date"; + public static final int ANNOTATION_INDEX_MAX_RESULT_WINDOW = 10000; + private RestClient esRestClient; + + /** Annotation index properties */ + private HashAlgorithm idgenHashAlgo; + private static String ANNOTATION_INDEX_TEMPLATE_NAME; + private final int replicationFactorForAnnotationIndex; + private final int numShardsForAnnotationIndex; + private final ObjectMapper annotationMapper; + public static String ANNOTATION_INDEX_TEMPLATE_PATTERN_START; + + private static final String EXCEPTION_MESSAGE = "Your query returns {0} or more annotations." + " Please modify your query by reducing the time window."; + + @Inject + ElasticSearchAnnotationService(SystemConfiguration config, MonitorService monitorService, ElasticSearchUtils esUtils) { + super(config); + this.monitorService = monitorService; + /* Setup global ES state */ + String algorithm = config.getValue(Property.ANNOTATION_ES_IDGEN_HASH_ALGO.getName(), Property.ANNOTATION_ES_IDGEN_HASH_ALGO.getDefaultValue()); + try { + idgenHashAlgo = HashAlgorithm.fromString(algorithm); + } catch(IllegalArgumentException e) { + logger.warn("{} is not supported by this service. Valid values are: {}.", algorithm, Arrays.asList(AnnotationRecordList.HashAlgorithm.values())); + idgenHashAlgo = HashAlgorithm.MD5; + } + logger.info("Using {} for Elasticsearch document id generation.", idgenHashAlgo); + + String[] nodes = config.getValue(Property.ANNOTATION_ES_ENDPOINT.getName(), Property.ANNOTATION_ES_ENDPOINT.getDefaultValue()).split(","); + HttpHost[] httpHosts = new HttpHost[nodes.length]; + for(int i = 0; i < nodes.length; i++) { + try { + URL url = new URL(nodes[i]); + httpHosts[i] = new HttpHost(url.getHost(), url.getPort(), url.getProtocol()); + } catch (MalformedURLException e) { + logger.error("One or more ElasticSearch endpoints are malformed. " + "If you have configured only a single endpoint, then ESAnnotationService will not function.", e); + } + } + RestClientBuilder.HttpClientConfigCallback clientConfigCallback = httpClientBuilder -> { + try { + int connCount = Integer.parseInt(config.getValue(Property.ANNOTATION_ES_CONNECTION_COUNT.getName(), + Property.ANNOTATION_ES_CONNECTION_COUNT.getDefaultValue())); + PoolingNHttpClientConnectionManager connMgr = + new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); + connMgr.setMaxTotal(connCount); + connMgr.setDefaultMaxPerRoute(connCount / httpHosts.length < 1 ? 1 : connCount / httpHosts.length); + httpClientBuilder.setConnectionManager(connMgr); + return httpClientBuilder; + } catch(Exception e) { + throw new SystemException(e); + } + }; + RestClientBuilder.RequestConfigCallback requestConfigCallback = requestConfigBuilder -> { + int connTimeout = Integer.parseInt(config.getValue(Property.ANNOTATION_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getName(), + Property.ANNOTATION_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getDefaultValue())); + int socketTimeout = Integer.parseInt(config.getValue(Property.ANNOTATION_ES_ENDPOINT_SOCKET_TIMEOUT.getName(), + Property.ANNOTATION_ES_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); + requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); + + logger.info("esRestClient set connectionTimeoutMillis {} socketTimeoutMillis {}", + connTimeout, socketTimeout); + + return requestConfigBuilder; + }; + esRestClient = RestClient.builder(httpHosts) + .setHttpClientConfigCallback(clientConfigCallback) + .setRequestConfigCallback(requestConfigCallback) + .setMaxRetryTimeoutMillis(ANNOTATION_MAX_RETRY_TIMEOUT) + .build(); + logger.info("esRestClient set MaxRetryTimeoutsMillis {}", ANNOTATION_MAX_RETRY_TIMEOUT); + + /* Set up annotation index state */ + annotationMapper = getAnnotationObjectMapper(new AnnotationRecordList.IndexSerializer()); + ANNOTATION_INDEX_TEMPLATE_NAME = config.getValue(Property.ANNOTATION_ES_INDEX_TEMPLATE_NAME.getName(), + Property.ANNOTATION_ES_INDEX_TEMPLATE_NAME.getDefaultValue()); + ANNOTATION_INDEX_TEMPLATE_PATTERN_START = config.getValue(Property.ANNOTATION_ES_INDEX_TEMPLATE_PATTERN_START.getName(), + Property.ANNOTATION_ES_INDEX_TEMPLATE_PATTERN_START.getDefaultValue()); + replicationFactorForAnnotationIndex = Integer.parseInt( + config.getValue(Property.ANNOTATION_ES_NUM_REPLICAS.getName(), Property.ANNOTATION_ES_NUM_REPLICAS.getDefaultValue())); + numShardsForAnnotationIndex = Integer.parseInt( + config.getValue(Property.ANNOTATION_ES_SHARDS_COUNT.getName(), Property.ANNOTATION_ES_SHARDS_COUNT.getDefaultValue())); + esUtils.createIndexTemplate(esRestClient, + ANNOTATION_INDEX_TEMPLATE_NAME, + ANNOTATION_INDEX_TEMPLATE_PATTERN_START, + this::createAnnotationIndexTemplateSettingsNode, + this::createAnnotationIndexTemplateMappingsNode); + } + + /** + * The set of implementation specific configuration properties. + * + */ + public enum Property { + + ANNOTATION_ES_ENDPOINT("service.property.annotation.elasticsearch.endpoint", "http://localhost:9200,http://localhost:9201"), + /** Connection timeout for ES REST client. */ + ANNOTATION_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS("service.property.annotation.elasticsearch.endpoint.connection.timeout.millis", "10000"), + /** Socket connection timeout for ES REST client. */ + ANNOTATION_ES_ENDPOINT_SOCKET_TIMEOUT("service.property.annotation.elasticsearch.endpoint.socket.timeout", "10000"), + /** Connection count for ES REST client. */ + ANNOTATION_ES_CONNECTION_COUNT("service.property.annotation.elasticsearch.connection.count", "10"), + /** The hashing algorithm to use for generating document id. 
*/ + ANNOTATION_ES_IDGEN_HASH_ALGO("service.property.annotation.elasticsearch.idgen.hash.algo", "MD5"), + /** Replication factor */ + ANNOTATION_ES_NUM_REPLICAS("service.property.annotation.elasticsearch.num.replicas", "1"), + /** Shard count */ + ANNOTATION_ES_SHARDS_COUNT("service.property.annotation.elasticsearch.shards.count", "6"), + /** Index template name */ + ANNOTATION_ES_INDEX_TEMPLATE_NAME("service.property.annotation.elasticsearch.indextemplate.name", "argus-annotation-template"), + /** Index template pattern match */ + ANNOTATION_ES_INDEX_TEMPLATE_PATTERN_START("service.property.annotation.elasticsearch.indextemplate.patternstart", "argus-annotation"); + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } + + /* Method to change the rest client. Used for testing. */ + protected void setESRestClient(RestClient restClient){ + this.esRestClient = restClient; + } + + private ObjectNode _createFieldNodeNoAnalyzer(String type) { + ObjectNode fieldNode = genericObjectMapper.createObjectNode(); + fieldNode.put("type", type); + return fieldNode; + } + + private ObjectNode createAnnotationIndexTemplateMappingsNode() { + ObjectNode propertiesNode = genericObjectMapper.createObjectNode(); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.SCOPE.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.METRIC.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.TAGS.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.SOURCE.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.ID.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.TYPE.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.FIELDS.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.SEARCH_ID.getName(), createAnnotationFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(AnnotationRecordList.AnnotationRecordType.TIMESTAMP.getName(), _createFieldNodeNoAnalyzer(FIELD_TYPE_DATE)); + + ObjectNode docNode = genericObjectMapper.createObjectNode(); + docNode.set("properties", propertiesNode); + + ObjectNode mappingsNode = genericObjectMapper.createObjectNode(); + mappingsNode.set("_doc", docNode); + + return mappingsNode; + } + + private ObjectNode createAnnotationFieldNodeAnalyzer(String type) { + ObjectNode fieldNode = genericObjectMapper.createObjectNode(); + fieldNode.put("type", type); + fieldNode.put("analyzer", "annotation_analyzer"); + ObjectNode keywordNode = genericObjectMapper.createObjectNode(); + keywordNode.put("type", "keyword"); + ObjectNode fieldsNode = genericObjectMapper.createObjectNode(); + fieldsNode.set("raw", keywordNode); + fieldNode.set("fields", fieldsNode); + return fieldNode; + } + + 
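Worth noting here: the raw keyword sub-field defined above is what the read path queries against. Both the writer (AnnotationRecordList.getHashedSearchIdentifier) and the query builder below compute the same murmur3-128 hash over scope, metric, tags, and type, so a term query on sid.raw retrieves exactly the documents for one series. A minimal sketch with invented series coordinates:

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.Charset;

public class SearchIdSketch {
    public static void main(String[] args) {
        // Hypothetical series coordinates, concatenated the same way as
        // getHashedSearchIdentifier(): scope + metric + tags + type.
        String searchIdentifier = "myscope" + "mymetric" + "{host=web1}" + "deploy";
        HashFunction hf = Hashing.murmur3_128();
        String sid = hf.newHasher()
                .putString(searchIdentifier, Charset.defaultCharset())
                .hash()
                .toString();
        System.out.println(sid); // matched at read time via the sid.raw term query
    }
}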
private ObjectNode createAnnotationIndexTemplateSettingsNode() { + ObjectNode annotationAnalyzer = genericObjectMapper.createObjectNode(); + annotationAnalyzer.put("tokenizer", "annotation_tokenizer"); + annotationAnalyzer.set("filter", genericObjectMapper.createArrayNode().add("lowercase")); + + ObjectNode analyzerNode = genericObjectMapper.createObjectNode(); + analyzerNode.set("annotation_analyzer", annotationAnalyzer); + + ObjectNode tokenizerNode = genericObjectMapper.createObjectNode(); + tokenizerNode.set("annotation_tokenizer", genericObjectMapper.createObjectNode().put("type", "pattern").put("pattern", ElasticSearchUtils.TOKENIZER_PATTERN)); + + ObjectNode analysisNode = genericObjectMapper.createObjectNode(); + analysisNode.set("analyzer", analyzerNode); + analysisNode.set("tokenizer", tokenizerNode); + + ObjectNode indexNode = genericObjectMapper.createObjectNode(); + indexNode.put("max_result_window", ANNOTATION_INDEX_MAX_RESULT_WINDOW); + indexNode.put("number_of_replicas", replicationFactorForAnnotationIndex); + indexNode.put("number_of_shards", numShardsForAnnotationIndex); + + ObjectNode settingsNode = genericObjectMapper.createObjectNode(); + settingsNode.set("analysis", analysisNode); + settingsNode.set("index", indexNode); + + return settingsNode; + } + + @Override + public void dispose() { + requireNotDisposed(); + shutdownElasticSearchService(); + } + + private void shutdownElasticSearchService(){ + try { + esRestClient.close(); + logger.info("Shutdown of ElasticSearch RESTClient complete"); + } catch (IOException e) { + logger.warn("ElasticSearch RestClient failed to shutdown properly.", e); + } + } + + /** + * Enumeration of supported HTTP methods. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ + private enum HttpMethod { + + /** POST operation. */ + POST("POST"), + /** PUT operation. */ + PUT("PUT"), + /** HEAD operation. */ + HEAD("HEAD"); + + private String name; + + HttpMethod(String name) { + this.setName(name); + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + @VisibleForTesting + static ObjectMapper getAnnotationObjectMapper(JsonSerializer serializer) { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + SimpleModule module = new SimpleModule(); + module.addSerializer(AnnotationRecordList.class, serializer); + module.addDeserializer(AnnotationRecordList.class, new AnnotationRecordList.Deserializer()); + mapper.registerModule(module); + + return mapper; + } + + @Override + public void putAnnotations(List annotations) { + String requestUrl = "_bulk"; + + try { + AnnotationRecordList indexAnnotationRecordList = new AnnotationRecordList(annotations, idgenHashAlgo); + String requestBody = annotationMapper.writeValueAsString(indexAnnotationRecordList); + Set failedRecords = new HashSet<>(); + ElasticSearchUtils.PutResponse putResponse = ElasticSearchUtils.performESRequest(esRestClient, requestUrl, requestBody); + + if(putResponse.isErrors()) { + for(ElasticSearchUtils.PutResponse.Item item : putResponse.getItems()) { + if (item.getIndex() != null && item.getIndex().getStatus() != HttpStatus.SC_CREATED) { + logger.warn("Failed to add record {} to index. 
Reason: {}", + indexAnnotationRecordList.getRecord(item.getIndex().get_id()), + annotationMapper.writeValueAsString(item.getIndex().getError())); + failedRecords.add(indexAnnotationRecordList.getRecord(item.getIndex().get_id())); + } + } + } + + if (failedRecords.size() > 0) { + logger.warn("{} records were not written to annotation ES", failedRecords.size()); + monitorService.modifyCounter(MonitorService.Counter.ANNOTATION_RECORDS_WRITE_FAILURES, failedRecords.size(), null); + } + + } catch (IOException e) { + throw new SystemException("Failed to index annotation to ES.", e); + } + } + + @Override + public List<Annotation> getAnnotations(List<AnnotationQuery> queries) { + requireNotDisposed(); + requireArgument(queries != null, "Annotation queries cannot be null."); + List<Annotation> annotations = new ArrayList<>(); + int from = 0, scrollSize = ANNOTATION_INDEX_MAX_RESULT_WINDOW; + + String requestUrl = String.format("/%s-*/_search", ANNOTATION_INDEX_TEMPLATE_PATTERN_START); + try { + for (AnnotationQuery query : queries) { + String queryJson = constructTermQuery(query, from, scrollSize); + final long start = System.currentTimeMillis(); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + final long time = System.currentTimeMillis() - start; + logger.info("ES get request completed in {} ms", time); + String str = ElasticSearchUtils.extractResponse(response); + AnnotationRecordList list = ElasticSearchUtils.toEntity(str, new TypeReference<AnnotationRecordList>() {}, annotationMapper); + annotations.addAll(list.getRecords()); + + if(annotations.size() == scrollSize) { + logger.error("Maximum annotations limit exceeded for query " + query.toString()); + throw new RuntimeException(MessageFormat.format(EXCEPTION_MESSAGE, scrollSize)); + } + } + } catch(IOException ex) { + throw new SystemException(ex); + } + return annotations; + }
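One detail that appears both here and in DualAnnotationService: timestamps below 100000000000 are treated as epoch seconds and scaled to milliseconds. Read as milliseconds that cutoff is early 1973, read as seconds it is the far future, so realistic inputs are unambiguous. A tiny sketch of the heuristic used by convertTimestampToMillis() below:

public class TimestampSketch {
    // Mirrors the convertTimestampToMillis() heuristic.
    static long toMillis(long ts) {
        return ts < 100_000_000_000L ? ts * 1000 : ts;
    }

    public static void main(String[] args) {
        System.out.println(toMillis(1559153225L));    // epoch seconds -> 1559153225000
        System.out.println(toMillis(1559153225000L)); // already millis -> unchanged
    }
}

+ + private String constructTermQuery(AnnotationQuery query, int from, int size) throws JsonProcessingException { + convertTimestampToMillis(query); + + ObjectNode queryNode = constructSearchQueryNode(query, genericObjectMapper); + ObjectNode rootNode = annotationMapper.createObjectNode(); + rootNode.set("query", queryNode); + rootNode.put("from", from); + rootNode.put("size", size); + + return rootNode.toString(); + } + + protected void convertTimestampToMillis(AnnotationQuery query) { + long queryStart = query.getStartTimestamp(); + long queryEnd = query.getEndTimestamp(); + if (queryStart < 100000000000L) query.setStartTimestamp(queryStart * 1000); + if (queryEnd < 100000000000L) query.setEndTimestamp(queryEnd * 1000); + } + + private ObjectNode constructSearchQueryNode(AnnotationQuery query, ObjectMapper mapper) throws JsonProcessingException { + ArrayNode mustNodes = mapper.createArrayNode(); + ObjectNode node = mapper.createObjectNode(); + ObjectNode termNode = mapper.createObjectNode(); + termNode.put(AnnotationRecordList.AnnotationRecordType.SEARCH_ID.getName() + ".raw", getHashedSearchIdentifier(query)); + node.set("term", termNode); + mustNodes.add(node); + + node = mapper.createObjectNode(); + ObjectNode rangeNode = mapper.createObjectNode(); + ObjectNode timestampNode = mapper.createObjectNode(); + timestampNode.put("gte", Long.toString(query.getStartTimestamp())); + timestampNode.put("lte", Long.toString(query.getEndTimestamp())); + rangeNode.set(AnnotationRecordList.AnnotationRecordType.TIMESTAMP.getName(), timestampNode); + node.set("range", rangeNode);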
+ mustNodes.add(node); + + ObjectNode boolNode = mapper.createObjectNode(); + boolNode.set("must", mustNodes); + + ObjectNode queryNode = mapper.createObjectNode(); + queryNode.set("bool", boolNode); + return queryNode; + } + + @VisibleForTesting + static String getHashedSearchIdentifier(AnnotationQuery annotationQuery) { + HashFunction hf = Hashing.murmur3_128(); + String searchIdentifier = new StringBuilder().append(annotationQuery.getScope()).append(annotationQuery.getMetric()) + .append(annotationQuery.getTags().toString()).append(annotationQuery.getType()).toString(); + return hf.newHasher().putString(searchIdentifier, Charset.defaultCharset()).hash().toString(); + } +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/audit/DefaultAuditService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/audit/DefaultAuditService.java index 97662c198..d97a40218 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/audit/DefaultAuditService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/audit/DefaultAuditService.java @@ -130,7 +130,7 @@ public List findByEntity(BigInteger entityId, BigInteger limit) { JPAEntity entity = findEntity(em, entityId, JPAEntity.class); if (entity == null) { - throw new IllegalArgumentException(MessageFormat.format("The entity with Id {} does not exist.", entityId)); + throw new IllegalArgumentException(MessageFormat.format("The entity with Id {0} does not exist.", entityId)); } List result = limit == null ? Audit.findByEntity(em, entity) : Audit.findByEntity(em, entity, limit); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheProperty.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheProperty.java new file mode 100644 index 000000000..a994e38e4 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheProperty.java @@ -0,0 +1,42 @@ +package com.salesforce.dva.argus.service.cache; + +/** + * Enumerates the implementation specific configuration properties. + * + * @author Tom Valine (tvaline@salesforce.com) + */ +public enum CacheProperty { + + /** The global cache expiry in seconds. */ + REDIS_CACHE_EXPIRY_IN_SEC("service.property.cache.redis.cache.expiry.in.sec", "3600"), + /** The cache endpoint. */ + REDIS_CLUSTER("service.property.cache.redis.cluster", "default_value"), + /** The maximum number of cache connections. */ + REDIS_SERVER_MAX_CONNECTIONS("service.property.cache.redis.server.max.connections", "100"); + + private final String _name; + private final String _defaultValue; + + private CacheProperty(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheRedisClient.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheRedisClient.java new file mode 100644 index 000000000..7ce2fed7d --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/CacheRedisClient.java @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.cache; + +import redis.clients.jedis.HostAndPort; +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisCluster; +import redis.clients.jedis.JedisPool; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; + +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; + +/** + * Cache service implementation that uses Redis client. 
+ * + * @author Kunal Nawale (knawale@salesforce.com) + */ +public class CacheRedisClient { + private JedisCluster jedisClusterClient; + + public CacheRedisClient() { + } + + public void init(SystemConfiguration config) { + GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig(); + int maxTotal = Integer.parseInt(config.getValue(CacheProperty.REDIS_SERVER_MAX_CONNECTIONS.getName(), + CacheProperty.REDIS_SERVER_MAX_CONNECTIONS.getDefaultValue())); + poolConfig.setMaxTotal(maxTotal); + + String[] hostsPorts = config.getValue(CacheProperty.REDIS_CLUSTER.getName(), + CacheProperty.REDIS_CLUSTER.getDefaultValue()).split(","); + + Set jedisClusterNodes = new HashSet<>(); + for (String hostPort : hostsPorts) { + String[] hostPortPair = hostPort.split(":"); + jedisClusterNodes.add(new HostAndPort(hostPortPair[0], Integer.parseInt(hostPortPair[1]))); + } + jedisClusterClient = new JedisCluster(jedisClusterNodes, poolConfig); + } + + public JedisCluster getJedisClusterClient() { + return jedisClusterClient; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/NoOperationCacheService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/NoOperationCacheService.java index 4229dfdd0..526379714 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/NoOperationCacheService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/NoOperationCacheService.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service.cache; import com.google.inject.Inject; @@ -75,11 +75,6 @@ public void put(Map entries, int ttl) { // This is a no operation service. This method should do nothing and result in a cache miss. } - @Override - public void clear() { - // This is a no operation service. This method should do nothing and result in a cache miss. - } - @Override public boolean exist(String key) { return false; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/RedisCacheService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/RedisCacheService.java index f49a21619..6cac18a71 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/RedisCacheService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/cache/RedisCacheService.java @@ -67,7 +67,8 @@ public class RedisCacheService extends DefaultService implements CacheService { private final Logger _logger = LoggerFactory.getLogger(getClass()); private final SystemConfiguration _config; - private JedisCluster _jedisClusterClient; + private JedisCluster _jedisClusterClient; + //~ Constructors ********************************************************************************************************************************* @@ -77,22 +78,11 @@ public class RedisCacheService extends DefaultService implements CacheService { * @param config The system configuration. Cannot be null. 
*/ @Inject - public RedisCacheService(SystemConfiguration config) { + public RedisCacheService(SystemConfiguration config, CacheRedisClient cacheRedisClient) { super(config); _config = config; - GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig(); - poolConfig.setMaxTotal(Integer.parseInt( - _config.getValue(Property.REDIS_SERVER_MAX_CONNECTIONS.getName(), Property.REDIS_SERVER_MAX_CONNECTIONS.getDefaultValue()))); - - String[] hostsPorts = _config.getValue(Property.REDIS_CLUSTER.getName(), Property.REDIS_CLUSTER.getDefaultValue()).split(","); - - Set jedisClusterNodes = new HashSet(); - for (String hostPort : hostsPorts) { - String[] hostPortPair = hostPort.split(":"); - - jedisClusterNodes.add(new HostAndPort(hostPortPair[0], Integer.parseInt(hostPortPair[1]))); - } - _jedisClusterClient = new JedisCluster(jedisClusterNodes, poolConfig); + cacheRedisClient.init(config); + _jedisClusterClient = cacheRedisClient.getJedisClusterClient(); } //~ Methods ************************************************************************************************************************************** @@ -159,24 +149,6 @@ public void expire(Set keys, int ttl) { } } - @Override - public void clear() { - Iterator poolIterator = _jedisClusterClient.getClusterNodes().values().iterator(); - - while (poolIterator.hasNext()) { - JedisPool pool = poolIterator.next(); - Jedis jedis = pool.getResource(); - - try { - jedis.flushAll(); - } catch (Exception ex) { - _logger.error("Exception in cache service: {} ", ex.getMessage()); - } finally { - jedis.close(); - } - } - } - @Override public boolean exist(String key) { boolean isKeyExisting = false; @@ -286,14 +258,14 @@ public Map getRange(Set keys, int startOffset, int endOff @Override public int getCacheExpirationTime() { - return Integer.parseInt(_config.getValue(Property.REDIS_CACHE_EXPIRY_IN_SEC.getName(), Property.REDIS_CACHE_EXPIRY_IN_SEC.getDefaultValue())); + return Integer.parseInt(_config.getValue(CacheProperty.REDIS_CACHE_EXPIRY_IN_SEC.getName(), CacheProperty.REDIS_CACHE_EXPIRY_IN_SEC.getDefaultValue())); } @Override public Properties getServiceProperties() { Properties serviceProps= new Properties(); - for(Property property:Property.values()){ + for(CacheProperty property:CacheProperty.values()){ serviceProps.put(property.getName(), property.getDefaultValue()); } return serviceProps; @@ -311,45 +283,5 @@ public void dispose() { //~ Enums **************************************************************************************************************************************** - /** - * Enumerates the implementation specific configuration properties. - * - * @author Tom Valine (tvaline@salesforce.com) - */ - public enum Property { - - /** The global cache expiry in seconds. */ - REDIS_CACHE_EXPIRY_IN_SEC("service.property.cache.redis.cache.expiry.in.sec", "3600"), - /** The cache endpoint. */ - REDIS_CLUSTER("service.property.cache.redis.cluster", "default_value"), - /** The maximum number of cache connections. */ - REDIS_SERVER_MAX_CONNECTIONS("service.property.cache.redis.server.max.connections", "100"); - - private final String _name; - private final String _defaultValue; - - private Property(String name, String defaultValue) { - _name = name; - _defaultValue = defaultValue; - } - - /** - * Returns the property name. - * - * @return The property name. - */ - public String getName() { - return _name; - } - - /** - * Returns the default value for the property. - * - * @return The default value. 
- */ - public String getDefaultValue() { - return _defaultValue; - } - } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/callback/DefaultCallbackService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/callback/DefaultCallbackService.java index a57980e42..0e351c652 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/callback/DefaultCallbackService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/callback/DefaultCallbackService.java @@ -13,19 +13,25 @@ */ package com.salesforce.dva.argus.service.callback; -import java.io.IOException; +import java.io.StringReader; +import java.sql.Date; import java.text.MessageFormat; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; -import com.salesforce.dva.argus.entity.History; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.service.CallbackService; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; +import com.salesforce.dva.argus.service.alert.notifier.CallbackNotifier; import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.AlertUtils; import com.salesforce.dva.argus.util.TemplateReplacer; import org.apache.http.HttpEntity; import org.apache.http.HttpHeaders; @@ -36,8 +42,18 @@ import org.apache.http.client.methods.RequestBuilder; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicHttpResponse; +import org.apache.http.util.EntityUtils; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.json.Json; +import javax.json.JsonBuilderFactory; +import javax.json.JsonObject; +import javax.json.JsonObjectBuilder; +import javax.json.JsonReader; +import javax.json.stream.JsonGenerator; /** * Default {@link CallbackService} implementation sending the request via a shared apache HttpClient @@ -45,12 +61,14 @@ * @author svenkrause */ public class DefaultCallbackService extends DefaultService implements CallbackService { + + static { + } + //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; + private final Logger _logger = LoggerFactory.getLogger(DefaultCallbackService.class); private final HttpClientPool httpClientPool; - private final ObjectMapper _mapper; //~ Constructors ********************************************************************************************************************************* @@ -62,71 +80,152 @@ public class DefaultCallbackService extends DefaultService implements CallbackSe @Inject public DefaultCallbackService(SystemConfiguration config) { super(config); - _mapper = new ObjectMapper(); int poolSize = Property.POOL_SIZE.getInt(config); TimeUnit timeUnit = Property.POOL_REFRESH_UNIT.getEnum(config, TimeUnit.class); int refresh = Property.POOL_REFRESH_TIME.getInt(config); 
httpClientPool = new HttpClientPool(poolSize, refresh, timeUnit); + } @Override - public HttpResponse sendNotification(DefaultAlertService.NotificationContext context) { - String subscription = context.getNotification().getSubscriptions().stream().collect(Collectors.joining()); + public HttpResponse sendNotification(DefaultAlertService.NotificationContext context, CallbackNotifier notifier) { + + Notification notification = context.getNotification(); + String subscription = notification.getSubscriptions().stream().collect(Collectors.joining()); String notificationMessage = null; - Request request = null; + CallbackRequest request = new CallbackRequest(); - try { - request = _mapper.readValue(subscription, Request.class); - } catch (Exception e) { - return errorResponse(subscription + " cannot be parsed. ", e); - } + request.setUri(subscription); + request.setMethod(Method.POST); + + String payload = createPayload(context, notifier); + request.setBody(payload); + + Map header = new HashMap<>(); + header.put("Content-Type", "application/json"); + request.setHeader(header); notificationMessage = MessageFormat.format("Callback via Url {0} Method {1} Body {2}", - request.getUri(), request.getMethod().name(), getResolvedBody(context, request)); + request.getUri(), request.getMethod().name(), payload); try { - HttpResponse response = sendNotification(buildRequest(context, request), notificationMessage); + HttpResponse response = sendNotification(buildRequest(request), notificationMessage); return response; } catch (Exception e) { return errorResponse(notificationMessage + " failed. ", e); } } + protected String createPayload(DefaultAlertService.NotificationContext context, + CallbackNotifier notifier) { + + Notification notification = context.getNotification(); + Trigger trigger = context.getTrigger(); + Alert alert = context.getAlert(); + + Map config = new HashMap<>(); + config.put(JsonGenerator.PRETTY_PRINTING, true); + + JsonBuilderFactory factory = Json.createBuilderFactory(config); + + JsonObjectBuilder alertBuilder = factory.createObjectBuilder(); + JsonObjectBuilder triggerBuilder = factory.createObjectBuilder(); + JsonObjectBuilder notificationBuilder = factory.createObjectBuilder(); + + alertBuilder.add("name", TemplateReplacer.applyTemplateChanges(context, alert.getName())); + alertBuilder.add("alertUrl", notifier.getAlertUrl(alert.getId())); + alertBuilder.add("firedAt", context.getTriggerFiredTime()); + context.getAlertEvaluationTrackingID().ifPresent(trackingID -> { + alertBuilder.add("trackingID", trackingID); + }); + + notificationBuilder.add("name", TemplateReplacer.applyTemplateChanges(context, notification.getName())); + notificationBuilder.add("status", "Triggered"); + notificationBuilder.add("CoolDownUntil", AuditNotifier.DATE_FORMATTER.get().format(new Date(context.getCoolDownExpiration()))); + + triggerBuilder.add("name", TemplateReplacer.applyTemplateChanges(context, trigger.getName())); + + Trigger.TriggerType triggerType = trigger.getType(); + + triggerBuilder.add("type", triggerType.toString()); + + if (triggerType == Trigger.TriggerType.BETWEEN || triggerType == Trigger.TriggerType.NOT_BETWEEN) { + triggerBuilder.add("primaryThreshold", trigger.getThreshold()); + triggerBuilder.add("secondaryThreshold", trigger.getSecondaryThreshold()); + } else { + triggerBuilder.add("threshold", trigger.getThreshold()); + } + + triggerBuilder.add("inertia", trigger.getInertia()); + + if(!trigger.getType().equals(Trigger.TriggerType.NO_DATA)) { + triggerBuilder.add("triggeredValue", 
context.getTriggerEventValue()); + } + + if(context.getTriggeredMetric() != null) { + triggerBuilder.add("triggeredMetric", context.getTriggeredMetric().getIdentifier()); + } + + JsonObjectBuilder rootBuilder = factory.createObjectBuilder(); + + String customText = context.getNotification().getCustomText(); + + if( customText != null && customText.length() > 0){ + customText = TemplateReplacer.applyTemplateChanges(context, customText); + + JsonReader reader = null; + + try { + reader = Json.createReader(new StringReader(customText)); + JsonObject customJson = reader.readObject(); + rootBuilder.add("custom", customJson); + } finally { + if(reader != null) { + reader.close(); + } + } + } + + if (context.getEvaluatedMetricSnapshotURL().isPresent()) { + alertBuilder.add("evaluatedMetricSnapshotUrl", context.getEvaluatedMetricSnapshotURL().get()); + } else { + String expression = AlertUtils.getExpressionWithAbsoluteStartAndEndTimeStamps(context); + + if(!expression.equals("")) { + alertBuilder.add("evaluatedMetricUrl", notifier.getExpressionUrl(expression)); + } + } + + alertBuilder.add("evaluatedMetric", alert.getExpression()); + + rootBuilder.add("alert", alertBuilder) + .add("notification", notificationBuilder) + .add("trigger", triggerBuilder); + + return rootBuilder.build().toString(); + } + @Override public void dispose() { httpClientPool.shutdown(); super.dispose(); } - private HttpUriRequest buildRequest(DefaultAlertService.NotificationContext context, - CallbackService.Request request) { + private HttpUriRequest buildRequest(CallbackService.CallbackRequest request) { RequestBuilder builder = RequestBuilder .create(request.getMethod().name()) .setUri(request.getUri()) - .setEntity(getBody(context, request)); + .setEntity(getBody(request)); request.getHeader().forEach((k, v) -> builder.addHeader(k, v)); return builder.build(); } - private String getResolvedBody(DefaultAlertService.NotificationContext context, - CallbackService.Request request) { - - if (request.getBody() != null) { - String body = request.getBody(); - body = TemplateReplacer.applyTemplateChanges(context, body); - return body; - } - return null; - } - - private HttpEntity getBody(DefaultAlertService.NotificationContext context, - CallbackService.Request request) - { - String body = getResolvedBody(context, request); + private HttpEntity getBody(CallbackService.CallbackRequest request) { + String body = request.getBody(); if (body != null) { StringEntity entity; @@ -143,13 +242,14 @@ private HttpEntity getBody(DefaultAlertService.NotificationContext context, private HttpResponse sendNotification(HttpUriRequest request, String notificationMessage) { - HttpClient httpClient = httpClientPool.borrowObject(); try { - return httpClient.execute(request); + HttpClient httpClient; + httpClient = httpClientPool.borrowObject(); + HttpResponse response = httpClient.execute(request); + EntityUtils.consume(response.getEntity()); + return response; } catch (Throwable t) { return errorResponse(notificationMessage + " failed. 
", t); - } finally { - httpClientPool.returnObject(httpClient); } } @@ -161,7 +261,6 @@ private static HttpResponse errorResponse(String reason, Throwable t) { } - public enum Property { POOL_SIZE("service.callback.pool.size", "10"), diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/collect/DefaultCollectionService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/collect/DefaultCollectionService.java index 6ac44b4ee..63cccf2e4 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/collect/DefaultCollectionService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/collect/DefaultCollectionService.java @@ -28,17 +28,39 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service.collect; +import static com.salesforce.dva.argus.service.MQService.MQQueue.ANNOTATION; +import static com.salesforce.dva.argus.service.MQService.MQQueue.METRIC; +import static com.salesforce.dva.argus.service.MQService.MQQueue.HISTOGRAM; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import javax.persistence.EntityManager; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.CollectionType; import com.google.inject.Inject; import com.google.inject.Provider; import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; +import com.salesforce.dva.argus.entity.HistogramBucket; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.PrincipalUser; -import com.salesforce.dva.argus.inject.SLF4JTypeListener.InjectLogger; +import com.salesforce.dva.argus.entity.TSDBEntity; +import com.salesforce.dva.argus.service.AnnotationStorageService; import com.salesforce.dva.argus.service.AuditService; import com.salesforce.dva.argus.service.CollectionService; import com.salesforce.dva.argus.service.MQService; @@ -52,19 +74,6 @@ import com.salesforce.dva.argus.service.WardenService.SubSystem; import com.salesforce.dva.argus.service.jpa.DefaultJPAService; import com.salesforce.dva.argus.system.SystemConfiguration; -import org.slf4j.Logger; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import javax.persistence.EntityManager; - -import static com.salesforce.dva.argus.service.MQService.MQQueue.ANNOTATION; -import static com.salesforce.dva.argus.service.MQService.MQQueue.METRIC; -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; /** * Default implementation of the CollectionService interface. 
@@ -75,16 +84,18 @@ public class DefaultCollectionService extends DefaultJPAService implements Colle //~ Static fields/initializers ******************************************************************************************************************* + protected static final int MAX_ANNOTATION_SIZE_BYTES = 2000; private static final int BATCH_METRICS = 50; + private static final int MAX_HISTOGRAM_BUCKETS = 100; + private static final Logger _logger = LoggerFactory.getLogger(DefaultCollectionService.class); //~ Instance fields ****************************************************************************************************************************** - @InjectLogger - private Logger _logger; @Inject Provider emf; private final MQService _mqService; private final TSDBService _tsdbService; + private final AnnotationStorageService _annotationStorageService; private final SchemaService _schemaService; private final WardenService _wardenService; private final MonitorService _monitorService; @@ -95,24 +106,24 @@ public class DefaultCollectionService extends DefaultJPAService implements Colle /** * Creates a new DefaultCollectionService object. * - * @param mqService The MQ service implementation with which to queue and dequeue submitted metrics and annotations - * @param tsdbService The TSDB service implementation with which to write metrics and annotations into storage. - * @param auditService The audit service instance to use. Cannot be null. - * @param mailService The mail service instance to use. Cannot be null. - * @param configuration The system configuration instance to use. Cannot be null. - * @param historyService The job history service instance to use. Cannot be null. - * @param schemaService The schema service instance to use. Cannot be null. - * @param wardenService The warden service instance to use. Cannot be null. - * @param monitorService The monitor service instance to use. Cannot be null. - * @param namespaceService The namespace service instance to use. Cannot be null. + * @param mqService The MQ service implementation with which to queue and dequeue submitted metrics and annotations + * @param tsdbService The TSDB service implementation with which to write metrics and annotations into storage. + * @param auditService The audit service instance to use. Cannot be null. + * @param annotationStorageService The annotation storage service instance to use. Cannot be null. + * @param configuration The system configuration instance to use. Cannot be null. + * @param schemaService The schema service instance to use. Cannot be null. + * @param wardenService The warden service instance to use. Cannot be null. + * @param monitorService The monitor service instance to use. Cannot be null. + * @param namespaceService The namespace service instance to use. Cannot be null.
*/ @Inject - DefaultCollectionService(MQService mqService, TSDBService tsdbService, AuditService auditService, - SystemConfiguration configuration, SchemaService schemaService, WardenService wardenService, - MonitorService monitorService, NamespaceService namespaceService) { + DefaultCollectionService(MQService mqService, TSDBService tsdbService, AuditService auditService, + AnnotationStorageService annotationStorageService, SystemConfiguration configuration, SchemaService schemaService, WardenService wardenService, + MonitorService monitorService, NamespaceService namespaceService) { super(auditService, configuration); _mqService = mqService; _tsdbService = tsdbService; + _annotationStorageService = annotationStorageService; _schemaService = schemaService; _wardenService = wardenService; _monitorService = monitorService; @@ -148,7 +159,7 @@ public void submitMetrics(PrincipalUser submitter, List metrics) { } metrics = _getAllowedMetrics(metrics, submitter); _logger.info("User metric data is: Metrics size - {}, Datapoints size - {}, Minimum resolution - {}.", metrics.size(), - metricData.getDataPointsSize(), metricData.getMinResolutionDataPointsAcrossAllMetrics()); + metricData.getDataPointsSize(), metricData.getMinResolutionDataPointsAcrossAllMetrics()); } List> batches = _batchMetrics(metrics); @@ -165,10 +176,36 @@ public void submitAnnotation(PrincipalUser submitter, Annotation annotation) { public void submitAnnotations(PrincipalUser submitter, List annotations) { requireNotDisposed(); requireArgument(submitter != null, "Submitting user cannot be null."); - requireArgument(annotations != null, "The list of annotaions to submit cannot be null."); + requireArgument(annotations != null, "The list of annotations to submit cannot be null."); checkSubmitAnnotationPolicyRequirementsMet(submitter, annotations); - _monitorService.modifyCounter(Counter.ANNOTATION_WRITES, annotations.size(), null); - _mqService.enqueue(ANNOTATION.getQueueName(), annotations); + + List putAnnotationList = new LinkedList<>(); + /* Replace unsupported characters in annotation */ + for (Annotation annotation : annotations) { + if (annotation.computeSizeBytes() > MAX_ANNOTATION_SIZE_BYTES) { + _logger.debug("Annotation size of {} bytes exceeded max size {} allowed for annotation {}.", + annotation.computeSizeBytes(), + MAX_ANNOTATION_SIZE_BYTES, + annotation); + Map tags = new HashMap<>(); + tags.put("source", annotation.getSource()); + _monitorService.modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, tags); + } else { + annotation.setScope(TSDBEntity.replaceUnsupportedChars(annotation.getScope())); + annotation.setMetric(TSDBEntity.replaceUnsupportedChars(annotation.getMetric())); + Map filteredTags = new HashMap<>(); + for (String tagKey : annotation.getTags().keySet()) { + filteredTags.put(TSDBEntity.replaceUnsupportedChars(tagKey), TSDBEntity.replaceUnsupportedChars(annotation.getTags().get(tagKey))); + } + annotation.setTags(filteredTags); + + putAnnotationList.add(annotation); + } + } + _monitorService.modifyCounter(Counter.ANNOTATION_WRITES, putAnnotationList.size(), null); + if (!putAnnotationList.isEmpty()) { + _mqService.enqueue(ANNOTATION.getQueueName(), putAnnotationList); + } } @Override @@ -220,12 +257,77 @@ public int commitAnnotations(int annotationCount, int timeout) { List dequeued = _mqService.dequeue(ANNOTATION.getQueueName(), Annotation.class, timeout, annotationCount); if (!dequeued.isEmpty()) { - _tsdbService.putAnnotations(dequeued); + 
_annotationStorageService.putAnnotations(dequeued); _logger.debug("Committed {} annotations.", dequeued.size()); } return dequeued.size(); } + @Override + public int commitHistograms(int histogramCount, int timeout) { + requireNotDisposed(); + requireArgument(histogramCount > 0, "Histogram count must be greater than zero."); + requireArgument(timeout > 0, "The timeout in milliseconds must be greater than zero."); + + List dequeued = _mqService.dequeue(HISTOGRAM.getQueueName(), Histogram.class, timeout, histogramCount); + + if (!dequeued.isEmpty()) { + _tsdbService.putHistograms(dequeued); + _logger.debug("Committed {} histograms.", dequeued.size()); + } + return dequeued.size(); + } + + @Override + public void submitHistogram(PrincipalUser submitter, Histogram histogram) { + submitHistograms(submitter, Arrays.asList(new Histogram[] { histogram })); + } + + @Override + public void submitHistograms(PrincipalUser submitter, List histograms) { + requireNotDisposed(); + requireArgument(submitter != null, "Submitting user cannot be null."); + requireArgument(histograms != null, "The list of histograms to submit cannot be null."); + checkSubmitHistogramPolicyRequirementsMet(submitter, histograms); + + List filteredHistograms = new ArrayList<>(); + + /* Replace unsupported characters in histogram and validate bounds */ + for (Histogram histogram : histograms) { + if(histogram.getBuckets() == null || histogram.getBuckets().isEmpty()){ + _logger.warn("Histogram buckets is null or empty. Dropping this histogram"); + _monitorService.modifyCounter(Counter.HISTOGRAM_DROPPED, 1, null); + } else if (histogram.getBuckets().size() > MAX_HISTOGRAM_BUCKETS) { + _logger.warn("Histogram buckets exceeded max size {}. Dropping this histogram", MAX_HISTOGRAM_BUCKETS); + _monitorService.modifyCounter(Counter.HISTOGRAM_DROPPED, 1, null); + } else { + boolean boundsCheck = true; + for(HistogramBucket histogramBucket: histogram.getBuckets().keySet()){ + if(histogramBucket.getLowerBound() >= histogramBucket.getUpperBound()){ + _logger.warn("Histogram lower bound, must be less than upper bound. 
Dropping this histogram"); + _monitorService.modifyCounter(Counter.HISTOGRAM_DROPPED, 1, null); + boundsCheck = false; + break; + } + } + if(boundsCheck == false) continue; + + histogram.setScope(TSDBEntity.replaceUnsupportedChars(histogram.getScope())); + histogram.setMetric(TSDBEntity.replaceUnsupportedChars(histogram.getMetric())); + Map filteredTags = new HashMap<>(); + for (String tagKey : histogram.getTags().keySet()) { + filteredTags.put(TSDBEntity.replaceUnsupportedChars(tagKey), TSDBEntity.replaceUnsupportedChars(histogram.getTags().get(tagKey))); + } + histogram.setTags(filteredTags); + + filteredHistograms.add(histogram); + } + } + + _monitorService.modifyCounter(Counter.HISTOGRAM_WRITES, filteredHistograms.size(), null); + _mqService.enqueue(HISTOGRAM.getQueueName(), filteredHistograms); + } + @Override public void dispose() { super.dispose(); @@ -234,7 +336,7 @@ public void dispose() { private void checkSubmitAnnotationPolicyRequirementsMet(PrincipalUser submitter, List annotations) { assert (submitter != null) : "Submitter should not be null."; assert (annotations != null) : "List of annotations should not be null."; - _logger.warn("Policy checks for submitting annotations are not yet implmented."); + _logger.warn("Policy checks for submitting annotations are not yet implemented."); } private void checkSubmitMetricPolicyRequirementsMet(PrincipalUser submitter, List metrics) { @@ -243,6 +345,12 @@ private void checkSubmitMetricPolicyRequirementsMet(PrincipalUser submitter, Lis _wardenService.assertSubSystemUsePermitted(submitter, SubSystem.POSTING); } + private void checkSubmitHistogramPolicyRequirementsMet(PrincipalUser submitter, List histograms) { + assert (submitter != null) : "Submitter should not be null."; + assert (histograms != null) : "List of histograms should not be null."; + _logger.warn("Policy checks for submitting histograms are not yet implemented."); + } + /* * Parses the metrics, and gets data points size and minimum resolution of data points across all metrics. To get minimum resolution, calculate * the minimum of the minimum resolution across each metric category If there is not more than 1 data point in a metric category, then don't @@ -267,6 +375,8 @@ private MetricData _parseMetricData(List metrics) { metricCategoryMap.put(metricCategory, new ArrayList(metric.getDatapoints().keySet())); } } + + Metric minResolutionMetric = null; for (Entry> entry : metricCategoryMap.entrySet()) { Long minDiffInMetricCategory = null; List dataPointsTimeStampList = entry.getValue(); @@ -279,10 +389,19 @@ private MetricData _parseMetricData(List metrics) { } if (minDiff == null) { minDiff = minDiffInMetricCategory; + minResolutionMetric = entry.getKey(); + }else { + if(minDiff>minDiffInMetricCategory) { + minResolutionMetric = entry.getKey(); + minDiff = minDiffInMetricCategory; + } } - minDiff = Math.min(minDiffInMetricCategory, minDiff); } } + + if(minDiff!=null && minDiff> _batchMetrics(List metrics) { batches.add(batch); batch = new ArrayList(BATCH_METRICS); } + + /* + * We are doing the unsupported character replacement before we write to the queue. + * This way the same data is seen by any downstream schema or metric consumers, and both will be in sync. 
+ */ + metric.setScope(TSDBEntity.replaceUnsupportedChars(metric.getScope())); + metric.setMetric(TSDBEntity.replaceUnsupportedChars(metric.getMetric())); + metric.setNamespace(TSDBEntity.replaceUnsupportedChars(metric.getNamespace())); + Map filteredTags = new HashMap<>(); + for(String tagKey : metric.getTags().keySet()) { + filteredTags.put(TSDBEntity.replaceUnsupportedChars(tagKey), TSDBEntity.replaceUnsupportedChars(metric.getTags().get(tagKey))); + } + metric.setTags(filteredTags); + batch.add(metric); count++; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/DefaultImageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/DefaultImageService.java new file mode 100644 index 000000000..a45e31d59 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/DefaultImageService.java @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.service.image; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.ImageProperties; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.ImageService; +import com.salesforce.dva.argus.service.ImageStorageService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import com.salesforce.dva.argus.util.ImageUtils; +import org.apache.commons.lang3.tuple.Pair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.List; +import java.util.Optional; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +public class DefaultImageService extends DefaultService implements ImageService { + + + //~ Instance fields ****************************************************************************************************************************** + + private final ImageStorageService imageStorageService; + private static Logger logger = LoggerFactory.getLogger(DefaultImageService.class); + + /** + * + * @param imageStorageService The storage service used to perform image operations. Cannot be null. + * @param config System Configuration + */ + @Inject + protected DefaultImageService(ImageStorageService imageStorageService,SystemConfiguration config) { + super(config); + requireArgument(imageStorageService != null, "The image storage service cannot be null."); + this.imageStorageService=imageStorageService; + } + + //~ Methods ************************************************************************************************************************************** + + /** + * Generates an image for the given list of metrics + * + * @param metrics List of metrics for which image is generated + * @param imageProperties Properties of the image that need to be generated + * @return Returns Byte Array of the JPEG Image or null + */ + @Override + public byte[] generateImage(List metrics, ImageProperties imageProperties){ + + try { + return ImageUtils.getMetricsImage(metrics,imageProperties); + } + catch (IOException exception) + { + logger.error("Exception while generating the Image",exception); + throw new SystemException("Exception while generating the Image",exception); + } + + + } + + /** + * Saves the image byte array in an sync or async fashion + * + * @param imageBytes Byte Array of the JPEG image to be saved + * @param sync saves image synchronously or asynchronously + * @return Returns a Unique Id for this image or null + */ + + @Override + public String storeImage(byte[] imageBytes, boolean sync) { + + requireArgument((imageBytes != null && imageBytes.length>0), "imageBytes cannot be null or Empty"); + // ImageId is the Md5 Hash of the imageBytes + String imageId= ImageUtils.convertBytesToMd5Hash(imageBytes); + imageStorageService.putImage(imageId,imageBytes,sync); + return imageId; + } + + + /** + * Generates and Saves the image in an sync or async fashion for the given list of metrics + * + * @param metrics List of metrics for which JPEG image is generated + * @param imageProperties Properties of the image that need to be generated + * @param sync saves image synchronously or asynchronously + * @return Pair Object with first argument contains Unique Id for image and second argument contains associated JPEG image byte array + */ + @Override + public Pair generateAndStoreImage(List metrics, ImageProperties 
imageProperties, boolean sync) { + + try { + byte[] imageBytes = ImageUtils.getMetricsImage(metrics,imageProperties); + String imageId = ImageUtils.convertBytesToMd5Hash(imageBytes); + imageStorageService.putImage(imageId,imageBytes,sync); + Pair imageObject = Pair.of(imageId,imageBytes); + return imageObject; + } + catch (IOException exception) + { + logger.error("Exception while generating the Image",exception); + throw new SystemException("Exception while generating the Image",exception); + } + + } + + /** + * Returns image byte Array for the given imageId + * + * @param imageId Key of the image + * @return Associated JPEG image is returned + */ + @Override + public byte[] getImageById(String imageId) { + + requireArgument((imageId != null && imageId.length()>0), "imageId cannot be null or Empty"); + return imageStorageService.getImage(imageId); + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ElasticSearchImageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ElasticSearchImageService.java new file mode 100644 index 000000000..a21a9e7fe --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ElasticSearchImageService.java @@ -0,0 +1,404 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
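Putting the new ImageService pieces together, a caller's round trip looks roughly like the sketch below. Because the id is the MD5 hash of the JPEG bytes, storing the same bytes twice is idempotent; the service instance and inputs are assumed to be injected:

    import java.util.Arrays;
    import java.util.List;
    import com.salesforce.dva.argus.entity.ImageProperties;
    import com.salesforce.dva.argus.entity.Metric;
    import com.salesforce.dva.argus.service.ImageService;

    final class ImageServiceRoundTripSketch {
        static byte[] renderAndStore(ImageService imageService, List<Metric> metrics, ImageProperties props) {
            byte[] jpeg = imageService.generateImage(metrics, props); // charts the metrics as a JPEG
            String id = imageService.storeImage(jpeg, true);          // synchronous write; id = MD5(jpeg)
            byte[] fetched = imageService.getImageById(id);
            assert Arrays.equals(jpeg, fetched);                      // the same content comes back
            return fetched;
        }
    }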
+ */ + +package com.salesforce.dva.argus.service.image; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.ImageStorageService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; +import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + + +/** + * ElasticSearch implementation of Image Service to store and query image. 
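Concretely, each stored image becomes one document in a day-scoped index written through the _bulk endpoint. An illustrative request body follows (two NDJSON lines; the id and blob values are hypothetical, and the index name is produced by ImageRecord.getImageIndex further down):

    { "index" : { "_index" : "argus-image-2019-01-01", "_type" : "_doc", "_id" : "9e107d9d372bb6826bd81d3542a419d6" } }
    { "imageblob" : "<base64-encoded JPEG bytes>", "mts" : "1546300800000" }

The index template maps imageblob as a non-analyzed binary field and mts as a date, so the daily indices stay queryable by time without ever tokenizing the payload.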
+ * + * @author Chandravyas Annakula (cannakula@salesforce.com) + */ + +@Singleton +public class ElasticSearchImageService extends DefaultService implements ImageStorageService { + + private static Logger logger = LoggerFactory.getLogger(ElasticSearchImageService.class); + private static ObjectMapper genericObjectMapper = new ObjectMapper(); + + protected final MonitorService monitorService; + /** Global ES properties */ + private static final int IMAGE_MAX_RETRY_TIMEOUT = 300 * 1000; + private static final int IMAGE_INDEX_MAX_RESULT_WINDOW = 10000; + private static final String FIELD_TYPE_BINARY = "binary"; + private static final String FIELD_TYPE_DATE ="date"; + private static final String PUT_REQUEST_URL= "_bulk"; + + private RestClient esRestClient; + + /** Image index properties */ + private static String imageIndexTemplateName; + public static String imageIndexTemplatePatternStart; + public static String imageIndexType; + private static String imageSearchRequestURL; + private final int replicationFactorForImageIndex; + private final int numShardsForImageIndex; + private final ObjectMapper imageMapper; + + @Inject + protected ElasticSearchImageService(SystemConfiguration config,MonitorService monitorService, ElasticSearchUtils esUtils) { + super(config); + this.monitorService = monitorService; + /** Setup Global ES stuff */ + String[] nodes = config.getValue(ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT.getName(), ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT.getDefaultValue()).split(","); + HttpHost[] httpHosts = new HttpHost[nodes.length]; + for(int i=0; i { + try { + int connCount = Integer.parseInt(config.getValue(ElasticSearchImageService.Property.IMAGE_ES_CONNECTION_COUNT.getName(), + ElasticSearchImageService.Property.IMAGE_ES_CONNECTION_COUNT.getDefaultValue())); + PoolingNHttpClientConnectionManager connMgr = + new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); + connMgr.setMaxTotal(connCount); + connMgr.setDefaultMaxPerRoute(connCount/httpHosts.length < 1 ? 
1:connCount/httpHosts.length); + httpClientBuilder.setConnectionManager(connMgr); + return httpClientBuilder; + } catch(Exception e) { + throw new SystemException(e); + } + }; + RestClientBuilder.RequestConfigCallback requestConfigCallback = requestConfigBuilder -> { + int connTimeout = Integer.parseInt(config.getValue(ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getName(), + ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getDefaultValue())); + int socketTimeout = Integer.parseInt(config.getValue(ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT_SOCKET_TIMEOUT.getName(), + ElasticSearchImageService.Property.IMAGE_ES_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); + requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); + + logger.info("esRestClient set connectionTimeoutMillis {} socketTimeoutMillis {}", + connTimeout, socketTimeout); + + return requestConfigBuilder; + }; + esRestClient = RestClient.builder(httpHosts) + .setHttpClientConfigCallback(clientConfigCallback) + .setRequestConfigCallback(requestConfigCallback) + .setMaxRetryTimeoutMillis(IMAGE_MAX_RETRY_TIMEOUT) + .build(); + logger.info("esRestClient set MaxRetryTimeoutsMillis {}", IMAGE_MAX_RETRY_TIMEOUT); + + /** Set up image index stuff */ + imageMapper = getImageObjectMapper(new ImageRecord.IndexSerializer()); + imageIndexType = config.getValue(Property.IMAGE_ES_INDEX_TYPE.getName(), + ElasticSearchImageService.Property.IMAGE_ES_INDEX_TYPE.getDefaultValue()); + imageIndexTemplateName = config.getValue(ElasticSearchImageService.Property.IMAGE_ES_INDEX_TEMPLATE_NAME.getName(), + ElasticSearchImageService.Property.IMAGE_ES_INDEX_TEMPLATE_NAME.getDefaultValue()); + imageIndexTemplatePatternStart = config.getValue(ElasticSearchImageService.Property.IMAGE_ES_INDEX_TEMPLATE_PATTERN_START.getName(), + ElasticSearchImageService.Property.IMAGE_ES_INDEX_TEMPLATE_PATTERN_START.getDefaultValue()); + imageSearchRequestURL = String.format("/%s-*/_search", imageIndexTemplatePatternStart); + replicationFactorForImageIndex = Integer.parseInt( + config.getValue(ElasticSearchImageService.Property.IMAGE_ES_NUM_REPLICAS.getName(), ElasticSearchImageService.Property.IMAGE_ES_NUM_REPLICAS.getDefaultValue())); + numShardsForImageIndex = Integer.parseInt( + config.getValue(ElasticSearchImageService.Property.IMAGE_ES_SHARDS_COUNT.getName(), ElasticSearchImageService.Property.IMAGE_ES_SHARDS_COUNT.getDefaultValue())); + esUtils.createIndexTemplate(esRestClient, + imageIndexTemplateName, + imageIndexTemplatePatternStart, + this::createImageIndexTemplateSettingsNode, + this::createImageIndexTemplateMappingNode); + } + + @Override + public void putImage(String imageId,byte[] imageBytes, boolean sync) { + + try { + ImageRecord indexImageRecord = new ImageRecord(imageId,imageBytes); + String requestBody = imageMapper.writeValueAsString(indexImageRecord); + Request request = new Request(ElasticSearchUtils.HttpMethod.POST.getName(), PUT_REQUEST_URL); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + if (sync) { + performESRequest(request); + } + else{ + performAsyncESRequest(request); + } + + } catch (IOException e) { + logger.error("Failed to index image record to ES.",e); + throw new SystemException("Failed to index image record to ES.", e); + } + + } + + @Override + public byte[] getImage(String imageId) { + requireNotDisposed(); + requireArgument(imageId != null, "imageId cannot be null"); + ImageRecord record=null; + try{ + 
String queryJson = constructSimpleIdSearchQuery(imageId).toString(); + final long start = System.currentTimeMillis(); + Request request = new Request(ElasticSearchUtils.HttpMethod.POST.getName(), imageSearchRequestURL); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + final long time = System.currentTimeMillis() - start; + logger.info("ES get request completed in {} ms", time); + String str = extractResponse(response); + record = ElasticSearchUtils.toEntity(str, new TypeReference() {},imageMapper); + } catch(IOException ex) { + logger.error("Exception while retrieving the image",ex); + throw new SystemException("Exception while retrieving the image",ex); + } + return record.getImageBytes(); + } + + + private void performESRequest(Request request) throws IOException { + Response response = esRestClient.performRequest(request); + String strResponse = extractResponse(response); + ElasticSearchUtils.PutResponse putResponse = genericObjectMapper.readValue(strResponse, ElasticSearchUtils.PutResponse.class); + parseResponseErrors(putResponse); + } + + private void parseResponseErrors(ElasticSearchUtils.PutResponse putResponse) throws JsonProcessingException { + + if (putResponse.isErrors()) { + for (ElasticSearchUtils.PutResponse.Item item : putResponse.getItems()) { + if (item.getIndex() != null && item.getIndex().getStatus() != HttpStatus.SC_CREATED) { + logger.warn("Failed to add image record with id {} to index. Reason: {}", + item.getIndex().get_id(), + imageMapper.writeValueAsString(item.getIndex().getError())); + monitorService.modifyCounter(MonitorService.Counter.IMAGE_RECORDS_WRITE_FAILURES, 1, null); + + } + } + } + } + + private void performAsyncESRequest(Request request) { + esRestClient.performRequestAsync(request,new ResponseListener() { + @Override + public void onSuccess(Response response) { + String strResponse = extractResponse(response); + try { + ElasticSearchUtils.PutResponse putResponse = genericObjectMapper.readValue(strResponse, ElasticSearchUtils.PutResponse.class); + parseResponseErrors(putResponse); + } catch (IOException exception) { + logger.error("Failed to parse the Elastic Search response in Async call.", exception); + } + + } + + @Override + public void onFailure(Exception exception) { + logger.error("Failed to index image record to ES in Async.", exception); + throw new SystemException("Failed to index image record to ES in Async.", exception); + } + }); + } + + private ObjectNode constructSimpleIdSearchQuery(String imageId) { + + ArrayNode valuesNode = genericObjectMapper.createArrayNode(); + valuesNode.add(imageId); + + ObjectNode idsNode = genericObjectMapper.createObjectNode(); + idsNode.set("values", valuesNode); + + ObjectNode queryNode = genericObjectMapper.createObjectNode(); + queryNode.set("ids", idsNode); + + ObjectNode searchQuery= genericObjectMapper.createObjectNode(); + searchQuery.set("query",queryNode); + + return searchQuery; + } + + private ObjectNode createFieldNodeNoAnalyzer(String type) { + ObjectNode fieldNode = genericObjectMapper.createObjectNode(); + fieldNode.put("type", type); + return fieldNode; + } + + private ObjectNode createImageIndexTemplateSettingsNode() { + ObjectNode indexNode = genericObjectMapper.createObjectNode(); + indexNode.put("max_result_window", IMAGE_INDEX_MAX_RESULT_WINDOW); + indexNode.put("number_of_replicas", replicationFactorForImageIndex); + indexNode.put("number_of_shards", numShardsForImageIndex); + + ObjectNode 
settingsNode = genericObjectMapper.createObjectNode(); + settingsNode.set("index", indexNode); + + return settingsNode; + } + + private ObjectNode createImageIndexTemplateMappingNode() { + ObjectNode propertiesNode = genericObjectMapper.createObjectNode(); + propertiesNode.set(ImageRecord.ImageRecordType.IMAGEBLOB.getName(), createFieldNodeNoAnalyzer(FIELD_TYPE_BINARY)); + propertiesNode.set(ImageRecord.ImageRecordType.MTS.getName(), createFieldNodeNoAnalyzer(FIELD_TYPE_DATE)); + + ObjectNode docNode = genericObjectMapper.createObjectNode(); + docNode.set("properties", propertiesNode); + + ObjectNode mappingsNode = genericObjectMapper.createObjectNode(); + mappingsNode.set(imageIndexType, docNode); + + return mappingsNode; + } + + /** Converting static call to instance method call to make this unit testable + * Helper to process the response.
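On the read side, getImageById runs the ids query built by constructSimpleIdSearchQuery above against /argus-image-*/_search; the request body reduces to { "query" : { "ids" : { "values" : [ "<image id>" ] } } }, and the deserializer keeps only the first hit's decoded imageblob.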

+ * @param response ES response + * @return Stringified response + */ + public String extractResponse(Response response) { + return ElasticSearchUtils.extractResponse(response); + } + + + @Override + public void dispose() { + requireNotDisposed(); + shutdownElasticSearchService(); + } + + private void shutdownElasticSearchService(){ + try { + esRestClient.close(); + logger.info("Shutdown of ElasticSearch RESTClient complete"); + } catch (IOException e) { + logger.warn("ElasticSearch RestClient failed to shutdown properly.", e); + } + } + + /* Method to change the rest client. Used for testing. */ + protected void setESRestClient(RestClient restClient){ + this.esRestClient = restClient; + } + + + @VisibleForTesting + static ObjectMapper getImageObjectMapper(JsonSerializer serializer) { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + SimpleModule module = new SimpleModule(); + module.addSerializer(ImageRecord.class, serializer); + module.addDeserializer(ImageRecord.class, new ImageRecord.Deserializer()); + mapper.registerModule(module); + + return mapper; + } + + + /** + * The set of implementation specific configuration properties. + * + */ + public enum Property { + + IMAGE_ES_ENDPOINT("service.property.image.elasticsearch.endpoint", "http://localhost:9200,http://localhost:9201"), + /** Connection timeout for ES REST client. */ + IMAGE_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS("service.property.image.elasticsearch.endpoint.connection.timeout.millis", "10000"), + /** Socket connection timeout for ES REST client. */ + IMAGE_ES_ENDPOINT_SOCKET_TIMEOUT("service.property.image.elasticsearch.endpoint.socket.timeout", "10000"), + /** Connection count for ES REST client. */ + IMAGE_ES_CONNECTION_COUNT("service.property.image.elasticsearch.connection.count", "10"), + /** Replication factor */ + IMAGE_ES_NUM_REPLICAS("service.property.image.elasticsearch.num.replicas", "1"), + /** Shard count */ + IMAGE_ES_SHARDS_COUNT("service.property.image.elasticsearch.shards.count", "6"), + /** Index type */ + IMAGE_ES_INDEX_TYPE("service.property.image.elasticsearch.index.type", "_doc"), + /** Index template name */ + IMAGE_ES_INDEX_TEMPLATE_NAME("service.property.image.elasticsearch.indextemplate.name", "argus-image-template"), + /** Index template pattern match */ + IMAGE_ES_INDEX_TEMPLATE_PATTERN_START("service.property.image.elasticsearch.indextemplate.patternstart", "argus-image"); + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ImageRecord.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ImageRecord.java new file mode 100644 index 000000000..a64cfaadb --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/ImageRecord.java @@ -0,0 +1,190 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. 
Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.image; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.salesforce.dva.argus.util.ImageUtils; + +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * Used to serialize and deserialize image byte array + * + */ +public class ImageRecord { + + private String imageId; + private byte[] imageBytes; + private static ObjectMapper mapper = new ObjectMapper(); + private static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); + + + public ImageRecord(String imageId,byte[] imageBytes) { + this.imageId=imageId; + this.imageBytes=imageBytes; + } + public String getImageId() { + return imageId; + } + + public byte[] getImageBytes() { + return imageBytes; + } + + public static class IndexSerializer extends JsonSerializer { + + + @Override + public void serialize(ImageRecord record, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + // Encoding image byte array to Base64 to store in ES + String imageBlob = ImageUtils.encodeBytesToBase64(record.getImageBytes()); + Long currentTimestamp = System.currentTimeMillis(); + String indexType = ElasticSearchImageService.imageIndexType; + jgen.writeRaw("{ \"index\" : {\"_index\" : \"" + getImageIndex(currentTimestamp) + "\",\"_type\": \"" + indexType + "\",\"_id\" : \"" + record.getImageId() + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + Map fieldsData 
= new HashMap<>(); + fieldsData.put(ImageRecordType.IMAGEBLOB.getName(), imageBlob); + fieldsData.put(ImageRecordType.MTS.getName(), Long.toString(currentTimestamp)); + jgen.writeRaw(mapper.writeValueAsString(fieldsData)); + jgen.writeRaw(System.lineSeparator()); + } + + + /** + * Creating new index for every day + * @param currentTimeStamp Current Time Stamp + * @return Index Name based on TimeStamp + */ + protected String getImageIndex(Long currentTimeStamp) { + Date currentDate = new Date(currentTimeStamp); + String indexNameToAppend = String.format("%s-%s", ElasticSearchImageService.imageIndexTemplatePatternStart, formatter.format(currentDate)); + return indexNameToAppend; + } + } + + public static class Deserializer extends JsonDeserializer { + + @Override + public ImageRecord deserialize(JsonParser jp, DeserializationContext context) + throws IOException { + + List records = new ArrayList<>(); + JsonNode rootNode = jp.getCodec().readTree(jp); + JsonNode hits = rootNode.get("hits").get("hits"); + if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { + records = new ArrayList<>(hits.size()); + Iterator iter = hits.elements(); + while(iter.hasNext()) { + JsonNode hit = iter.next(); + JsonNode source = hit.get("_source"); + JsonNode imageBlob = source.get(ImageRecordType.IMAGEBLOB.getName()); + byte[] decodedImage = ImageUtils.decodeBase64ToBytes(imageBlob.asText()); + records.add(decodedImage); + } + } + + if (records.size()>0) + { + return new ImageRecord(null,records.get(0)); + } + else + { + return new ImageRecord(null,null); + } + + } + } + + /** + * Indicates the Image record field to be used for matching. + * + */ + public static enum ImageRecordType { + + /** Image Blob Field */ + IMAGEBLOB("imageblob"), + /** Modified Time Stamp Field */ + MTS("mts"); + + private String _name; + + private ImageRecordType(String name) { + _name = name; + } + + /** + * Returns a given record type corresponding to the given name. + * + * @param name The case sensitive name to match against. Cannot be null. + * + * @return The corresponding record type or null if no matching record type exists. + */ + @JsonCreator + public static ImageRecordType fromName(String name) { + for (ImageRecordType type : ImageRecordType.values()) { + if (type.getName().equalsIgnoreCase(name)) { + return type; + } + } + + throw new IllegalArgumentException("Illegal record type: " + name); + } + + /** + * Returns the record type name. + * + * @return The record type name. + */ + public String getName() { + return _name; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/NoOperationImageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/NoOperationImageService.java new file mode 100644 index 000000000..44433438d --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/image/NoOperationImageService.java @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. 
Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.image; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.ImageProperties; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.ImageService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.List; + +public class NoOperationImageService extends DefaultService implements ImageService { + + @Inject + public NoOperationImageService(SystemConfiguration config) { + super(config); + } + + @Override + public byte[] generateImage(List metrics, ImageProperties properties) { + return null; + } + + @Override + public String storeImage(byte[] imageBytes, boolean sync) { + return null; + } + + @Override + public Pair generateAndStoreImage(List metrics, ImageProperties properties, boolean sync) { + return null; + } + + @Override + public byte[] getImageById(String imageId) { + return null; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultDashboardService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultDashboardService.java index 86756ac77..4a9b06c08 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultDashboardService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultDashboardService.java @@ -44,6 +44,7 @@ import org.slf4j.Logger; import java.math.BigInteger; import java.util.List; +import java.util.Date; import javax.persistence.EntityManager; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; @@ -109,7 +110,7 @@ public Dashboard findDashboardByPrimaryKey(BigInteger id) { public Dashboard updateDashboard(Dashboard dashboard) { requireNotDisposed(); requireArgument(dashboard != null, "Cannot update a null dashboard"); - + dashboard.setModifiedDate(new Date()); EntityManager em = emf.get(); Dashboard result = mergeEntity(em, dashboard); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultJPAService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultJPAService.java index ea68fbedf..3e63d19d4 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultJPAService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/jpa/DefaultJPAService.java @@ -32,6 +32,7 @@ package com.salesforce.dva.argus.service.jpa; import com.google.inject.Inject; +import 
com.google.inject.Provider; import com.salesforce.dva.argus.entity.Audit; import com.salesforce.dva.argus.entity.Identifiable; import com.salesforce.dva.argus.entity.JPAEntity; @@ -40,7 +41,9 @@ import com.salesforce.dva.argus.system.SystemConfiguration; import java.math.BigInteger; +import java.util.HashMap; import java.util.List; +import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaDelete; @@ -59,6 +62,9 @@ public abstract class DefaultJPAService extends DefaultService { //~ Instance fields ****************************************************************************************************************************** protected final AuditService _auditService; + @Inject + protected Provider emf; + //~ Constructors ********************************************************************************************************************************* @@ -138,6 +144,7 @@ protected E findEntity(EntityManager em, BigInteger id, requireArgument(em != null, "The entity manager cannot be null."); requireArgument(id != null && id.compareTo(ZERO) > 0, "ID must be positive and non-zero"); requireArgument(type != null, "The entity cannot be null."); + em.getEntityManagerFactory().getCache().evictAll(); return em.find(type, id); } @@ -160,5 +167,10 @@ protected List findEntitiesMarkedForDeletion(EntityM em.getEntityManagerFactory().getCache().evictAll(); return JPAEntity.findEntitiesMarkedForDeletion(em, type, limit); } + + public void setEntityManagerProvider(Provider emProv) { + emf = emProv; + } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/DefaultMailService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/DefaultMailService.java index ac9373b5b..6dd9214d8 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/DefaultMailService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/DefaultMailService.java @@ -32,19 +32,23 @@ package com.salesforce.dva.argus.service.mail; import com.google.inject.Inject; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.MailService; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.Set; +import javax.activation.DataHandler; +import javax.activation.DataSource; import javax.mail.Address; import javax.mail.BodyPart; import javax.mail.Message; +import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.Session; import javax.mail.Transport; @@ -53,6 +57,7 @@ import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; +import javax.mail.util.ByteArrayDataSource; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; import static com.salesforce.dva.argus.system.SystemAssert.requireState; @@ -66,8 +71,7 @@ public class DefaultMailService extends DefaultService implements MailService { //~ Instance fields ****************************************************************************************************************************** - 
@SLF4JTypeListener.InjectLogger - private Logger _logger; + private final Logger _logger = LoggerFactory.getLogger(DefaultMailService.class); private final SystemConfiguration _config; //~ Constructors ********************************************************************************************************************************* @@ -100,49 +104,64 @@ private Properties getMailProperties() { } @Override - public void sendMessage(Set to, String subject, String body, String contentType, Priority priority) { + public boolean sendMessage(EmailContext context) { requireState(!isDisposed(), "Cannot call methods on a disposed service."); - requireArgument(to != null && !to.isEmpty(), "Recipients cannot be null or empty."); - if (contentType == null || contentType.isEmpty()) { - contentType = "text; charset=utf-8"; - } - if (priority == null) { - priority = Priority.NORMAL; - } + requireArgument(context.getRecipients() != null && !context.getRecipients().isEmpty(), + "Recipients cannot be null or empty."); + String contentType = (context.getContentType() == null || context.getContentType().isEmpty()) ? + "text; charset=utf-8" : context.getContentType(); + + MailService.Priority priority = (context.getEmailPriority() == null) ? Priority.NORMAL : context.getEmailPriority(); + if (Boolean.valueOf(_config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.EMAIL_ENABLED))) { try { Session session = Session.getInstance(getMailProperties()); MimeMessage message = new MimeMessage(session); message.setFrom(new InternetAddress(_config.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.ADMIN_EMAIL))); - message.setSubject(subject); - message.setRecipients(Message.RecipientType.TO, getEmailToAddresses(to)); + message.setSubject(context.getSubject()); + message.setRecipients(Message.RecipientType.TO, getEmailToAddresses(context.getRecipients())); message.addHeader("X-Priority", String.valueOf(priority.getXPriority())); + Multipart multipart = new MimeMultipart(); BodyPart messageBodyPart1 = new MimeBodyPart(); + messageBodyPart1.setContent(context.getEmailBody(), contentType); + multipart.addBodyPart(messageBodyPart1); - messageBodyPart1.setContent(body, contentType); - - Multipart multipart = new MimeMultipart(); + context.getImageDetails().ifPresent(imageDetail -> { + try { + BodyPart imageBodyPart = new MimeBodyPart(); + DataSource dataSource = new ByteArrayDataSource(imageDetail.getRight(), "image/jpg"); + imageBodyPart.setDataHandler(new DataHandler(dataSource)); + imageBodyPart.setHeader("Content-ID", "<" + imageDetail.getLeft() + ">"); + imageBodyPart.setDisposition(MimeBodyPart.INLINE); + multipart.addBodyPart(imageBodyPart); + } catch (MessagingException e) { + _logger.warn("Unable to embed image into the email with subject" + context.getSubject(), e); + } + }); - multipart.addBodyPart(messageBodyPart1); message.setContent(multipart); Transport transport = session.getTransport(); - transport.connect(_config.getValue(Property.EMAIL_SMTP_HOST.getName(),Property.EMAIL_SMTP_HOST.getDefaultValue()), + transport.connect(_config.getValue(Property.EMAIL_SMTP_HOST.getName(),Property.EMAIL_SMTP_HOST.getDefaultValue()), _config.getValue(Property.EMAIL_SMTP_USERNAME.getName(),Property.EMAIL_SMTP_USERNAME.getDefaultValue()), _config.getValue(Property.EMAIL_SMTP_PASSWORD.getName(), Property.EMAIL_SMTP_PASSWORD.getDefaultValue())); transport.sendMessage(message, message.getAllRecipients()); - _logger.info("Sent email having subject '{}' to {}.", subject, to); + 
_logger.info("Sent email having subject '{}' to {}.", context.getSubject(), context.getRecipients()); + return true; } catch (Exception ex) { - _logger.error("MailService: Failed to send an email notification.", ex); - throw new SystemException("Failed to send an email notification.", ex); + String logMessage = MessageFormat.format("MailService: Failed to send an email notification to {0} .", context.getRecipients()); + _logger.error(logMessage, ex); + throw new SystemException(logMessage, ex); } } else { - _logger.info("Sending email is disabled. Not sending email having subject '{}' to {}.", subject, to); + _logger.warn("Sending email is disabled. Not sending email having subject '{}' to {}.", context.getSubject(), context.getRecipients()); } + + return false; } private Address[] getEmailToAddresses(Set recipientEmailAddresses) throws AddressException { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/EmailContext.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/EmailContext.java new file mode 100644 index 000000000..e41c011af --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mail/EmailContext.java @@ -0,0 +1,95 @@ +package com.salesforce.dva.argus.service.mail; + +import com.salesforce.dva.argus.service.MailService; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.Optional; +import java.util.Set; + +public class EmailContext { + + private Set recipients; + private String subject; + private String emailBody; + private String contentType; + private MailService.Priority emailPriority; + private Pair imageDetails; + + public Set getRecipients() { + return recipients; + } + + public String getSubject() { + return subject; + } + + public String getEmailBody() { + return emailBody; + } + + public String getContentType() { + return contentType; + } + + public MailService.Priority getEmailPriority() { + return emailPriority; + } + + public Optional> getImageDetails() { + return Optional.ofNullable(imageDetails); + } + + private EmailContext(Builder emailContextBuilder) { + this.recipients = emailContextBuilder.recipients; + this.subject = emailContextBuilder.subject; + this.emailBody = emailContextBuilder.emailBody; + this.contentType = emailContextBuilder.contentType; + this.emailPriority = emailContextBuilder.emailPriority; + this.imageDetails = emailContextBuilder.imageDetails; + } + + public static class Builder { + + private Set recipients; + private String subject; + private String emailBody; + private String contentType; + private MailService.Priority emailPriority; + private Pair imageDetails; + + public Builder withRecipients(Set recipients) { + this.recipients = recipients; + return this; + } + + public Builder withSubject(String subject) { + this.subject = subject; + return this; + } + + public Builder withEmailBody(String emailBody) { + this.emailBody = emailBody; + return this; + } + + public Builder withContentType(String contentType) { + this.contentType = contentType; + return this; + } + + public Builder withEmailPriority(MailService.Priority emailPriority) { + this.emailPriority = emailPriority; + return this; + } + + public Builder withImageDetails(Pair imageDetails) { + this.imageDetails = imageDetails; + return this; + } + + public EmailContext build() { + return new EmailContext(this); + } + + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/management/DefaultManagementService.java 
b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/management/DefaultManagementService.java index cfb2d0920..dfa9be2c6 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/management/DefaultManagementService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/management/DefaultManagementService.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service.management; import com.google.inject.Inject; @@ -52,6 +52,7 @@ import com.salesforce.dva.argus.system.SystemException; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.reflect.Method; import java.util.List; @@ -68,8 +69,7 @@ public class DefaultManagementService extends DefaultService implements Manageme //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; + private final Logger _logger = LoggerFactory.getLogger(DefaultManagementService.class); private final WardenService _wardenService; private final MonitorService _monitorService; private final SchedulingService _schedulingService; @@ -92,7 +92,7 @@ public class DefaultManagementService extends DefaultService implements Manageme * @param alertService The alert service. Cannot be null. */ @Inject - DefaultManagementService(WardenService wardenService, MonitorService monitorService, SchedulingService schedulingService, UserService userService, + public DefaultManagementService(WardenService wardenService, MonitorService monitorService, SchedulingService schedulingService, UserService userService, AuditService auditService, HistoryService historyService, AlertService alertService, SystemConfiguration config) { super(config); requireArgument(wardenService != null, "Warden service cannot be null."); @@ -276,7 +276,7 @@ public void cleanupRecords() { _historyService.deleteExpiredHistory(); _auditService.deleteExpiredAudits(); } - + @Override @Transactional public void cleanupDeletedAlerts(int limit) { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/AsyncMetricService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/AsyncMetricService.java index 080a6883b..134d14baf 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/AsyncMetricService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/AsyncMetricService.java @@ -27,8 +27,8 @@ public class AsyncMetricService extends DefaultMetricService { @Inject protected AsyncMetricService(MonitorService monitorService, Provider> metricsprovider, Provider> queryprovider, - BatchService batchService, SystemConfiguration config) { - super(monitorService, metricsprovider, queryprovider, config); + BatchService batchService, MetricQueryProcessor queryProcessor, SystemConfiguration config) { + super(monitorService, queryProcessor, metricsprovider, queryprovider, config); _batchService = batchService; } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ConsumerOffsetMetric.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ConsumerOffsetMetric.java new file mode 100644 index 000000000..300c87fbb --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ConsumerOffsetMetric.java @@ -0,0 +1,238 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. 
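A minimal sketch of how a caller might assemble the EmailContext introduced above and hand it to the reworked MailService.sendMessage. The recipient, subject, and image bytes are hypothetical placeholders; the "cid:" reference in the HTML body relies on the Content-ID header wiring shown in DefaultMailService, and the Priority value is one the patch itself references.

    import com.salesforce.dva.argus.service.MailService;
    import com.salesforce.dva.argus.service.mail.EmailContext;
    import org.apache.commons.lang3.tuple.Pair;

    import java.util.Collections;

    public class EmailContextExample {
        public static void sendChartEmail(MailService mailService, byte[] chartJpg) {
            // The pair's left value becomes the Content-ID; the HTML body references it via cid:.
            Pair<String, byte[]> image = Pair.of("chart-1", chartJpg);
            EmailContext context = new EmailContext.Builder()
                    .withRecipients(Collections.singleton("oncall@example.com")) // hypothetical recipient
                    .withSubject("Alert triggered")
                    .withContentType("text/html; charset=utf-8")
                    .withEmailPriority(MailService.Priority.NORMAL)
                    .withEmailBody("<p>Alert fired.</p><img src=\"cid:chart-1\"/>")
                    .withImageDetails(image)
                    .build();
            // Returns true only when email sending is enabled and the message was dispatched.
            mailService.sendMessage(context);
        }
    }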
+ * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.metric; + +import com.salesforce.dva.argus.entity.Metric; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Entity to model the storage of offset metrics. + * @author sudhanshu.bahety + */ +public class ConsumerOffsetMetric { + + private String metric; + private String topic; + private Long time; + private Double value; + private Map tags; + + private static final String TAG_TOPIC = "topic"; + private static final String TAG_SERVICE = "service"; + private static final String ARGUS_CORE = "argus.core"; + + private static final Logger logger = LoggerFactory.getLogger(ConsumerOffsetMetric.class); + + /** + * + * @param metric name of the metric + * @param topic name of the topic + * @param time time when metric was emitted + * @param value offset number for the topic at time + * @param tags additional tags + */ + public ConsumerOffsetMetric(String metric, String topic, Long time, Double value, Map tags) { + requireArgument(metric != null && !metric.isEmpty(), "Metric cannot be null or empty"); + requireArgument(topic != null && !topic.isEmpty(), "Topic cannot be null or empty"); + requireArgument(time != null, "Time cannot be null"); + requireArgument(value != null, "Value cannot be null"); + this.metric = metric; + this.topic = topic; + this.tags = tags; + this.time = time; + this.value = value; + } + + /** + * + * @param m TSDB entity metric to be converted to consumer offset metric. 
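As a quick illustration of the primary constructor above, an offset sample can be built directly when its parts are already decomposed; the topic name and tag below are made-up values.

    import java.util.HashMap;
    import java.util.Map;

    public class OffsetSampleExample {
        public static ConsumerOffsetMetric build() {
            Map<String, String> tags = new HashMap<>();
            tags.put("consumer", "groupA"); // hypothetical tag
            // Constructor validation rejects null/empty metric, topic, time, or value.
            return new ConsumerOffsetMetric(
                    "metric.consumer.lag", "ajna__topicA", System.currentTimeMillis(), 1024.0, tags);
        }
    }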
+     */
+    public ConsumerOffsetMetric(Metric m) {
+        validateMetric(m);
+        this.topic = m.getTag(TAG_TOPIC);
+        m.removeTag(TAG_TOPIC);
+        m.setTag(TAG_SERVICE, m.getScope());
+
+        this.metric = m.getMetric();
+        this.tags = new HashMap<>(m.getTags());
+
+        m.getDatapoints().forEach((time, value) -> {
+            this.time = time;
+            this.value = value;
+        });
+    }
+
+    /**
+     *
+     * @return The converted TSDB metric entity.
+     */
+    public Metric convertToMetric() {
+        Metric m = new Metric(ARGUS_CORE, this.metric);
+        if (this.tags.containsKey(TAG_SERVICE)) {
+            m.setScope(tags.get(TAG_SERVICE));
+        }
+        this.tags.remove(TAG_SERVICE);
+        m.setTags(this.tags);
+        m.setTag(TAG_TOPIC, this.topic);
+        m.addDatapoint(this.time, this.value);
+        return m;
+    }
+
+    /**
+     *
+     * @param metricList The list of TSDB metrics that need to be converted
+     * @return The list of consumer offset metrics after conversion.
+     */
+    public static List<ConsumerOffsetMetric> convertToConsumerOffsetMetrics(List<Metric> metricList) {
+        List<ConsumerOffsetMetric> cOMetricList = new ArrayList<>();
+        metricList.forEach(m -> {
+            try {
+                cOMetricList.add(new ConsumerOffsetMetric(m));
+            } catch (Exception ex) {
+                logger.error("Failed converting Metric {} to Consumer Offset Metric", m, ex);
+            }
+        });
+
+        return cOMetricList;
+    }
+
+    /**
+     *
+     * @param m The metric to validate; it must carry the topic tag and exactly one datapoint.
+     */
+    private static void validateMetric(Metric m) {
+        requireArgument(m != null, "Cannot convert null Metric to Consumer Offset Metric");
+        requireArgument(m.getTag(TAG_TOPIC) != null, "Topic tag not present in Metric");
+        requireArgument(m.getNumOfDatapoints() == 1, "A metric must contain exactly one datapoint to be converted.");
+    }
+
+    /**
+     *
+     * @param consumerOffsetMetrics The list of consumer offset metrics
+     * @return The list of converted TSDB metrics
+     */
+    public static List<Metric> convertToMetrics(List<ConsumerOffsetMetric> consumerOffsetMetrics) {
+        Map<String, Metric> metricMap = new HashMap<>();
+        consumerOffsetMetrics.forEach(cOMetric -> {
+            try {
+                Metric metric = cOMetric.convertToMetric();
+                String identifier = metric.getIdentifier();
+                if (!metricMap.containsKey(identifier)) {
+                    metricMap.put(identifier, metric);
+                } else {
+                    metricMap.get(identifier).addDatapoint(cOMetric.getTime(), cOMetric.getValue());
+                }
+            } catch (Exception ex) {
+                logger.error("Failed while converting Consumer Offset Metric {} to Metric", cOMetric, ex);
+            }
+        });
+        return new ArrayList<>(metricMap.values());
+    }
+
+    /**
+     *
+     * @param consumerOffsetMetric An instance of the consumer offset metric
+     * @return A unique identifier for hashing purposes.
+     */
+    public static String getIdentifierFieldsAsString(ConsumerOffsetMetric consumerOffsetMetric) {
+        return new StringBuilder(consumerOffsetMetric.getMetric()).append(":")
+                .append(consumerOffsetMetric.getTopic())
+                .append(consumerOffsetMetric.getTime())
+                .append(consumerOffsetMetric.getValue())
+                .append(consumerOffsetMetric.getTags()).toString();
+    }
+
+    @Override
+    public String toString() {
+        String format = "ConsumerOffsetMetric(metric=>{0}, topic=>{1}, time=>{2,number,#}, value=>{3,number,#}, tags=>{4})";
+        Object[] params = {getMetric(), getTopic(), getTime(), getValue(), getTags()};
+        return MessageFormat.format(format, params);
+    }
+
+    /*
+     Getter and Setter functions.
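A sketch of the round trip between Metric and ConsumerOffsetMetric as implemented above: the "topic" tag moves out of the tag map on the way in, and the scope is stashed under the "service" tag so convertToMetric can restore it.

    import com.salesforce.dva.argus.entity.Metric;

    public class OffsetRoundTripExample {
        public static Metric roundTrip(Metric source) {
            // source must carry a "topic" tag and exactly one datapoint, or validateMetric throws.
            ConsumerOffsetMetric offset = new ConsumerOffsetMetric(source);
            // Restores the scope from the "service" tag and re-attaches the "topic" tag.
            return offset.convertToMetric();
        }
    }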
+ */ + public String getMetric() { + return metric; + } + + public void setMetric(String _metric) { + this.metric = _metric; + } + + public String getTopic() { + return topic; + } + + public void setTopic(String _topic) { + this.topic = _topic; + } + + public Map getTags() { + return Collections.unmodifiableMap(new TreeMap<>(this.tags)); + } + + public void setTags(Map _tags) { + this.tags = _tags; + } + + public void setTag(String _key, String _value) { + this.tags.put(_key, _value); + } + + public Long getTime() { + return time; + } + + public void setTime(Long _time) { + this.time = _time; + } + + public Double getValue() { + return value; + } + + public void setValue(Double _value) { + this.value = _value; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/DefaultMetricService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/DefaultMetricService.java index 73cbe3793..93594276b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/DefaultMetricService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/DefaultMetricService.java @@ -38,25 +38,31 @@ import com.salesforce.dva.argus.service.MetricService; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.MonitorService.Counter; +import com.salesforce.dva.argus.service.TSDBService.QueryStartTimeWindow; +import com.salesforce.dva.argus.service.TSDBService.QueryTimeSeriesExpansion; +import com.salesforce.dva.argus.service.TSDBService.QueryTimeWindow; import com.salesforce.dva.argus.service.tsdb.MetricQuery; import com.salesforce.dva.argus.system.SystemAssert; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; - -import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.util.QueryContext; import com.salesforce.dva.argus.util.QueryContextHolder; - -import org.apache.commons.lang.exception.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Set; -import java.util.HashSet; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; @@ -74,10 +80,9 @@ public class DefaultMetricService extends DefaultService implements MetricServic private final Provider> _metricReaderProviderForMetrics; private final Provider> _metricReaderProviderForQueries; private final SystemConfiguration _configuration; - private String expandedTimeSeriesRange; - private String queryTimeWindow; - private Integer numDiscoveryResults = 0; - private Integer numDiscoveryQueries = 0; + private MetricQueryProcessor _queryProcessor; + private String dcListRegex; + private Double DOUBLE_ZERO=0.0d; //~ Constructors ********************************************************************************************************************************* @@ -85,12 +90,13 @@ public class DefaultMetricService extends DefaultService implements MetricServic * Creates a new DefaultMetricService object. * * @param monitorService The monitor service instance to use. Cannot be null. 
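Note that getTags() above hands back a sorted, unmodifiable snapshot, so callers mutate only through setTag/setTags; a short sketch of the contract:

    import java.util.Map;

    public class OffsetTagsExample {
        public static void demonstrate(ConsumerOffsetMetric sample) {
            sample.setTag("device", "host1");             // mutates the internal map
            Map<String, String> view = sample.getTags();  // sorted, read-only snapshot
            // view.put("k", "v");                        // would throw UnsupportedOperationException
        }
    }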
+ * @param queryProcessor The metric query processor used to evaluate queries * @param metricsprovider The metric reader provider used to perform metric operations. Cannot be null. * @param queryprovider The metric reader provider used to construct metric queries without fetching data. Cannot be null. * @param config The system configuration. Cannot be null. */ @Inject - protected DefaultMetricService(MonitorService monitorService, Provider> metricsprovider, + public DefaultMetricService(MonitorService monitorService, MetricQueryProcessor queryProcessor, Provider> metricsprovider, Provider> queryprovider, SystemConfiguration config) { super(config); requireArgument(monitorService != null, "Monitor service cannot be null."); @@ -98,56 +104,72 @@ protected DefaultMetricService(MonitorService monitorService, Provider getMetrics(String expression) { + public MetricQueryResult getMetrics(String expression) { requireNotDisposed(); return getMetrics(expression, System.currentTimeMillis()); } @Override - public List getMetrics(String expression, long relativeTo) { + public MetricQueryResult getMetrics(String expression, long relativeTo) { requireNotDisposed(); return getMetrics(Arrays.asList(new String[] { expression }), relativeTo); } @Override - public List getMetrics(List expressions) { + public MetricQueryResult getMetrics(List expressions) { requireNotDisposed(); return getMetrics(expressions, System.currentTimeMillis()); } @Override - public List getMetrics(List expressions, long relativeTo) { + public MetricQueryResult getMetrics(List expressions, long relativeTo) { requireNotDisposed(); SystemAssert.requireArgument(MetricReader.isValid(expressions), "Illegal metric expression found: " + expressions); + final long start = System.currentTimeMillis(); MetricReader reader = _metricReaderProviderForMetrics.get(); - List metrics = new ArrayList<>(expressions.size()); - + MetricQueryResult queryResult = new MetricQueryResult(); try { - numDiscoveryResults = 0; - numDiscoveryQueries = 0; for (String expression : expressions) { _logger.debug("Reading metric for expression {}", expression); - metrics.addAll(reader.parse(expression, relativeTo, Metric.class, new QueryContextHolder(), false)); - expandedTimeSeriesRange = reader.getExpandedTimeSeriesRange(); - queryTimeWindow = reader.getQueryTimeWindow(); - numDiscoveryResults += reader.getNumDiscoveryResults(); - numDiscoveryQueries += reader.getNumDiscoveryQueries(); + QueryContextHolder currCtxHolder = new QueryContextHolder(); + reader.parse(expression, relativeTo, Metric.class, currCtxHolder, true); + _queryProcessor.mergeQueryResults(queryResult, _queryProcessor.evaluateQuery(currCtxHolder.getCurrentQueryContext(), relativeTo)); } } catch (ParseException ex) { throw new SystemException("Failed to parse the given expression", ex); - } catch(Exception ex) { - _logger.error("Exception occured when evaluating metric expressions {} - {}" , Arrays.toString(expressions.toArray()), ExceptionUtils.getFullStackTrace(ex)); - throw ex; } - _monitorService.modifyCounter(Counter.DATAPOINT_READS, _getDatapointsAcrossMetrics(metrics), null); - return metrics; + // Removing metrics which has no datapoints and handle Default TSDB min value + List metrics = queryResult.getMetricsList(); + if (metrics!=null) { + Iterator metricIterator = metrics.iterator(); + while (metricIterator.hasNext()) { + Metric metric = metricIterator.next(); + if (metric.getDatapoints()==null || metric.getDatapoints().size() == 0) { + metricIterator.remove(); + }else 
if(metric.getDatapoints().values().contains(Double.MIN_VALUE)) { + metric.setDatapoints(setMinValuesToZero(metric.getDatapoints())); + } + } + queryResult.setMetricsList(metrics); + } + + _monitorService.modifyCounter(Counter.DATAPOINT_READS, _getDatapointsAcrossMetrics(queryResult.getMetricsList()), null); + queryResult.setExpandedTimeSeriesRange(QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(queryResult.getNumTSDBResults())); + queryResult.setQueryStartTimeWindow(QueryStartTimeWindow.getWindow(relativeTo - queryResult.getQueryStartTimeMillis())); + queryResult.setQueryTimeWindow(QueryTimeWindow.getWindow(queryResult.getQueryTimeRangeInMillis())); + + final long time = System.currentTimeMillis() - start; + _monitorService.modifyCounter(Counter.METRICS_GETMETRICS_LATENCY, time, null); + _monitorService.modifyCounter(Counter.METRICS_GETMETRICS_COUNT, expressions.size(), null); + return queryResult; } @Override @@ -193,35 +215,31 @@ public List getQueries(List expressions, long relativeTo) { } @Override - public String getExpandedTimeSeriesRange() - { - { - return expandedTimeSeriesRange; - } - } - - @Override - public String getQueryTimeWindow() - { - { - return queryTimeWindow; - } - } - - @Override - public Integer getNumDiscoveryResults() - { - { - return numDiscoveryResults; - } + public List parseToMetricQuery(String expressions, long relativeTo) { + requireNotDisposed(); + SystemAssert.requireArgument(MetricReader.isValid(expressions), "Illegal metric expression found: " + expressions); + return parseToMetricQuery(Arrays.asList(expressions), relativeTo); } - + + @Override - public Integer getNumDiscoveryQueries() - { - { - return numDiscoveryQueries; + public List parseToMetricQuery(List expressions, long relativeTo) { + requireNotDisposed(); + + MetricReader reader = _metricReaderProviderForQueries.get(); + List queries = new ArrayList<>(); + + try { + for (String expression : expressions) { + _logger.debug("Parsing expression to metric query for {}", expression); + QueryContextHolder contextHolder = new QueryContextHolder(); + reader.parse(expression, relativeTo, MetricQuery.class, contextHolder, false); + queries.add(_queryProcessor.convertTSDBQueryToMetricQuery(contextHolder.getCurrentQueryContext().getExpression())); + } + } catch (ParseException ex) { + throw new SystemException("Failed to parse the given expression", ex); } + return queries; } @Override @@ -239,34 +257,75 @@ private long _getDatapointsAcrossMetrics(List metrics) { return dataPointsSize; } - public List getDCFromExpression(String expression) { - Set DC = new HashSet(); + private String getMatchedDCAgainstRegex(String scope, String regex) { + + if(scope == null || scope.isEmpty()) { + _logger.warn("Can't retrieve DC from empty scope."); + return null; + } + Matcher m; - String dcList = _configuration.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.DC_LIST).replaceAll(",","|"); - String defaultDC = _configuration.getValue(SystemConfiguration.Property.DC_DEFAULT); - ArrayList patterns = new ArrayList<>(Arrays.asList( "\\.(?i)(" + dcList + ")\\.", - ":argus\\.")); - - if (expression == null || expression.isEmpty()) { - _logger.error("Expression either null or empty. Cannot retrive DC from the expression. Returning default value " + defaultDC); - DC.add(defaultDC); - } else { - // Get all the expanded queries from MetricService and identify all different DCs. 
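A sketch of driving the new parseToMetricQuery path, which resolves expressions into MetricQuery objects without fetching datapoints (useful for validation and DC routing). This assumes the MetricService interface now exposes parseToMetricQuery as the @Override here implies; the expression string is hypothetical.

    import com.salesforce.dva.argus.service.MetricService;
    import com.salesforce.dva.argus.service.tsdb.MetricQuery;

    import java.util.List;

    public class ParseOnlyExample {
        public static void inspect(MetricService metricService) {
            // Parses the expression into queries without issuing any TSDB reads.
            List<MetricQuery> queries = metricService.parseToMetricQuery(
                    "-1h:system.dc1:app.latency:avg", System.currentTimeMillis()); // hypothetical expression
            queries.forEach(q -> System.out.println(q.getScope() + ":" + q.getMetric()));
        }
    }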
- List queries = getQueries(expression); - for(MetricQuery currentQuery: queries) { - m = Pattern.compile(patterns.get(0)).matcher(currentQuery.getScope()); - while (m.find()) DC.add(m.group().substring(1, m.group().length() - 1).toUpperCase()); - - // If it matches ":argus., then it's argus metrics emitted by PRD." - m = Pattern.compile(patterns.get(1)).matcher(currentQuery.getScope()); - if(m.find()) DC.add(defaultDC); + m = Pattern.compile(regex).matcher(scope); + while (m.find()) { + String dc = m.group().substring(1, m.group().length() - 1).toUpperCase(); + _logger.debug(MessageFormat.format("Retrieved DC: {0} from scope: {1}", dc, scope)); + return dc; + } + return null; + } + + @Override + public List extractDCFromMetricQuery(List mQList) { + Set dcList = new HashSet<>(); + + for (MetricQuery mQ: mQList) { + String currentDC = getDCFromScope(mQ.getScope()); //TODO: If the dc gets transferred to tags, we need to update this. + if (currentDC != null) { + dcList.add(currentDC); + } + } + + _logger.debug("DCs detected: {}", dcList); + return new ArrayList<>(dcList); + } + + @Override + public String extractDCFromMetric(Metric m) { + return getDCFromScope(m.getScope()); //TODO: If the dc gets transferred to tags, we need to update this. + } + + private String getDCFromScope(String scope) { + + if(scope == null || scope.isEmpty()) { + _logger.warn("Can't retrieve DC from empty scope."); + return null; + } + + try { + String dc = getMatchedDCAgainstRegex(scope, "\\.(?i)(" + dcListRegex + ")\\."); + + if (dc != null) { + return dc; + } else { + _logger.debug(MessageFormat.format("Unable to identify dc from scope: {0}", scope)); + return null; } + } catch (Exception ex) { + _logger.error("Unable to retrieve DC from scope. Exception: {0}", ex); + return null; } - if (DC.size() == 0) { - _logger.info("Unable to identify DC from expression: " + expression +" . Returning default value PRD " + defaultDC); - DC.add(defaultDC); + } + + private Map setMinValuesToZero(Map datapoints){ + Map result = new HashMap<>(); + for(Entry entry:datapoints.entrySet()) { + if(entry.getValue()==Double.MIN_VALUE) { + result.put(entry.getKey(), DOUBLE_ZERO); + }else { + result.put(entry.getKey(), entry.getValue()); + } } - return new ArrayList<>(DC); + return result; } } - /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file + /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
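To make the DC matching above concrete: getMatchedDCAgainstRegex expects the data center token to appear dot-delimited inside the scope and matches case-insensitively. A standalone sketch of the same pattern, with a made-up DC list and scope:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class DcRegexExample {
        public static void main(String[] args) {
            String dcListRegex = "DC1|DC2";                        // would come from SystemConfiguration in practice
            Matcher m = Pattern.compile("\\.(?i)(" + dcListRegex + ")\\.")
                               .matcher("system.dc1.storage");     // hypothetical scope
            if (m.find()) {
                // Strips the surrounding dots and upper-cases, mirroring getDCFromScope.
                System.out.println(m.group().substring(1, m.group().length() - 1).toUpperCase()); // prints DC1
            }
        }
    }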
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsService.java new file mode 100644 index 000000000..be29f1964 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsService.java @@ -0,0 +1,506 @@ +package com.salesforce.dva.argus.service.metric; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.MetricStorageService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.service.schema.ConsumerOffsetRecordList; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils.HashAlgorithm; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; +import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +@Singleton +public class ElasticSearchConsumerOffsetMetricsService extends DefaultService implements MetricStorageService { + + private static Logger logger = LoggerFactory.getLogger(ElasticSearchConsumerOffsetMetricsService.class); + private static ObjectMapper genericObjectMapper = new ObjectMapper(); + + protected final MonitorService monitorService; + + /** Global ES properties */ + private static final int MAX_RETRY_TIMEOUT = 300_000; + public static final int INDEX_MAX_RESULT_WINDOW = 10000; + public static final String DATE_FORMAT = "yyyy-MM-dd"; + public static final String INDEX_FORMAT = "%s-%s"; + + private static final String FIELD_TYPE_TEXT = "text"; + private static final String FIELD_TYPE_DATE = "date"; + private static final String FIELD_TYPE_LONG = "long"; + private static final String FIELD_TYPE_KEYWORD = "keyword"; + + private RestClient 
esRestClient; + + /** Index properties */ + private HashAlgorithm idgenHashAlgo; + private static String INDEX_TEMPLATE_NAME; + private final int REPLICATION_FACTOR; + private final int NUM_SHARDS; + private final ObjectMapper metricMapper; + public static String INDEX_TEMPLATE_PATTERN_START; + public static final String SCOPE_NAME = "ajna.consumer"; + public static final String METRIC_NAME = "metric.consumer.lag"; + public static final String requestUrl = "_bulk"; + + + private static final String EXCEPTION_MESSAGE = "Your query returns {0} or more metrics." + + " Please modify your query by reducing the time window."; + + @Inject + protected ElasticSearchConsumerOffsetMetricsService(SystemConfiguration config, MonitorService monitorService, ElasticSearchUtils esUtils) { + super(config); + this.monitorService = monitorService; + /** Setup Global ES stuff */ + String algorithm = config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_IDGEN_HASH_ALGO.getName(), Property.CONSUMER_OFFSET_METRICS_ES_IDGEN_HASH_ALGO.getDefaultValue()); + try { + idgenHashAlgo = HashAlgorithm.fromString(algorithm); + } catch(IllegalArgumentException e) { + logger.warn("{} is not supported by this service. Valid values are: {}.", algorithm, Arrays.asList(HashAlgorithm.values())); + idgenHashAlgo = HashAlgorithm.MD5; + } + logger.info("Using {} for Elasticsearch document id generation.", idgenHashAlgo); + + String[] nodes = config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT.getName(), Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT.getDefaultValue()).split(","); + HttpHost[] httpHosts = new HttpHost[nodes.length]; + for(int i=0; i { + try { + int connCount = Integer.parseInt(config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_CONNECTION_COUNT.getName(), + Property.CONSUMER_OFFSET_METRICS_ES_CONNECTION_COUNT.getDefaultValue())); + PoolingNHttpClientConnectionManager connMgr = + new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); + connMgr.setMaxTotal(connCount); + int connCountPerRoute = connCount/httpHosts.length; + connMgr.setDefaultMaxPerRoute(connCountPerRoute < 1 ? 
1:connCountPerRoute); + httpClientBuilder.setConnectionManager(connMgr); + return httpClientBuilder; + } catch(Exception e) { + throw new SystemException(e); + } + }; + RestClientBuilder.RequestConfigCallback requestConfigCallback = requestConfigBuilder -> { + int connTimeout = Integer.parseInt(config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getName(), + Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS.getDefaultValue())); + int socketTimeout = Integer.parseInt(config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT_SOCKET_TIMEOUT.getName(), + Property.CONSUMER_OFFSET_METRICS_ES_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); + requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); + + logger.info("esRestClient set connectionTimeoutMillis {} socketTimeoutMillis {}", + connTimeout, socketTimeout); + + return requestConfigBuilder; + }; + esRestClient = RestClient.builder(httpHosts) + .setHttpClientConfigCallback(clientConfigCallback) + .setRequestConfigCallback(requestConfigCallback) + .setMaxRetryTimeoutMillis(MAX_RETRY_TIMEOUT) + .build(); + logger.info("esRestClient set MaxRetryTimeoutsMillis {}", MAX_RETRY_TIMEOUT); + + /** Set up akc consumer offset index stuff */ + metricMapper = getMetricObjectMapper(new ConsumerOffsetRecordList.IndexSerializer(), new ConsumerOffsetRecordList.Deserializer()); + INDEX_TEMPLATE_NAME = config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_NAME.getName(), + Property.CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_NAME.getDefaultValue()); + INDEX_TEMPLATE_PATTERN_START = config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_PATTERN_START.getName(), + Property.CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_PATTERN_START.getDefaultValue()); + REPLICATION_FACTOR = Integer.parseInt( + config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_NUM_REPLICAS.getName(), Property.CONSUMER_OFFSET_METRICS_ES_NUM_REPLICAS.getDefaultValue())); + NUM_SHARDS = Integer.parseInt( + config.getValue(Property.CONSUMER_OFFSET_METRICS_ES_SHARDS_COUNT.getName(), Property.CONSUMER_OFFSET_METRICS_ES_SHARDS_COUNT.getDefaultValue())); + esUtils.createIndexTemplate(esRestClient, + INDEX_TEMPLATE_NAME, + INDEX_TEMPLATE_PATTERN_START, + ()-> createIndexTemplateSettingsNode(REPLICATION_FACTOR, NUM_SHARDS), + () -> createMappingsNode()); + } + + /** + * The set of implementation specific configuration properties. + * + */ + public enum Property { + + CONSUMER_OFFSET_METRICS_ES_ENDPOINT("service.property.akc.consumer.offset.elasticsearch.endpoint", "http://localhost:9200,http://localhost:9201"), + /** Connection timeout for ES REST client. */ + CONSUMER_OFFSET_METRICS_ES_ENDPOINT_CONNECTION_TIMEOUT_MILLIS("service.property.akc.consumer.offset.elasticsearch.endpoint.connection.timeout", "10000"), + /** Socket connection timeout for ES REST client. */ + CONSUMER_OFFSET_METRICS_ES_ENDPOINT_SOCKET_TIMEOUT("service.property.akc.consumer.offset.elasticsearch.endpoint.socket.timeout", "10000"), + /** Connection count for ES REST client. */ + CONSUMER_OFFSET_METRICS_ES_CONNECTION_COUNT("service.property.akc.consumer.offset.elasticsearch.connection.count", "10"), + /** The hashing algorithm to use for generating document id. 
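For operators, a sketch of how the properties enumerated here might appear in the Argus configuration file; the hosts, counts, and timeouts below are illustrative values, not recommendations.

    service.property.akc.consumer.offset.elasticsearch.endpoint=http://es1.example.com:9200,http://es2.example.com:9200
    service.property.akc.consumer.offset.elasticsearch.endpoint.connection.timeout=10000
    service.property.akc.consumer.offset.elasticsearch.endpoint.socket.timeout=10000
    service.property.akc.consumer.offset.elasticsearch.connection.count=20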
*/ + CONSUMER_OFFSET_METRICS_ES_IDGEN_HASH_ALGO("service.property.akc.consumer.offset.elasticsearch.idgen.hash.algo", "MD5"), + /** Replication factor */ + CONSUMER_OFFSET_METRICS_ES_NUM_REPLICAS("service.property.akc.consumer.offset.elasticsearch.num.replicas", "1"), + /** Shard count */ + CONSUMER_OFFSET_METRICS_ES_SHARDS_COUNT("service.property.akc.consumer.offset.elasticsearch.shards.count", "3"), + /** Index template name */ + CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_NAME("service.property.akc.consumer.offset.elasticsearch.indextemplate.name", "argus-akc-consumer-offset-template"), + /** Index template pattern match */ + CONSUMER_OFFSET_METRICS_ES_INDEX_TEMPLATE_PATTERN_START("service.property.akc.consumer.offset.elasticsearch.indextemplate.patternstart", "argus-akc-consumer-offset"); + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } + + /* Method to change the rest client. Used for testing. */ + protected void setESRestClient(RestClient restClient){ + this.esRestClient = restClient; + } + + private ObjectNode createMappingsNode() { + ObjectNode propertiesNode = genericObjectMapper.createObjectNode(); + propertiesNode.set(ConsumerOffsetRecordList.ConsumerOffsetRecordType.METRIC.getName(), createFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(ConsumerOffsetRecordList.ConsumerOffsetRecordType.TOPIC.getName(), createFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + propertiesNode.set(ConsumerOffsetRecordList.ConsumerOffsetRecordType.VALUE.getName(), _createFieldNodeNoAnalyzer(FIELD_TYPE_LONG)); + propertiesNode.set(ConsumerOffsetRecordList.ConsumerOffsetRecordType.TIMESERIES.getName(), _createFieldNodeNoAnalyzer(FIELD_TYPE_DATE)); + propertiesNode.set(ConsumerOffsetRecordList.ConsumerOffsetRecordType.TAGS.getName(), createFieldNodeAnalyzer(FIELD_TYPE_TEXT)); + + ObjectNode docNode = genericObjectMapper.createObjectNode(); + docNode.set("properties", propertiesNode); + + ObjectNode mappingsNode = genericObjectMapper.createObjectNode(); + mappingsNode.set("_doc", docNode); + + logger.info("Setting up Mapping for Consumer Offset Metric Service - {}", mappingsNode.toString()); + return mappingsNode; + } + + private ObjectNode _createFieldNodeNoAnalyzer(String type) { + ObjectNode fieldNode = genericObjectMapper.createObjectNode(); + fieldNode.put("type", type); + return fieldNode; + } + + private ObjectNode createFieldNodeAnalyzer(String type) { + ObjectNode fieldNode = genericObjectMapper.createObjectNode(); + fieldNode.put("type", type); + fieldNode.put("analyzer", "akc-consumer-offset_analyzer"); + ObjectNode keywordNode = genericObjectMapper.createObjectNode(); + keywordNode.put("type", FIELD_TYPE_KEYWORD); + ObjectNode fieldsNode = genericObjectMapper.createObjectNode(); + fieldsNode.set("raw", keywordNode); + fieldNode.set("fields", fieldsNode); + return fieldNode; + } + + private ObjectNode createIndexTemplateSettingsNode(int replicationFactor, int numShards) { + ObjectNode metricAnalyzer = genericObjectMapper.createObjectNode(); + metricAnalyzer.put("tokenizer", "akc-consumer-offset_tokenizer"); + metricAnalyzer.set("filter", 
genericObjectMapper.createArrayNode().add("lowercase")); + + ObjectNode analyzerNode = genericObjectMapper.createObjectNode(); + analyzerNode.set("akc-consumer-offset_analyzer", metricAnalyzer); + + ObjectNode tokenizerNode = genericObjectMapper.createObjectNode(); + tokenizerNode.set("akc-consumer-offset_tokenizer", genericObjectMapper.createObjectNode().put("type", "pattern").put("pattern", ElasticSearchUtils.TOKENIZER_PATTERN)); + + ObjectNode analysisNode = genericObjectMapper.createObjectNode(); + analysisNode.set("analyzer", analyzerNode); + analysisNode.set("tokenizer", tokenizerNode); + + ObjectNode indexNode = genericObjectMapper.createObjectNode(); + indexNode.put("max_result_window", INDEX_MAX_RESULT_WINDOW); + indexNode.put("number_of_replicas", replicationFactor); + indexNode.put("number_of_shards", numShards); + + ObjectNode settingsNode = genericObjectMapper.createObjectNode(); + settingsNode.set("analysis", analysisNode); + settingsNode.set("index", indexNode); + + logger.info("Setting up Index Template for Consumer Offset Metric Service - {}", settingsNode.toString()); + return settingsNode; + } + + @Override + public void dispose() { + requireNotDisposed(); + shutdownElasticSearchService(); + } + + private void shutdownElasticSearchService(){ + try { + esRestClient.close(); + logger.info("Shutdown of ElasticSearch RESTClient complete"); + } catch (IOException e) { + logger.warn("ElasticSearch RestClient failed to shutdown properly.", e); + } + } + + @VisibleForTesting + static ObjectMapper getMetricObjectMapper(JsonSerializer serializer, JsonDeserializer deserializer) { + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + SimpleModule module = new SimpleModule(); + module.addSerializer(ConsumerOffsetRecordList.class, serializer); + module.addDeserializer(ConsumerOffsetRecordList.class, deserializer); + mapper.registerModule(module); + + return mapper; + } + + @Override + public void putMetrics(List metrics) { + requireArgument(metrics.size() > 0, "Cannot push empty list of metrics to ES."); + try { + final long start = System.currentTimeMillis(); + ConsumerOffsetRecordList indexRecordList = new ConsumerOffsetRecordList(ConsumerOffsetMetric.convertToConsumerOffsetMetrics(metrics), idgenHashAlgo); + String requestBody = metricMapper.writeValueAsString(indexRecordList); + Set failedRecords = new HashSet<>(); + ElasticSearchUtils.PutResponse putResponse = ElasticSearchUtils.performESRequest(esRestClient, requestUrl, requestBody); + final long latency = System.currentTimeMillis() - start; + monitorService.modifyCounter(MonitorService.Counter.CONSUMER_OFFSET_RECORDS_WRITE_LATENCY, latency, null); + + if(putResponse.isErrors()) { + for(ElasticSearchUtils.PutResponse.Item item : putResponse.getItems()) { + if (item.getIndex() != null && item.getIndex().getStatus() != HttpStatus.SC_CREATED) { + logger.warn("Failed to add record {} to index. 
Reason: {}",
+                                indexRecordList.getRecord(item.getIndex().get_id()),
+                                metricMapper.writeValueAsString(item.getIndex().getError()));
+                        failedRecords.add(indexRecordList.getRecord(item.getIndex().get_id()));
+                    }
+                }
+            }
+
+            if (failedRecords.size() > 0) {
+                logger.warn("{} records were not written to Consumer Offset ES", failedRecords.size());
+                monitorService.modifyCounter(MonitorService.Counter.CONSUMER_OFFSET_RECORDS_WRITE_FAILURES, failedRecords.size(), null);
+            }
+
+        } catch (IOException e) {
+            throw new SystemException("Failed to index consumer offset metric to ES.", e);
+        }
+    }
+
+    @Override
+    public Map<MetricQuery, List<Metric>> getMetrics(List<MetricQuery> queries) {
+        requireNotDisposed();
+        requireArgument(queries != null, "ConsumerOffset queries cannot be null.");
+        int from = 0, scrollSize = INDEX_MAX_RESULT_WINDOW, numOfMetrics = 0;
+
+        Map<MetricQuery, List<Metric>> result = new HashMap<>();
+        String requestUrl = String.format("/%s-*/_search", INDEX_TEMPLATE_PATTERN_START);
+        try {
+            for (MetricQuery query : queries) {
+                List<ConsumerOffsetMetric> consumerOffsetMetrics = new ArrayList<>();
+                String queryJson = constructQuery(new MetricQuery(query), from, scrollSize);
+                final long start = System.currentTimeMillis();
+                Request request = new Request(ElasticSearchUtils.HttpMethod.POST.getName(), requestUrl);
+                request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON));
+                Response response = esRestClient.performRequest(request);
+                final long latency = System.currentTimeMillis() - start;
+                monitorService.modifyCounter(MonitorService.Counter.CONSUMER_OFFSET_RECORDS_READ_LATENCY, latency, null);
+                logger.debug("ES get request for consumer offset completed in {} ms", latency);
+                String extractResponse = ElasticSearchUtils.extractResponse(response);
+                ConsumerOffsetRecordList recordList = ElasticSearchUtils.toEntity(extractResponse, new TypeReference<ConsumerOffsetRecordList>() {}, metricMapper);
+                consumerOffsetMetrics.addAll(recordList.getRecords());
+                result.put(query, ConsumerOffsetMetric.convertToMetrics(consumerOffsetMetrics));
+
+                numOfMetrics += consumerOffsetMetrics.size();
+                if (numOfMetrics >= scrollSize) {
+                    logger.error("Maximum metrics limit exceeded for query: " + query.toString());
+                    throw new RuntimeException(MessageFormat.format(EXCEPTION_MESSAGE, scrollSize));
+                }
+            }
+        } catch (IOException ex) {
+            throw new SystemException(ex);
+        }
+        return result;
+    }
+
+    @VisibleForTesting
+    protected String constructQuery(MetricQuery query, int scrollFrom, int size) {
+        query.setStartTimestamp(ElasticSearchUtils.convertTimestampToMillis(query.getStartTimestamp()));
+        query.setEndTimestamp(ElasticSearchUtils.convertTimestampToMillis(query.getEndTimestamp()));
+        ObjectNode aggsNode = constructAggregationNode(query, genericObjectMapper);
+        ObjectNode queryNode = constructQueryNode(query, genericObjectMapper);
+        ObjectNode rootNode = metricMapper.createObjectNode();
+        if (aggsNode != null && queryNode != null) {
+            rootNode.set("aggs", aggsNode);
+            rootNode.set("query", queryNode);
+            rootNode.put("from", scrollFrom);
+            rootNode.put("size", size);
+
+            return rootNode.toString();
+        } else {
+            logger.error("Failed to construct query");
+            return null;
+        }
+    }
+
+    private ObjectNode addParentNodeWithNameInOrder(ObjectNode node, int index, String...
nodeNameList) { + if (nodeNameList.length == index) { + return node; + } + ObjectNode parentNode = genericObjectMapper.createObjectNode(); + parentNode.set(nodeNameList[index], node); + return addParentNodeWithNameInOrder(parentNode, index + 1, nodeNameList); + } + + private ObjectNode addParentNodeWithNameInOrder(String value, int index, String... nodeNameList) { + ObjectNode parentNode = genericObjectMapper.createObjectNode(); + if (0 == index) { + parentNode.put(nodeNameList[index], value); + return parentNode; + } + ObjectNode node = addParentNodeWithNameInOrder(value, index - 1, nodeNameList); + parentNode.set(nodeNameList[index], node); + return parentNode; + } + + private ObjectNode constructQueryNode(MetricQuery query, ObjectMapper mapper) { + try { + Map tags = query.getTags(); + Long startTimestamp = query.getStartTimestamp(), endTimestamp = query.getEndTimestamp(); + requireArgument(startTimestamp != null && endTimestamp != null, "Start and end timestamps should be present while querying."); + + String topicFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.TOPIC.getName(); + String tagFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.TAGS.getName(); + String tsFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.TIMESERIES.getName(); + + ArrayNode filterNode = mapper.createArrayNode(); + + if (tags != null && tags.containsKey(topicFieldName)) { + String topicFilters = tags.get(topicFieldName); + tags.remove(topicFieldName); + String regexPattern = SchemaService.convertToRegex(topicFilters); + ObjectNode topicFilterNode = addParentNodeWithNameInOrder(regexPattern, 1, topicFieldName + ".raw", "regexp"); + filterNode.add(topicFilterNode); + } + + if (tags != null && tags.size() > 0) { + for (Map.Entry tag: query.getTags().entrySet()) { + String rawTagString = SchemaService.convertToRegex(mapper.writeValueAsString(tag)); + rawTagString = rawTagString.substring(1, rawTagString.length() - 1); + String regexPattern = ".*(" + rawTagString.replaceAll("\"", "[\"]") + ").*"; + ObjectNode regexFilterNode = addParentNodeWithNameInOrder(regexPattern, 1, tagFieldName + ".raw", "regexp"); + filterNode.add(regexFilterNode); + } + } + + ObjectNode timeSeriesRangeNode = mapper.createObjectNode(); + timeSeriesRangeNode.put("gte", Long.toString(startTimestamp)); + timeSeriesRangeNode.put("lte", Long.toString(endTimestamp)); + ObjectNode timeSeriesNode = addParentNodeWithNameInOrder(timeSeriesRangeNode, 0, tsFieldName, "range"); + + ArrayNode mustNode = mapper.createArrayNode(); + mustNode.add(timeSeriesNode); + + ObjectNode compoundNode = mapper.createObjectNode(); + compoundNode.set("must", mustNode); + compoundNode.set("filter", filterNode); + + ObjectNode queryNode = addParentNodeWithNameInOrder(compoundNode, 0, "bool"); + return queryNode; + } catch (Exception e) { + logger.error("Failed to construct query node -{}", e); + return null; + } + } + + private ObjectNode constructAggregationNode(MetricQuery query, ObjectMapper mapper) { + try { + Long downsamplingPeriod = query.getDownsamplingPeriod(); + MetricQuery.Aggregator downsamplerAgg = query.getDownsampler(); + requireArgument(downsamplerAgg != null, "Downsampler aggregation should be present while querying for consumer offset."); + requireArgument(downsamplingPeriod != null, "Downsampling period should be present while querying for consumer offset."); + Long convertMillistoMinute = Long.valueOf(60 * 1000); + + String topicFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.TOPIC.getName(); + String 
valueFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.VALUE.getName(); + String tsFieldName = ConsumerOffsetRecordList.ConsumerOffsetRecordType.TIMESERIES.getName(); + + ObjectNode maxOffsetGreaterThanAggNode = addParentNodeWithNameInOrder(valueFieldName, 2, "field", downsamplerAgg.getDescription(), "max_offset_greater_than"); + + ObjectNode dateHistogramNode = mapper.createObjectNode(); + dateHistogramNode.put("field", tsFieldName); + dateHistogramNode.put("interval", String.format("%sm", downsamplingPeriod / convertMillistoMinute)); + + ObjectNode maxOffsetPerUnitTimeGreaterThanNode = mapper.createObjectNode(); + maxOffsetPerUnitTimeGreaterThanNode.set("date_histogram", dateHistogramNode); + maxOffsetPerUnitTimeGreaterThanNode.set("aggs", maxOffsetGreaterThanAggNode); + ObjectNode maxOffsetPerUnitTimeGreaterThanAggNode = addParentNodeWithNameInOrder(maxOffsetPerUnitTimeGreaterThanNode, 0, "max_offset_per_unit_time_greater_than"); + + ObjectNode topicBucketNode = addParentNodeWithNameInOrder(topicFieldName + ".raw", 0, "field"); + ObjectNode maxTopicOffsetPerUnitTimeGreaterThanAggNode = mapper.createObjectNode(); + maxTopicOffsetPerUnitTimeGreaterThanAggNode.set("terms", topicBucketNode); + maxTopicOffsetPerUnitTimeGreaterThanAggNode.set("aggs", maxOffsetPerUnitTimeGreaterThanAggNode); + + ObjectNode parentNode = addParentNodeWithNameInOrder(maxTopicOffsetPerUnitTimeGreaterThanAggNode, 0, "max_topic_offset_per_unit_time_greater_than"); + + return parentNode; + } catch (Exception e) { + logger.error("Failed to construct aggregate node - {}", e); + return null; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryProcessor.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryProcessor.java new file mode 100644 index 000000000..23f67f025 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryProcessor.java @@ -0,0 +1,221 @@ +package com.salesforce.dva.argus.service.metric; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DiscoveryService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.MonitorService.Counter; +import com.salesforce.dva.argus.service.QueryStoreService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.TSDBService.QueryStartTimeWindow; +import com.salesforce.dva.argus.service.TSDBService.QueryTimeSeriesExpansion; +import com.salesforce.dva.argus.service.TSDBService.QueryTimeWindow; +import com.salesforce.dva.argus.service.metric.transform.Transform; +import com.salesforce.dva.argus.service.metric.transform.TransformFactory; +import com.salesforce.dva.argus.service.metric.transform.TransformFactory.Function; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.service.tsdb.MetricQuery.Aggregator; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.QueryContext; +import com.salesforce.dva.argus.util.TSDBQueryExpression; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/* + * This class has methods which are used to evaluate the metric query expression once it is parsed + */ +public class MetricQueryProcessor { + + private static Logger logger = 
LoggerFactory.getLogger(MetricQueryProcessor.class); + + private DiscoveryService _discoveryService; + + private TSDBService _tsdbService; + + private MonitorService _monitorService; + + private QueryStoreService _queryStoreService; + + private TransformFactory _factory; + + private static final String HOSTNAME; + + static { + HOSTNAME = SystemConfiguration.getHostname(); + } + + + @Inject + public MetricQueryProcessor(TSDBService tsdbService, DiscoveryService discoveryService, MonitorService monitorService, TransformFactory factory, QueryStoreService queryStoreService) { + _tsdbService = tsdbService; + _discoveryService = discoveryService; + _monitorService=monitorService; + _queryStoreService=queryStoreService; + _factory = factory; + } + + public MetricQueryResult evaluateQuery(QueryContext context, long relativeTo){ + MetricQueryResult queryResult = new MetricQueryResult(); + if(context.getChildContexts()!=null && context.getChildContexts().size()!=0) { + for(QueryContext childCtx : context.getChildContexts()) { + mergeQueryResults(queryResult, evaluateQuery(childCtx, relativeTo)); + } + } + + if(context.getExpression()!=null) { + mergeQueryResults(queryResult, evaluateTSDBQuery(context.getExpression())); + } + + + + if(context.getTransform()!=null) { + boolean constantsOnly = false; + // fill transform needs to know whether its generating a constant line or its filling gaps in some computed metrics + if((TransformFactory.Function.FILL.equals(context.getTransform()) && (context.getChildContexts()==null || context.getChildContexts().size()==0) && context.getExpression()==null)) { + constantsOnly = true; + } + queryResult.setMetricsList(evaluateTransform(context.getTransform(), queryResult.getMetricsList(), context.getConstants(), relativeTo, constantsOnly, context)); + queryResult.addTransform(context.getTransform()); + } + queryResult.setExpandedTimeSeriesRange(QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(queryResult.getNumTSDBResults())); + queryResult.setQueryTimeWindow(QueryTimeWindow.getWindow(queryResult.getQueryTimeRangeInMillis())); + queryResult.setQueryStartTimeWindow(QueryStartTimeWindow.getWindow(relativeTo - queryResult.getQueryStartTimeMillis())); + return queryResult; + } + + public void mergeQueryResults(MetricQueryResult parentResult, MetricQueryResult childResult) { + parentResult.getMetricsList().addAll(childResult.getMetricsList()); + parentResult.setNumTSDBResults(parentResult.getNumTSDBResults() + childResult.getNumTSDBResults()); + parentResult.setNumDiscoveryQueries(parentResult.getNumDiscoveryQueries() + childResult.getNumDiscoveryQueries()); + parentResult.setNumDiscoveryResults(parentResult.getNumDiscoveryResults() + childResult.getNumDiscoveryResults()); + parentResult.addInboundMetricQueries(childResult.getInboundMetricQueries()); + parentResult.addTransforms(childResult.getTransforms()); + if(childResult.getQueryTimeRangeInMillis() > parentResult.getQueryTimeRangeInMillis()) { + parentResult.setQueryTimeRangeInMillis(childResult.getQueryTimeRangeInMillis()); + } + if(childResult.getQueryStartTimeMillis() < parentResult.getQueryStartTimeMillis()) { + parentResult.setQueryStartTimeMillis(childResult.getQueryStartTimeMillis()); + } + } + + public MetricQuery convertTSDBQueryToMetricQuery(TSDBQueryExpression expression) { + Long startTimestamp = expression.getStartTimestamp(); + Long endTimestamp = expression.getEndTimestamp(); + String namespace = expression.getNamespace(); + String scope = expression.getScope(); + String metric = 
expression.getMetric();
+        Aggregator aggregator = expression.getAggregator();
+        Map<String, String> tags = expression.getTags();
+        Aggregator downsampler = expression.getDownsampler();
+        Long downsamplingPeriod = expression.getDownsamplingPeriod();
+
+        MetricQuery query = new MetricQuery(scope, metric, tags, startTimestamp, endTimestamp);
+        query.setNamespace(namespace);
+        query.setDownsampler(downsampler);
+        query.setDownsamplingPeriod(downsamplingPeriod);
+        query.setPercentile(expression.getPercentile());
+        query.setShowHistogramBuckets(expression.isShowHistogramBuckets());
+        if(!query.getShowHistogramBuckets() && query.getPercentile() == null) {
+            query.setAggregator(getSubstituteAggregator(aggregator));
+        } else {
+            query.setAggregator(aggregator);
+        }
+
+        return query;
+    }
+
+    private MetricQueryResult evaluateTSDBQuery(TSDBQueryExpression expression) {
+        final long start = System.currentTimeMillis();
+
+        MetricQueryResult queryResult = new MetricQueryResult();
+
+        MetricQuery query = convertTSDBQueryToMetricQuery(expression);
+        List<MetricQuery> queries = _discoveryService.getMatchingQueries(query);
+
+        if (queries.size() == 0) { // No metrics inflow to argus in last DEFAULT_RETENTION_DISCOVERY_DAYS days. Save the raw query processed within inBoundMetricQuery.
+            queryResult.addInboundMetricQuery(query);
+        }
+
+        // Stores all the user queries
+        List<Metric> metricsQueried = new ArrayList<>();
+        for (MetricQuery metricQuery : queries) {
+            metricsQueried.add(new Metric(metricQuery.getScope(), metricQuery.getMetric()));
+            queryResult.addInboundMetricQuery(metricQuery);
+        }
+
+        try {
+            _queryStoreService.putArgusWsQueries(metricsQueried);
+        } catch (Exception e) {
+            logger.warn("Inserting Queries to QueryStore failed due to {}", e.toString());
+        }
+        List<Metric> metrics = new ArrayList<>();
+        Map<MetricQuery, List<Metric>> metricsMap = _tsdbService.getMetrics(queries);
+        for(List<Metric> m : metricsMap.values()) {
+            metrics.addAll(m);
+        }
+        Collections.sort(metrics);
+        queryResult.setMetricsList(metrics);
+        Long startTimestamp = expression.getStartTimestamp();
+        queryResult.setQueryTimeRangeInMillis(expression.getEndTimestamp() - startTimestamp);
+        queryResult.setQueryStartTimeMillis(startTimestamp);
+        if(queries.size() != 1 || queries.get(0) != query) {
+            queryResult.setNumDiscoveryResults(queries.size());
+            queryResult.setNumDiscoveryQueries(1);
+        }
+        queryResult.setNumTSDBResults(metrics.size());
+
+        final long time = System.currentTimeMillis() - start;
+        _monitorService.modifyCounter(Counter.METRICQUERYPROCESSOR_EVALUATETSDBQUERY_LATENCY, time, null);
+        _monitorService.modifyCounter(Counter.METRICQUERYPROCESSOR_EVALUATETSDBQUERY_COUNT, 1, null);
+
+        return queryResult;
+    }
+
+    /*
+     * We replace the aggregator to provide a non-interpolated default behavior for MIN, MAX and SUM
+     */
+    private Aggregator getSubstituteAggregator(Aggregator aggregator) {
+        switch (aggregator) {
+            case MIN:
+                return Aggregator.MIMMIN;
+            case MAX:
+                return Aggregator.MIMMAX;
+            case SUM:
+                return Aggregator.ZIMSUM;
+            case IMIN:
+                return Aggregator.MIN;
+            case IMAX:
+                return Aggregator.MAX;
+            case ISUM:
+                return Aggregator.SUM;
+            default:
+                return aggregator;
+        }
+    }
+
+    private List<Metric> evaluateTransform(Function function, List<Metric> result, List<String> constants, long relativeTo, boolean constantsOnly, QueryContext currentQueryContext) {
+        if(TransformFactory.Function.FILL.getName().equals(function.getName())) {
+            constants.add(String.valueOf(relativeTo));
+            constants.add(String.valueOf(constantsOnly));
+        }
+
+        Transform transform = _factory.getTransform(function.getName());
+        Map<String, String> tags = new HashMap<>();
tags.put("host", HOSTNAME); + tags.put("transform", function.getName()); + _monitorService.modifyCounter(Counter.TRANSFORMS_EVALUATED, 1, tags); + List metrics = ((constants == null || constants.isEmpty()) ? transform.transform(currentQueryContext, result) : transform.transform(currentQueryContext, result, constants)); + Collections.sort(metrics); + return metrics; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryResult.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryResult.java new file mode 100644 index 000000000..dcfa37be1 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/MetricQueryResult.java @@ -0,0 +1,133 @@ +package com.salesforce.dva.argus.service.metric; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.metric.transform.TransformFactory; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import org.glassfish.grizzly.utils.ArraySet; + +/* + * This class encapsulates the results of the metric query evaluation that are returned to the caller + */ +public class MetricQueryResult { + + private List metrics = new ArrayList<>(); + + // Returns the number of time series that current query has expanded to. + private String expandedTimeSeriesRange = "unknown"; + + // Returns the time window of current query based on total query time range + private String queryTimeWindow = "unknown"; + + // Returns the time window of the current query based on the start time of the query + private String queryStartTimeWindow = "unknown"; + + private Integer numDiscoveryResults = 0; + + private Integer numDiscoveryQueries = 0; + + private Long queryTimeRangeInMillis = 0L; + + private Integer numTSDBResults = 0; + + private Long queryStartTimeMillis = System.currentTimeMillis(); + + private Set transforms = new HashSet<>(); + + private Set inboundMetricQueries = new HashSet<>(); + + public List getMetricsList() { + return metrics; + } + + public void setMetricsList(List metrics) { + this.metrics = metrics; + } + + public String getExpandedTimeSeriesRange() { + return expandedTimeSeriesRange; + } + + public void setExpandedTimeSeriesRange(String expandedTimeSeriesRange) { + this.expandedTimeSeriesRange = expandedTimeSeriesRange; + } + + public String getQueryTimeWindow() { + return queryTimeWindow; + } + + public void setQueryTimeWindow(String queryTimeWindow) { + this.queryTimeWindow = queryTimeWindow; + } + + public Integer getNumDiscoveryResults() { + return numDiscoveryResults; + } + + public void setNumDiscoveryResults(Integer numDiscoveryResults) { + this.numDiscoveryResults = numDiscoveryResults; + } + + public Integer getNumDiscoveryQueries() { + return numDiscoveryQueries; + } + + public void setNumDiscoveryQueries(Integer numDiscoveryQueries) { + this.numDiscoveryQueries = numDiscoveryQueries; + } + + public Long getQueryTimeRangeInMillis() { + return queryTimeRangeInMillis; + } + + public void setQueryTimeRangeInMillis(Long queryTimeRangeInMillis) { + this.queryTimeRangeInMillis = queryTimeRangeInMillis; + } + + public Integer getNumTSDBResults() { + return numTSDBResults; + } + + public void setNumTSDBResults(Integer numTSDBResults) { + this.numTSDBResults = numTSDBResults; + } + + public String getQueryStartTimeWindow() { + return queryStartTimeWindow; + } + + public void setQueryStartTimeWindow(String queryStartTimeWindow) { + 
this.queryStartTimeWindow = queryStartTimeWindow; + } + + public Long getQueryStartTimeMillis() { + return queryStartTimeMillis; + } + + public void setQueryStartTimeMillis(Long queryStartTimeMillis) { + this.queryStartTimeMillis = queryStartTimeMillis; + } + + public boolean containsTransform() { return (transforms.size() > 0); } + + public void addTransform(TransformFactory.Function transform) { this.transforms.add(transform); } + + public List getTransforms() { return new ArrayList<>(this.transforms); } + + public void addTransforms(List transforms) { this.transforms.addAll(transforms); } + + public void removeTransform(TransformFactory.Function transform) { this.transforms.remove(transform); } + + + public List getInboundMetricQueries() { return new ArrayList<>(inboundMetricQueries); } + + public void removeMetricQueries(MetricQuery metricQuery) { this.inboundMetricQueries.remove(metricQuery); } + + public void addInboundMetricQuery(MetricQuery metricQuery) { this.inboundMetricQueries.add(metricQuery); } + + public void addInboundMetricQueries(List metricQuery) { this.inboundMetricQueries.addAll(metricQuery); } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/NoOperationMetricsStorageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/NoOperationMetricsStorageService.java new file mode 100644 index 000000000..a55396d51 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/NoOperationMetricsStorageService.java @@ -0,0 +1,29 @@ +package com.salesforce.dva.argus.service.metric; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.MetricStorageService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; + +import java.util.List; +import java.util.Map; +import java.util.Properties; + +public class NoOperationMetricsStorageService extends DefaultService implements MetricStorageService { + + @Inject + public NoOperationMetricsStorageService(SystemConfiguration config) { + super(config); + } + + @Override + public void putMetrics(List metrics) {} + + @Override + public Map> getMetrics(List queries) { + return null; + } + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AbstractArithmeticTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AbstractArithmeticTransform.java deleted file mode 100644 index bb4c4d86b..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AbstractArithmeticTransform.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. 
Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.service.metric.transform; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.util.QueryContext; - -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; - -/** - * Raj Sarkapally. - * - * @author Raj Sarkapally (rsarkapally@salesforce.com) - */ -public abstract class AbstractArithmeticTransform implements Transform { - - //~ Static fields/initializers ******************************************************************************************************************* - - private static final String RESULT_METRIC_NAME = "result"; - - //~ Methods ************************************************************************************************************************************** - - @Override - public List transform(QueryContext context, List metrics) { - if (metrics == null) { - throw new MissingDataException("The metrics list cannot be null or empty while performing arithmetic transformations."); - } - if (metrics.isEmpty()) { - return metrics; - } - - Metric result = new Metric(getResultScopeName(), RESULT_METRIC_NAME); - Map resultDatapoints = new HashMap<>(); - Iterator> it = metrics.get(0).getDatapoints().entrySet().iterator(); - - while (it.hasNext()) { - Entry entry = it.next(); - List operands = null; - - try { - operands = getOperands(entry.getKey(), metrics); - } catch (MissingDataException mde) { - continue; - } - resultDatapoints.put(entry.getKey(), performOperation(operands)); - } - result.setDatapoints(resultDatapoints); - MetricDistiller.setCommonAttributes(metrics, result); - - List resultMetrics = new ArrayList<>(); - - Collections.addAll(resultMetrics, result); - return resultMetrics; - } - - private List getOperands(Long timestamp, List metrics) { - List operands = new ArrayList<>(); - - for (Metric metric : metrics) { - Double operand = metric.getDatapoints().get(timestamp); - - if (operand == null) { - throw new MissingDataException(MessageFormat.format("Datapoint does not exist for timestamp: {0} for metric: {1}", timestamp, - metric)); - } - operands.add(operand); - } - return operands; - } - - /** - * Performs the arithmetic operation defined in sub class. - * - * @param operands param1 First parameter - * - * @return The result of arithmetic operation between first parameter and second parameter. 
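For context on what is being removed: the deleted base class walked the timestamps of the first metric only, collected the matching datapoint from every input metric, and skipped any timestamp where some series had no value. A minimal sketch of that alignment rule, using plain maps in place of Argus Metric objects (names here are illustrative):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OperandAlignmentSketch {
    // Collect one operand per series for a timestamp; a null return means "skip this timestamp",
    // matching the MissingDataException/continue behavior in the deleted class.
    static List<Double> operandsAt(long timestamp, List<Map<Long, Double>> series) {
        List<Double> operands = new ArrayList<>();
        for (Map<Long, Double> s : series) {
            Double v = s.get(timestamp);
            if (v == null) {
                return null; // at least one series is missing this timestamp
            }
            operands.add(v);
        }
        return operands;
    }

    public static void main(String[] args) {
        Map<Long, Double> a = new HashMap<>(); a.put(0L, 1.0); a.put(60L, 2.0);
        Map<Long, Double> b = new HashMap<>(); b.put(0L, 3.0);
        System.out.println(operandsAt(0L, Arrays.asList(a, b)));  // [1.0, 3.0]
        System.out.println(operandsAt(60L, Arrays.asList(a, b))); // null -> timestamp skipped
    }
}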
- */ - protected abstract Double performOperation(List operands); -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransform.java index 7a341fee9..cbfceb47b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransform.java @@ -9,7 +9,8 @@ public class AliasByTagTransform implements Transform { - static final String DELIMITER = ","; + public static final String DELIMITER = ","; + public static final String DEFAULT_SCOPE_NAME=TransformFactory.Function.ALIASBYTAG.name(); @Override public List transform(QueryContext context, List metrics) { @@ -25,6 +26,9 @@ public List transform(QueryContext context, List metrics) { if(!displayName.isEmpty()) { displayName = displayName.substring(0, displayName.length() - 1); metric.setDisplayName(displayName); + metric.setScope(DEFAULT_SCOPE_NAME); + metric.setMetric(displayName); + metric.setTags(null); } } return metrics; @@ -50,6 +54,9 @@ public List transform(QueryContext queryContext, List metrics, L if(!displayName.isEmpty()) { displayName = displayName.substring(0, displayName.length() - 1); metric.setDisplayName(displayName); + metric.setScope(DEFAULT_SCOPE_NAME); + metric.setMetric(displayName); + metric.setTags(null); } } return metrics; @@ -57,7 +64,7 @@ public List transform(QueryContext queryContext, List metrics, L @Override public List transform(QueryContext queryContext, @SuppressWarnings("unchecked") List... metrics) { - throw new UnsupportedOperationException("ALIASBYTAG doesn't need a list of lists!"); + throw new UnsupportedOperationException("ALIASBYTAG doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasTransform.java index 0ccd1b57c..5a0e4cefe 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AliasTransform.java @@ -36,6 +36,8 @@ import com.salesforce.dva.argus.util.QueryContext; import java.util.List; +import java.util.Map; +import java.util.Map.Entry; /** * Transforms the name of one or more metrics.
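The effect of the new AliasByTagTransform lines is that an aliased-by-tag series is renamed end to end, not just for display: the scope becomes the ALIASBYTAG placeholder, the metric name becomes the display name, and the tags are cleared. A small sketch of the naming rule itself (tag values joined with the ',' delimiter, trailing delimiter dropped), using a plain map where the real code reads metric tags:

import java.util.LinkedHashMap;
import java.util.Map;

public class AliasByTagNameSketch {
    static String displayName(Map<String, String> tags, String... tagKeys) {
        StringBuilder sb = new StringBuilder();
        for (String key : tagKeys) {
            String value = tags.get(key);
            if (value != null) {
                sb.append(value).append(','); // AliasByTagTransform.DELIMITER
            }
        }
        if (sb.length() > 0) {
            sb.deleteCharAt(sb.length() - 1); // strip the trailing delimiter
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> tags = new LinkedHashMap<>();
        tags.put("device", "web1-prd");
        tags.put("pod", "na44");
        // prints "web1-prd,na44"; the transform uses this as both displayName and metric name
        System.out.println(displayName(tags, "device", "pod"));
    }
}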
@@ -113,6 +115,7 @@ public List transform(QueryContext queryContext, List metrics, L String newScopeName = metric.getScope().replaceAll(scopeSearchRegex, scopeReplaceText); metric.setScope(newScopeName); } + metric.setDisplayName(getDisplayName(metric)); } return metrics; @@ -125,7 +128,41 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, @SuppressWarnings("unchecked") List... listOfList) { - throw new UnsupportedOperationException("Alias doesn't need list of list!"); + throw new UnsupportedOperationException("Alias doesn't support multiple lists of metrics!"); } + + private String getDisplayName(Metric metric) { + StringBuilder result = new StringBuilder(); + if(metric.getScope() != null && metric.getScope().length()>0 && !metric.getScope().equals(AliasByTagTransform.DEFAULT_SCOPE_NAME)) { + result.append(metric.getScope()); + } + if(metric.getScope() != null && metric.getScope().length()>0 && !metric.getScope().equals(AliasByTagTransform.DEFAULT_SCOPE_NAME) + && metric.getMetric() != null && metric.getMetric().length()>0) { + result.append(':'); + } + if(metric.getMetric() != null && metric.getMetric().length()>0) { + result.append(metric.getMetric()); + } + + result.append(createTagString(metric.getTags())); + return result.toString(); + } + + private String createTagString(Map tags) { + StringBuilder result = new StringBuilder(); + if(tags != null && tags.size()>0) { + result.append('{'); + for(Entry tag:tags.entrySet()) { + result.append(tag.getKey()); + result.append('='); + result.append(tag.getValue()); + result.append(','); + } + result.deleteCharAt(result.length()-1); + result.append('}'); + } + return result.toString(); + } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AnomalySTLTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AnomalySTLTransform.java index 508525845..d0e0fee05 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AnomalySTLTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/AnomalySTLTransform.java @@ -184,7 +184,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("This class is deprecated!"); + throw new UnsupportedOperationException("AnomalySTLTransform doesn't support multiple lists of metrics!"); } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CountTransformWrapUnion.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CountTransformWrapUnion.java index 2997eb6db..b5ea7df08 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CountTransformWrapUnion.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/CountTransformWrapUnion.java @@ -104,7 +104,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Count doesn't need list of list!"); + throw new UnsupportedOperationException("Count doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
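Concretely, getDisplayName below renders scope:metric{k=v,...}, omitting the scope when it is the ALIASBYTAG placeholder. A runnable replication of just the tag-string step, with made-up values:

import java.util.LinkedHashMap;
import java.util.Map;

public class TagStringSketch {
    // Mirrors createTagString in the diff: "{k=v,k2=v2}", or "" when there are no tags.
    static String createTagString(Map<String, String> tags) {
        StringBuilder result = new StringBuilder();
        if (tags != null && !tags.isEmpty()) {
            result.append('{');
            for (Map.Entry<String, String> tag : tags.entrySet()) {
                result.append(tag.getKey()).append('=').append(tag.getValue()).append(',');
            }
            result.deleteCharAt(result.length() - 1);
            result.append('}');
        }
        return result.toString();
    }

    public static void main(String[] args) {
        Map<String, String> tags = new LinkedHashMap<>();
        tags.put("device", "web1");
        tags.put("core", "0");
        // prints "{device=web1,core=0}"; getDisplayName prefixes this with "scope:metric"
        System.out.println(createTagString(tags));
    }
}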
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DerivativeTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DerivativeTransform.java deleted file mode 100644 index 01edcfb7c..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DerivativeTransform.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.service.metric.transform; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.util.QueryContext; -import com.salesforce.dva.argus.util.TransformUtil; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.TreeMap; - -/** - * Implementation of Derivative transform. 
- * - * @author Raj Sarkapally (rsarkapally@salesforce.com) - */ -public class DerivativeTransform implements Transform { - - //~ Methods ************************************************************************************************************************************** - - @Override - public List transform(QueryContext context, List metrics) { - SystemAssert.requireArgument(metrics != null, "Cannot transform null metric/metrics"); - return computeDerivedValues(metrics, -1L); - } - - @Override - public List transform(QueryContext queryContext, List metrics, List constants) { - SystemAssert.requireArgument(metrics != null, "Cannot transform null metric/metrics"); - SystemAssert.requireArgument(constants.size() == 1, - "Derivative Transform can have exactly one constant"); - String intervalSizeStr = constants.get(0); - Long intervalSizeInSeconds = TransformUtil.getWindowInSeconds(intervalSizeStr) * 1000; - return computeDerivedValues(metrics, intervalSizeInSeconds); - } - - private List computeDerivedValues(List metrics, Long intervalWidth){ - List result = new ArrayList<>(metrics.size()); - - for (Metric metric : metrics) { - Map sortedDatapoints = new TreeMap<>(); - - sortedDatapoints.putAll(metric.getDatapoints()); - - Map derivativeDatapoints = new HashMap<>(); - Entry prevEntry = null; - - for (Entry entry : sortedDatapoints.entrySet()) { - - if (prevEntry == null) { - continue; - } else { - if(intervalWidth<=0) { - derivativeDatapoints.put(entry.getKey(), entry.getValue() - prevEntry.getValue()); - }else { - derivativeDatapoints.put(entry.getKey(), ((entry.getValue() - prevEntry.getValue())*intervalWidth)/(entry.getKey()-prevEntry.getKey())); - } - } - prevEntry = entry; - } - metric.setDatapoints(derivativeDatapoints); - result.add(metric); - } - return result; - } - - - - @Override - public String getResultScopeName() { - return TransformFactory.Function.DERIVATIVE.name(); - } - - @Override - public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("This class is deprecated!"); - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueReducerOrMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueReducerOrMapping.java index bccbffc75..35113f9e4 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueReducerOrMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueReducerOrMapping.java @@ -101,7 +101,7 @@ public Map mapping(Map originalDatapoints, List values, List constants) { - throw new UnsupportedOperationException("Divide Transform with reducer is not supposed to be used without a constant"); + return reduce(values); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueZipper.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueZipper.java index 5774d481a..74fc0feb2 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueZipper.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DivideValueZipper.java @@ -31,6 +31,9 @@ package com.salesforce.dva.argus.service.metric.transform; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.salesforce.dva.argus.system.SystemAssert; import com.salesforce.dva.argus.system.SystemException; @@ -45,14 +48,18 @@ public class DivideValueZipper implements ValueZipper { //~ Methods ************************************************************************************************************************************** + private final Logger _logger = LoggerFactory.getLogger(DivideValueZipper.class); + @Override public Double zip(Double originalDp, Double baseDp) { try { Double original = (originalDp == null) ? 0.0 : originalDp; Double base = (baseDp == null) ? 1.0 : baseDp; - SystemAssert.requireArgument(base != 0.0, "Datapoints in base metric shouldn't contain zero!"); - + if (base == 0.0) { + _logger.debug("Encountered zero denominator when executing the DivideValueZipper. So, skipping the value"); + return null; + } return (original / base); } catch (Exception e) { throw new SystemException("Fail to parse the double value of original Datapoint or base Datapoint!", e); @@ -64,4 +71,4 @@ public String name() { return TransformFactory.Function.DIVIDE_V.name(); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
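The DivideValueZipper change above is worth spelling out: a zero denominator no longer aborts the whole transform with an assertion failure; the zipper returns null and that datapoint is simply dropped downstream. A reduced sketch of the new semantics:

public class DivideZipSketch {
    // Mirrors DivideValueZipper.zip after the change: null means "drop this datapoint".
    static Double zip(Double originalDp, Double baseDp) {
        double original = (originalDp == null) ? 0.0 : originalDp;
        double base = (baseDp == null) ? 1.0 : baseDp;
        if (base == 0.0) {
            return null; // previously: SystemAssert failure for the entire query
        }
        return original / base;
    }

    public static void main(String[] args) {
        System.out.println(zip(10.0, 4.0)); // 2.5
        System.out.println(zip(10.0, 0.0)); // null -> datapoint skipped
    }
}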
*/ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java index e34ffc02d..635b1b326 100755 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransform.java @@ -235,7 +235,7 @@ private Map createDownsampleDatapoints(Map originalD long currWindowEndTime = windowStart + windowSize; values = new ArrayList<>(); for(long timestamp : sortedTimeStamps) { - if(timestamp >= windowStart && (timestamp < currWindowEndTime || timestamp==endTime)) { + if(timestamp >= windowStart && timestamp < currWindowEndTime) { values.add(originalDatapoints.get(timestamp)); } } @@ -282,7 +282,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Downsample doesn't need list of list!"); + throw new UnsupportedOperationException("Downsample doesn't support multiple lists of metrics!"); } private long truncateTimeField(long time, int field){ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ExcludeTransformWrap.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ExcludeTransformWrap.java index cca48e497..81af7d59a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ExcludeTransformWrap.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ExcludeTransformWrap.java @@ -70,7 +70,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Exclude doesn't need list of list!"); + throw new UnsupportedOperationException("Exclude doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java index 0063cf97c..82d64270c 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillCalculateTransform.java @@ -171,7 +171,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Fill_Calculate doesn't need list of list!"); + throw new UnsupportedOperationException("Fill_Calculate doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillTransform.java index ec29674c6..7c8965db1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/FillTransform.java @@ -61,7 +61,7 @@ public class FillTransform implements Transform { /** The default metric scope for results. 
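The one-character change in the DownsampleTransform window test above means a datapoint falling exactly on the overall end time is no longer pulled into the final bucket; membership is now strictly [windowStart, windowStart + windowSize). A worked check of the boundary, assuming a 60s window with hypothetical timestamps:

public class DownsampleWindowSketch {
    public static void main(String[] args) {
        long windowStart = 1_546_300_800_000L; // hypothetical bucket start
        long windowSize = 60_000L;             // 60s bucket
        long currWindowEndTime = windowStart + windowSize;

        long inside = windowStart + 59_999L;
        long boundary = currWindowEndTime;     // also the query endTime in the old special case

        // New rule: timestamp >= windowStart && timestamp < currWindowEndTime
        System.out.println(inside >= windowStart && inside < currWindowEndTime);     // true
        System.out.println(boundary >= windowStart && boundary < currWindowEndTime); // false (was true when boundary == endTime)
    }
}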
*/ public static final String DEFAULT_SCOPE_NAME = "scope"; - public static final int MAX_DATAPOINTS_FOR_FILL = DiscoveryService.MAX_DATAPOINTS_PER_RESPONSE; + public static final int MAX_DATAPOINTS_FOR_FILL = 2000000; //~ Methods ************************************************************************************************************************************** @@ -78,14 +78,26 @@ private static Map _fillMetricTransform(QueryContext queryContext, Long[] startAndEndTimestamps = QueryUtils.getStartAndEndTimesWithMaxInterval(queryContext); + //if interval size is more than a minute, rounding the start and end times to nearest minute + if(windowSizeInSeconds >= 60) { + startAndEndTimestamps[0] = (startAndEndTimestamps[0]/(60*1000))*(60*1000); + startAndEndTimestamps[1] = (startAndEndTimestamps[1]/(60*1000))*(60*1000); + } + Long startTimestamp = startAndEndTimestamps[0]>0 ? startAndEndTimestamps[0] : sortedTimestamps[0]; Long endTimestamp = startAndEndTimestamps[1]>0 ? startAndEndTimestamps[1] : sortedTimestamps[sortedTimestamps.length - 1]; + // create a new datapoints map propagateDatpoints, which have all the // expected timestamps, then fill the missing value int index = 1; int numDatapoints = 0; - while (startTimestamp <= endTimestamp && numDatapoints++ < MAX_DATAPOINTS_FOR_FILL) { + + if(startTimestamp < endTimestamp && ((endTimestamp - startTimestamp)/(windowSizeInSeconds * 1000) >= MAX_DATAPOINTS_FOR_FILL)) { + throw new RuntimeException("Fill transform cannot generate more than -" + MAX_DATAPOINTS_FOR_FILL + " datapoints"); + } + + while (startTimestamp <= endTimestamp) { filledDatapoints.put(startTimestamp, sortedDatapoints.containsKey(startTimestamp) ? sortedDatapoints.get(startTimestamp) : null); if (index >= sortedDatapoints.size()) { startTimestamp = startTimestamp + windowSizeInSeconds * 1000; @@ -261,7 +273,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Fill doesb't need list of list!"); + throw new UnsupportedOperationException("Fill doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
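Two guards were added to FILL above: start and end are floored to the minute once the window is 60s or larger, and the transform now refuses up front to synthesize MAX_DATAPOINTS_FOR_FILL (2,000,000) or more points instead of silently truncating. The arithmetic, sketched with hypothetical timestamps:

public class FillGuardSketch {
    public static void main(String[] args) {
        long start = 1_546_300_823_456L;
        long end = 1_546_304_461_789L;
        long windowSizeInSeconds = 60;

        if (windowSizeInSeconds >= 60) { // round to the nearest minute, as in the diff
            start = (start / 60_000L) * 60_000L;
            end = (end / 60_000L) * 60_000L;
        }

        long expectedPoints = (end - start) / (windowSizeInSeconds * 1000L);
        System.out.println(expectedPoints); // 61 one-minute slots for this ~1h range

        if (expectedPoints >= 2_000_000L) { // MAX_DATAPOINTS_FOR_FILL
            throw new RuntimeException("Fill transform cannot generate more than 2000000 datapoints");
        }
    }
}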
*/ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransform.java index e8e7a674b..76f2c5daa 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransform.java @@ -31,12 +31,12 @@ public List transform(QueryContext queryContext, List metrics, L SystemAssert.requireArgument(constants != null && constants.size() >= 2, "Constants list cannot be null and its size must be 2 or more."); // Find a constant with a function name: expected syntax is: tagk[,tagk]*,transform[,transformConstant]* - // scanning back to find the transform function + // Scanning from front to avoid handling #UNION# as transform rather than as constant with DIVIDE and other similar transforms TransformFactory.Function functionName = null; int functionElementId = -1; - - for (int i = constants.size() - 1; i >= 0; i--) { - String constant = constants.get(i); + + for (int i = 0; i < constants.size(); i++) { + String constant = constants.get(i); try { functionName = TransformFactory.Function.valueOf(constant); functionElementId = i; @@ -48,7 +48,7 @@ public List transform(QueryContext queryContext, List metrics, L } if (functionName == null) { - throw new UnsupportedOperationException("GroupByTag needs a function name to be provided"); + throw new UnsupportedOperationException("GroupByTag needs a valid function name to be provided in uppercase."); } List tags = constants.subList(0, functionElementId); @@ -88,7 +88,7 @@ public List transform(QueryContext queryContext, List metrics, L @Override public List transform(QueryContext queryContext, @SuppressWarnings("unchecked") List... metrics) { - throw new UnsupportedOperationException("Group By Tags Transform doesn't need list of list!"); + throw new UnsupportedOperationException("Group By Tags Transform doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransform.java index 7a3b12f80..222141fb1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransform.java @@ -75,7 +75,7 @@ public List transform(QueryContext queryContext, List metrics, L @Override public List transform(QueryContext queryContext, @SuppressWarnings("unchecked") List... 
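The scan-direction fix in GroupByTagTransform below matters when the trailing constants themselves spell a transform name: for constants like device,DIVIDE,UNION, a reverse scan latches onto UNION (itself a valid Function) and treats DIVIDE as a grouping tag, while a forward scan correctly stops at DIVIDE. A reduced sketch with a stand-in enum (the real code uses TransformFactory.Function):

import java.util.Arrays;
import java.util.List;

public class GroupByTagScanSketch {
    enum Function { DIVIDE, UNION, SUM } // stand-in for TransformFactory.Function

    public static void main(String[] args) {
        List<String> constants = Arrays.asList("device", "DIVIDE", "UNION");

        int functionElementId = -1;
        for (int i = 0; i < constants.size(); i++) { // forward scan, as in the fix
            try {
                Function.valueOf(constants.get(i));
                functionElementId = i;
                break;
            } catch (IllegalArgumentException ignored) {
                // not a transform name; treat it as a tag key
            }
        }

        // tags = [device], transform = DIVIDE; the remaining "UNION" constant passes through
        System.out.println(constants.subList(0, functionElementId)); // [device]
        System.out.println(constants.get(functionElementId));        // DIVIDE
    }
}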
metrics) { - throw new UnsupportedOperationException("Group By Transform doesn't need list of list!"); + throw new UnsupportedOperationException("Group By Transform doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupTransformWrapUnion.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupTransformWrapUnion.java index 9f95a901f..1742595f1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupTransformWrapUnion.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/GroupTransformWrapUnion.java @@ -107,7 +107,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Group doesn't need list of list!"); + throw new UnsupportedOperationException("Group doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersDeviation.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersDeviation.java index 7edeebc3b..78080daef 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersDeviation.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersDeviation.java @@ -117,7 +117,7 @@ public List transform(QueryContext queryContext, List metrics, L @SuppressWarnings("unchecked") @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("HoltWintersDeviation doesn't need list of list!"); + throw new UnsupportedOperationException("HoltWintersDeviation doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersForecast.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersForecast.java index 79064f205..b9dbea18a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersForecast.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/HoltWintersForecast.java @@ -117,7 +117,7 @@ public List transform(QueryContext queryContext, List metrics, L @SuppressWarnings("unchecked") @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("HoltWintersForecast doesn't need list of list!"); + throw new UnsupportedOperationException("HoltWintersForecast doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/IncludeTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/IncludeTransform.java index c8ff58db7..f4b01a07c 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/IncludeTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/IncludeTransform.java @@ -81,7 +81,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... 
listOfList) { - throw new UnsupportedOperationException("Include doesn't need list of list!"); + throw new UnsupportedOperationException("Include doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InterpolateTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InterpolateTransform.java index ca4c54f17..3b5933e2b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InterpolateTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/InterpolateTransform.java @@ -210,7 +210,7 @@ private void interpolateCount(Map resultDatapoints, InterpolationT @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Interpolation Transform is not supposed to be used with a list of metric list!"); + throw new UnsupportedOperationException("Interpolation Transform doesn't support multiple lists of metrics!"); } @Override diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/LimitTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/LimitTransform.java index bc27e3887..e787eb593 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/LimitTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/LimitTransform.java @@ -93,7 +93,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Limit doesn't need list of list!"); + throw new UnsupportedOperationException("Limit doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java index 976167a06..2c957f4b9 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricFilterWithInteralReducerTransform.java @@ -192,8 +192,11 @@ public List transform(QueryContext queryContext, List metrics, L String limit = constants.get(0); String type = constants.get(1); Map extendedSortedMap = createExtendedMap(metrics, type); - List filteredMetricList = this.valueFilter.filter(extendedSortedMap, limit); - + List filteredMetricList = new ArrayList(); + if (extendedSortedMap.isEmpty()) { + return filteredMetricList; + } + filteredMetricList = this.valueFilter.filter(extendedSortedMap, limit); return filteredMetricList; } @@ -202,7 +205,6 @@ private Map createExtendedMap(List metrics, String type) for (Metric metric : metrics) { String extendedEvaluation = internalReducer(metric, type); - if(extendedEvaluation!=null) { extendedSortedMap.put(metric, extendedEvaluation); } @@ -212,7 +214,7 @@ private Map createExtendedMap(List metrics, String type) @Override public List transform(QueryContext queryContext, List... 
listOfList) { - throw new UnsupportedOperationException("Filter doesn't need list of list!"); + throw new UnsupportedOperationException("Filter doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricMappingTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricMappingTransform.java index 8c52705b4..502edef53 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricMappingTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricMappingTransform.java @@ -138,7 +138,7 @@ private Map cleanDPs(Map originalDPs) { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Mapping doesn't need list of list!"); + throw new UnsupportedOperationException("Mapping doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerOrMappingTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerOrMappingTransform.java index 180eb0884..002bba187 100755 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerOrMappingTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerOrMappingTransform.java @@ -33,12 +33,15 @@ import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.system.SystemAssert; +import com.salesforce.dva.argus.system.SystemException; import com.salesforce.dva.argus.util.QueryContext; +import org.apache.commons.lang3.math.NumberUtils; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -62,7 +65,9 @@ public class MetricReducerOrMappingTransform implements Transform { protected static String FULLJOIN = "UNION"; protected static String INTERSECT = "INTERSECT"; protected Boolean fulljoinIndicator=true; - public static final Set DEFAULT_FULL_JOIN_EXCLUDE_CLASSES = new HashSet(Arrays.asList(new Class[] {DivideValueReducerOrMapping.class, DiffValueReducerOrMapping.class, DeviationValueReducerOrMapping.class})); + public static final Set DEFAULT_FULL_JOIN_EXCLUDE_CLASSES = new HashSet(Arrays.asList(new Class[] {DivideValueReducerOrMapping.class, DiffValueReducerOrMapping.class, DeviationValueReducerOrMapping.class, ScaleValueReducerOrMapping.class})); + public static final Set DEFAULT_CONSTANT_VALUE_INCLUDE_CLASSES = new HashSet(Arrays.asList(new Class[] {DivideValueReducerOrMapping.class})); + //~ Constructors ********************************************************************************************************************************* @@ -114,6 +119,14 @@ public List transform(QueryContext queryContext, List metrics, L return transform(queryContext, metrics); } } + // Handling special case of DIVIDE when datapoints for some timeseries are missing and when default value is given + if(constants.size() == 2 && DEFAULT_CONSTANT_VALUE_INCLUDE_CLASSES.contains(valueReducerOrMapping.getClass())) { + if (constants.get(0).toUpperCase().equals(FULLJOIN)){ + 
SystemAssert.requireArgument(NumberUtils.isNumber(constants.get(1)), + "Default constant value supplied to DIVIDE transform is not a number"); + return Arrays.asList(reduce(metrics, Arrays.asList(constants.get(1)))); + } + } return mapping(metrics, constants); } @@ -152,7 +165,21 @@ protected List mapping(List metrics, List constants) { protected Metric reduce(List metrics, List constants) { SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics"); if(valueReducerOrMapping instanceof DivideValueReducerOrMapping && metrics.size() < 2) { - throw new IllegalArgumentException("DIVIDE Transform needs at least 2 metrics to perform the operation."); + // Handling special scenario when there is only one metric is passed to divide transform + if (metrics.size() == 1) { + if (constants == null || constants.size()==0) { + throw new IllegalArgumentException("DIVIDE Transform needs default constant value along with union constant when there is only one metric to perform the operation"); + } else if (constants.size() == 1) { + Double defaultValue = Double.parseDouble(constants.get(0)); + Metric constructedMetric = metrics.get(0); + Map dataPoints = new LinkedHashMap<>(); + for (Map.Entry point : metrics.get(0).getDatapoints().entrySet()) { + dataPoints.put(point.getKey(), defaultValue); + } + constructedMetric.setDatapoints(dataPoints); + return constructedMetric; + } + } } MetricDistiller distiller = new MetricDistiller(); @@ -189,11 +216,18 @@ protected Map reduce(Map> collated, List reducedDatapoints = new HashMap<>(); for (Map.Entry> entry : collated.entrySet()) { - if (entry.getValue().size() < metrics.size() && !fulljoinIndicator) { + + // Handling special case of DIVIDE when datapoints for some timeseries are missing and when default value is given + if (entry.getValue().size() < metrics.size() && DEFAULT_CONSTANT_VALUE_INCLUDE_CLASSES.contains(valueReducerOrMapping.getClass()) && constants!=null && constants.size()==1) + { + reducedDatapoints.put(entry.getKey(), Double.parseDouble(constants.get(0))); + continue; + } + else if (entry.getValue().size() < metrics.size() && !fulljoinIndicator) { continue; } - Double reducedValue = constants == null || constants.isEmpty() ? + Double reducedValue = constants == null || constants.isEmpty() ? this.valueReducerOrMapping.reduce(entry.getValue()) : this.valueReducerOrMapping.reduce(entry.getValue(), constants); if(reducedValue!=null) { @@ -220,7 +254,7 @@ protected Map> collate(List metrics) { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("ReducerOrMapping doesn't need list of list!"); + throw new UnsupportedOperationException("ReducerOrMapping doesn't support multiple lists of metrics!"); } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerTransform.java index 3e0886473..8292b3617 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricReducerTransform.java @@ -143,7 +143,7 @@ public List transform(QueryContext queryContext, List metrics, L @Override public List transform(QueryContext queryContext, List... 
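The new branch above lets DIVIDE with a UNION join take a numeric default: whenever a timestamp is missing from some of the joined series, the reduced value is the supplied constant instead of the row being dropped. A sketch of the per-timestamp decision, with simplified types and a simple successive-division reducer standing in for the real one:

import java.util.Arrays;
import java.util.List;

public class DivideDefaultSketch {
    // values: the datapoints found at one timestamp; seriesCount: how many series were joined;
    // constants: e.g. ["1"], after the leading "UNION" flag has been consumed.
    static Double reduceAt(List<Double> values, int seriesCount, List<String> constants) {
        if (values.size() < seriesCount && constants != null && constants.size() == 1) {
            return Double.parseDouble(constants.get(0)); // default fills the gap
        }
        double result = values.get(0);
        for (int i = 1; i < values.size(); i++) {
            result /= values.get(i);
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(reduceAt(Arrays.asList(10.0, 4.0), 2, Arrays.asList("1"))); // 2.5
        System.out.println(reduceAt(Arrays.asList(10.0), 2, Arrays.asList("1")));      // 1.0 (default)
    }
}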
listOfList) { - throw new UnsupportedOperationException("Reducer doesn't need list of list!"); + throw new UnsupportedOperationException("Reducer doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricUnionTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricUnionTransform.java index 7c842698d..c0e5d9136 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricUnionTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricUnionTransform.java @@ -182,7 +182,7 @@ public List transform(QueryContext queryContext, List metrics, L @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Union doesn't need list of list"); + throw new UnsupportedOperationException("Union doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java index d6613e45e..1a91656bf 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/MetricZipperTransform.java @@ -40,6 +40,9 @@ import java.util.List; import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * This class transforms a list of metrics in a mapping way, which means apply the same function to every metric. More specifically, an interface * valueMapping will be passed in , which implements how to apply a mapping function to datapoints of every metric. @@ -55,6 +58,8 @@ public class MetricZipperTransform implements Transform { protected static String FULLJOIN = "UNION"; protected Boolean fulljoinIndicator = false; + private final Logger _logger = LoggerFactory.getLogger(MetricZipperTransform.class); + //~ Constructors ********************************************************************************************************************************* /** @@ -90,8 +95,10 @@ public List transform(QueryContext context, List metrics) { return metrics; } - SystemAssert.requireArgument(metrics.size() >= 2 && metrics.get(metrics.size() - 1) != null, - "Cannot transform without a base metric as second param!"); + if(metrics.size() < 2) { + _logger.debug( "Zipper transform requires a base metric as second param! Since this is not present, skipping the metric"); + return new ArrayList(); + } return zip(metrics.subList(0, metrics.size() - 1), metrics.get(metrics.size() - 1)); } @@ -105,9 +112,12 @@ public List transform(QueryContext context, List metrics) { * @return The merged metrics. */ public List zip(List metrics, Metric baseMetric) { - SystemAssert.requireArgument(baseMetric != null, "Zipper transform requires base metric as second param!"); - List zippedMetrics = new ArrayList(); + if(baseMetric == null) { + _logger.debug( "Zipper transform requires base metric as second param! 
Since this is null, skipping the metric"); + return zippedMetrics; + } + Map baseDatapoints = baseMetric.getDatapoints(); for (Metric metric : metrics) { @@ -129,10 +139,11 @@ public List zip(List metrics, Metric baseMetric) { * @return The merged data points. */ public Map zip(Map originalDatapoints, Map baseDatapoints) { - SystemAssert.requireArgument(baseDatapoints != null && !baseDatapoints.isEmpty(), - "Zipper transform requires valid baseDatapoints from base metric!"); - Map zippedDP = new HashMap<>(); + if(baseDatapoints == null || baseDatapoints.isEmpty()) { + _logger.debug("Zipper transform requires valid baseDatapoints from base metric. Since this is empty, skipping the metric"); + return zippedDP; + } for (Map.Entry originalDP : originalDatapoints.entrySet()) { Long originalKey = originalDP.getKey(); @@ -141,7 +152,10 @@ public Map zip(Map originalDatapoints, Map zip(Map originalDatapoints, Map zip(Map originalDatapoints, Map transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Zipper doesn't need list of list!"); + throw new UnsupportedOperationException("SUM_V, SCALE_V, DIFF_V, DIVIDE_V, NORMALIZE_V transform doesn't support multiple lists of metrics!"); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/NormalizeTransformWrap.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/NormalizeTransformWrap.java index 038e2db42..cbd20eb7a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/NormalizeTransformWrap.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/NormalizeTransformWrap.java @@ -147,7 +147,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("NormalizeTransformWrap doesn't support list of list!"); + throw new UnsupportedOperationException("NormalizeTransformWrap doesn't support multiple lists of metrics!"); } //~ Inner Classes ******************************************************************************************************************************** diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransform.java index 5c60cd70c..86b9ad972 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransform.java @@ -54,8 +54,8 @@ public class PropagateTransform implements Transform { private void _propagateMetricTransform(Metric metric, long windowSizeInSeconds, QueryContext queryContext) { - // if the datapoint set is empty or has a single datapoint, return directly - if(metric.getDatapoints().isEmpty() || metric.getDatapoints().size() == 1) { + // if the datapoint set is empty + if(metric.getDatapoints().isEmpty()) { return; } Long[] startAndEndTimestamps = QueryUtils.getStartAndEndTimesWithMaxInterval(queryContext); @@ -154,7 +154,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... 
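Net effect of the relaxed zipper checks above: a zipper invoked with fewer than two metrics, a null base metric, or an empty base series now degrades to an empty result instead of throwing. A compact sketch of the new control flow, using plain lists in place of Argus metrics:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ZipperLeniencySketch {
    static <T> List<T> transform(List<T> metrics) {
        if (metrics == null || metrics.isEmpty()) {
            return metrics; // unchanged behavior
        }
        if (metrics.size() < 2) {
            return new ArrayList<>(); // previously: SystemAssert failure
        }
        // zip metrics[0..n-2] against the base metric at metrics[n-1]
        return metrics.subList(0, metrics.size() - 1);
    }

    public static void main(String[] args) {
        System.out.println(transform(Collections.singletonList("only-metric"))); // [] instead of an exception
    }
}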
listOfList) { - throw new UnsupportedOperationException("Propagate Transform doesn't accept list of metric list!"); + throw new UnsupportedOperationException("Propagate Transform doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RangeTransformWrap.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RangeTransformWrap.java index 51bf8bd58..455bdc78c 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RangeTransformWrap.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RangeTransformWrap.java @@ -125,7 +125,7 @@ public boolean apply(Map.Entry datapoint) { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Range Transform doesn't accept list of metric list!"); + throw new UnsupportedOperationException("Range Transform doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RateTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RateTransform.java new file mode 100644 index 000000000..b24eba0d2 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/RateTransform.java @@ -0,0 +1,146 @@ +package com.salesforce.dva.argus.service.metric.transform; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map.Entry; +import java.util.TreeMap; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.system.SystemAssert; +import com.salesforce.dva.argus.util.QueryContext; +import com.salesforce.dva.argus.util.QueryUtils; +import com.salesforce.dva.argus.util.TransformUtil; +/** + * It provides methods to implement Rate transform + * @author Raj Sarkapally (rsarkapally@salesforce.com) + * + */ +public class RateTransform implements Transform{ + private static long DEFAULT_INTERVAL=60*1000; + private static boolean DEFAULT_SKIP_NEGATIVE_VALUES=true; + private static boolean DEFAULT_INTERPOLATE_MISSING_DATAPOINTS=true; + + @Override + public List transform(QueryContext queryContext, List metrics) { + Long[] startAndEndTimestamps = QueryUtils.getStartAndEndTimesWithMaxInterval(queryContext); + return performRate(metrics, startAndEndTimestamps[0], startAndEndTimestamps[1], DEFAULT_INTERVAL, + DEFAULT_SKIP_NEGATIVE_VALUES, DEFAULT_INTERPOLATE_MISSING_DATAPOINTS); + } + + @Override + public List transform(QueryContext queryContext, + List metrics, List constants) { + SystemAssert.requireArgument(constants != null && constants.size() == 3, + "Rate Transform needs 3 constants (interval, skipNegativeValues, interpolateMissingValues)!"); + + Long[] startAndEndTimestamps = QueryUtils.getStartAndEndTimesWithMaxInterval(queryContext); + long intervalInMilli = TransformUtil.getWindowInSeconds(constants.get(0)) * 1000; + return performRate(metrics, startAndEndTimestamps[0], startAndEndTimestamps[1], intervalInMilli, + Boolean.valueOf(constants.get(1)), Boolean.valueOf(constants.get(2))); + } + + @Override + public List transform(QueryContext queryContext, + List... 
metrics) { + throw new UnsupportedOperationException("Rate transform doesn't need list of list"); + } + + @Override + public String getResultScopeName() { + return TransformFactory.Function.RATE.name(); + } + + private List performRate(List metrics,long startTimestampInMilli, long endTimestampInMilli, long intervalInMilli, + boolean skipNegativeValues, boolean interpolateMissingDP){ + List result= new ArrayList<>(); + for(Metric metric:metrics) { + if(metric.getDatapoints().size()>=2) { + TreeMap sortedDatapoints = new TreeMap<>(metric.getDatapoints()); + startTimestampInMilli = startTimestampInMilli > 0 ? startTimestampInMilli:sortedDatapoints.firstKey(); + endTimestampInMilli = endTimestampInMilli > 0 ?endTimestampInMilli:sortedDatapoints.lastKey(); + if(interpolateMissingDP) { + addFirstNLastDatapointsIfMissing(sortedDatapoints, startTimestampInMilli, endTimestampInMilli, intervalInMilli); + sortedDatapoints=performInterpolation(sortedDatapoints, intervalInMilli); + } + sortedDatapoints = calculateRateValues(sortedDatapoints, intervalInMilli); + if(skipNegativeValues) { + sortedDatapoints = removeNegativeValues(sortedDatapoints); + } + metric.setDatapoints(sortedDatapoints); + result.add(metric); + }else { + // For Rate Transform we are culling the data when there is only one datapoint for a metric + if (metric.getDatapoints().size()==1) + { + metric.setDatapoints(new HashMap<>()); + } + result.add(metric); + } + } + return result; + } + + public TreeMap performInterpolation(TreeMap sortedDatapoints, long intervalInMilli) { + if(sortedDatapoints.size()<2) { + return sortedDatapoints; + } + TreeMap result = new TreeMap<>(); + Long prevTimestamp = sortedDatapoints.firstKey(); + Entry prevDP = sortedDatapoints.firstEntry(); + for(Entry currDP:sortedDatapoints.entrySet()) { + while(currDP.getKey() > (prevTimestamp+intervalInMilli)) { + Long missingTimestamp = prevTimestamp+intervalInMilli; + Double missingValue= getInterpolatedvalue(prevDP, currDP, missingTimestamp); + result.put(missingTimestamp, missingValue); + prevTimestamp = missingTimestamp; + } + result.put(currDP.getKey(), currDP.getValue()); + prevDP=currDP; + prevTimestamp=currDP.getKey(); + } + return result; + } + + private TreeMap removeNegativeValues(TreeMap datapoints){ + TreeMap result = new TreeMap<>(); + for(Entry entry:datapoints.entrySet()) { + if(entry.getValue()>=0) { + result.put(entry.getKey(), entry.getValue()); + } + } + return result; + } + + private TreeMap calculateRateValues(TreeMap sortedDatapoints, long intervalInMilli) { + TreeMap result = new TreeMap<>(); + Entry prevEntry = null; + for (Entry currEntry : sortedDatapoints.entrySet()) { + if (prevEntry !=null){ + double rateValue = intervalInMilli * (currEntry.getValue()-prevEntry.getValue())/(currEntry.getKey()-prevEntry.getKey()); + result.put(currEntry.getKey(), rateValue); + } + prevEntry = currEntry; + } + return result; + } + + private void addFirstNLastDatapointsIfMissing(TreeMap sortedDatapoints, long startTimestampInMilli, long endTimestampInMilli, long intervalInMilli) { + if(sortedDatapoints.size()>=2) { + if(sortedDatapoints.firstKey() >= (startTimestampInMilli + intervalInMilli)) { + double firstDPValue= getInterpolatedvalue(sortedDatapoints.firstEntry(), sortedDatapoints.higherEntry(sortedDatapoints.firstKey()), startTimestampInMilli); + sortedDatapoints.put(startTimestampInMilli, firstDPValue); + } + if(endTimestampInMilli >= (sortedDatapoints.lastKey()+intervalInMilli)) { + double lastDPValue= 
getInterpolatedvalue(sortedDatapoints.lowerEntry(sortedDatapoints.lastKey()), sortedDatapoints.lastEntry(), endTimestampInMilli); + sortedDatapoints.put(endTimestampInMilli, lastDPValue); + } + } + } + + private double getInterpolatedvalue(Entry prevDP, Entry nextDP, long timestamp){ + double slope = (nextDP.getValue()-prevDP.getValue())/(nextDP.getKey()-prevDP.getKey()); + double result = prevDP.getValue() + slope*(timestamp-prevDP.getKey()); + return result; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ShiftValueMapping.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ShiftValueMapping.java index ec7855287..2faa14aec 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ShiftValueMapping.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ShiftValueMapping.java @@ -63,7 +63,7 @@ public Map mapping(Map originalDatapoints, List= Long.MIN_VALUE), - "You are not allowed to shift like this, be nice to me!"); + "Timestamp of shifted metrics not in range Long.MIN_VALUE - Long.MAX_VALUE"); shiftDatapoints.put(newTimestamp, entry.getValue()); } return shiftDatapoints; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SliceTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SliceTransform.java new file mode 100644 index 000000000..296c93040 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SliceTransform.java @@ -0,0 +1,115 @@ +package com.salesforce.dva.argus.service.metric.transform; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.system.SystemAssert; +import com.salesforce.dva.argus.util.QueryContext; +import com.salesforce.dva.argus.util.TransformUtil; +/** + * It provides methods to implement Slice transform + * @author Raj Sarkapally + * + */ +public class SliceTransform implements Transform{ + private static String START_TIME = "start"; + private static String END_TIME = "end"; + private static long SECOND_IN_MILLI=1000l; + + @Override + public List transform(QueryContext context, List metrics) { + throw new UnsupportedOperationException("Slice Transform needs interval start time and end time."); + } + + @Override + public List transform(QueryContext queryContext, + List metrics, List constants) { + SystemAssert.requireArgument(constants != null, "Slice Transform needs interval start time and end time."); + SystemAssert.requireArgument(constants.size() == 2, "Slice Transform must provide exactly 2 constants which are interval start time and interval end time."); + + String startEndTimePattern= "("+ START_TIME + "|"+ END_TIME +")(\\s*[+-]\\s*\\d+[smhd])?"; + String sliceStartTime = constants.get(0).trim(); + String sliceEndTime = constants.get(1).trim(); + SystemAssert.requireArgument((isLong(sliceStartTime) || sliceStartTime.matches(startEndTimePattern)), "The start time of Slice transform is invalid."); + SystemAssert.requireArgument((isLong(sliceEndTime) || sliceEndTime.matches(startEndTimePattern)), "The end time of Slice transform is invalid."); + + long sliceStartTimeInMilli = calculateTime(sliceStartTime, queryContext.getChildContexts().get(0).getExpression().getStartTimestamp(), + queryContext.getChildContexts().get(0).getExpression().getEndTimestamp()); + + long sliceEndTimeInMilli = calculateTime(sliceEndTime, 
queryContext.getChildContexts().get(0).getExpression().getStartTimestamp(), + queryContext.getChildContexts().get(0).getExpression().getEndTimestamp()); + + metrics.forEach(metric -> { + Map slicedDatapoints = new HashMap<>(); + metric.getDatapoints().forEach((timestamp,value) ->{ + if(timestamp >= sliceStartTimeInMilli && timestamp <=sliceEndTimeInMilli) { + slicedDatapoints.put(timestamp, value); + } + }); + metric.setDatapoints(slicedDatapoints); + }); + return metrics; + } + + @Override + public List transform(QueryContext queryContext, + List... metrics) { + throw new UnsupportedOperationException("Slice Transform doesn't need list of list."); + } + + @Override + public String getResultScopeName() { + return TransformFactory.Function.SLICE.name(); + } + + private long calculateTime(String time,long queryStartTime, long queryEndTime) { + if(isLong(time)) { + return Long.valueOf(time); + }else { + long startREndtime; + String remTimeString; + if(time.contains(START_TIME)) { + startREndtime=queryStartTime; + remTimeString=time.substring(START_TIME.length()).trim(); + if(remTimeString.isEmpty()) { + return queryStartTime; + } + }else { + startREndtime=queryEndTime; + remTimeString=time.substring(END_TIME.length()).trim(); + if(remTimeString.isEmpty()) { + return queryEndTime; + } + } + return calculate(startREndtime, remTimeString.charAt(0), SECOND_IN_MILLI * TransformUtil.getWindowInSeconds(remTimeString.substring(1).trim())); + } + } + + private long calculate(long operand1, char operator, long operand2) { + switch(operator) { + case '+': + return operand1 + operand2; + case '-': + return operand1 - operand2; + case '*': + return operand1 * operand2; + case '/': + return operand1/operand2; + default: + return operand1-operand2; + } + } + + private boolean isLong(String s) { + try { + Long.valueOf(s); + return true; + }catch(NumberFormatException e) { + return false; + }catch(Throwable t) { + return false; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SortTransformWrapAboveAndBelow.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SortTransformWrapAboveAndBelow.java index 8668cec51..ab0aab0da 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SortTransformWrapAboveAndBelow.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/SortTransformWrapAboveAndBelow.java @@ -114,7 +114,7 @@ public String getResultScopeName() { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Sort transform doesn't support list of metric list!"); + throw new UnsupportedOperationException("Sort transform doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/TransformFactory.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/TransformFactory.java index 23e6e2d27..f6791ed32 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/TransformFactory.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/TransformFactory.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service.metric.transform; import com.google.inject.Inject; @@ -58,12 +58,23 @@ public class TransformFactory { * Creates a new TransformFactory object. * * @param tsdbService The TSDB service to use. + * @param metadataService */ - @Inject public TransformFactory(TSDBService tsdbService) { _tsdbService = tsdbService; } + /** + * Creates a new TransformFactory object. + * + * @param tsdbService The TSDB service to use. + * @param metadataService + */ + @Inject + public TransformFactory(TSDBService tsdbService, TSDBService bService) { + _tsdbService = tsdbService; + } + //~ Methods ************************************************************************************************************************************** /** @@ -190,6 +201,10 @@ public Transform getTransform(String functionName) { return new AnomalyDetectionRPCATransform(); case INTERPOLATE: return new InterpolateTransform(); + case RATE: + return new RateTransform(); + case SLICE: + return new SliceTransform(); default: throw new UnsupportedOperationException(functionName); } // end switch @@ -264,7 +279,9 @@ public enum Function { ANOMALY_ZSCORE("ANOMALY_ZSCORE", "Calculates an anomaly score (0-100) for each value of the metric based on the z-score of each value with a Gaussian distribution."), ANOMALY_KMEANS("ANOMALY_KMEANS", "Calculates an anomaly score (0-100) for each value of the metric based on a K-means clustering of the metric data."), ANOMALY_RPCA("ANOMALY_RPCA", "Calculates an anomaly score (0-100) for each value of the metric based on the RPCA matrix decomposition algorithm."), - INTERPOLATE("INTERPOLATE", "Performs interpolation of multiple time series, that can then be used for aggregation"); + INTERPOLATE("INTERPOLATE", "Performs interpolation of multiple time series, that can then be used for aggregation"), + RATE("RATE", "Performs Rate for all given time series"), + SLICE("SLICE", "Removes data points before interval start time and after interval end time. "); private final String _name; private final String _description; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ZeroIfMissingSum.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ZeroIfMissingSum.java index 8557b82d6..93330a54a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ZeroIfMissingSum.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/metric/transform/ZeroIfMissingSum.java @@ -113,7 +113,7 @@ private Double performOperation(Double operand1, Double operand2) { @Override public List transform(QueryContext queryContext, List... listOfList) { - throw new UnsupportedOperationException("Zero if missing Sum Transform is not supposed to be used with a list of metric list!"); + throw new UnsupportedOperationException("Zero if missing Sum Transform doesn't support multiple lists of metrics!"); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetric.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetric.java new file mode 100644 index 000000000..fb797778b --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetric.java @@ -0,0 +1,75 @@ +package com.salesforce.dva.argus.service.monitor; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MonitorService; + +public class CounterMetric extends GaugeMetric implements MetricMXBean { + private static final double DOUBLE_COMPARISON_MAX_DELTA = 0.001; + /** + * The adder inherited from GaugeMetric will be re-purposed to act as a monotonic adder, instead of being + * a gauge adder which gets reset on a periodic basis to update the gauge. The previousResetCounterValue + * field will keep track of the value of the adder when the previous gauge value was calculated. + */ + protected volatile Double previousResetAdderValue; + + public CounterMetric(Metric metric, MonitorService.Counter counter) { + this(metric, counter.getJMXMetricNameSuffix()); + } + + public CounterMetric(Metric metric, String jmxMetricNameSuffix) { + super(metric, _createJMXObjectNameForMetric(metric, jmxMetricNameSuffix, MetricType.COUNTER_METRIC)); + previousResetAdderValue = 0.0; + } + + /** + * Here the resetting of the "gauge" adder is simulated. The new gauge value is calculated based on the + * last value that the adder was "reset" and the current value of the monotonic adder. + * + * @return the new value of the gauge + */ + @Override + public Double computeNewGaugeValueAndResetGaugeAdder() { + // do not reset adder, because it should be monotonically increasing + double currentResetAdderValue = adder.doubleValue(); + value = getCurrentGaugeAdderValue(currentResetAdderValue); + previousResetAdderValue = currentResetAdderValue; + return value; + } + + /** + * @return value of the monotonic counter + */ + @Override + public Double getValue() { + return adder.doubleValue(); + } + + /** + * The current gauge value is calculated based on the last value that the adder was "reset" and the + * current value of the monotonic counter. + * + * @return the current value of the gauge adder + */ + @Override + public Double getCurrentGaugeAdderValue() { + return getCurrentGaugeAdderValue(getValue()); + } + + private Double getCurrentGaugeAdderValue(Double currentAdderValue) { + if (compareGreaterThanOrEqual(currentAdderValue, previousResetAdderValue, DOUBLE_COMPARISON_MAX_DELTA)) { + // new gauge value + return currentAdderValue - previousResetAdderValue; + } else { + // overflow case if previous value is greater than current value + return currentAdderValue + (Double.MAX_VALUE - previousResetAdderValue); + } + } + + private static boolean compareGreaterThanOrEqual(double x, double y, double delta) { + return x > y || compareAlmostEqual(x, y, delta); + } + + private static boolean compareAlmostEqual(double x, double y, double delta) { + return x == y || Math.abs(x - y) < delta; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetricJMXExporter.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetricJMXExporter.java deleted file mode 100644 index 6a362755d..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/CounterMetricJMXExporter.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright (c) 2018, Salesforce.com, Inc. 
- * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ -package com.salesforce.dva.argus.service.monitor; - -import java.lang.management.ManagementFactory; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -import javax.management.InstanceAlreadyExistsException; -import javax.management.MBeanRegistrationException; -import javax.management.MBeanServer; -import javax.management.MalformedObjectNameException; -import javax.management.NotCompliantMBeanException; -import javax.management.ObjectName; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.inject.Inject; -import com.google.inject.Singleton; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.entity.MutableGauge; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; - -import scala.collection.mutable.ArrayBuilder.ofBoolean; - -/** - * This is the implementation for @GaugeExporter to export metrics to JMX. It - * transform incoming metric object into @MutableGauge object, register it with - * JMX if it is new, and set its value. The MBeanServer will take care of - * making it available through JMX port. - * - * @author taozhang - * - */ -@Singleton -public class CounterMetricJMXExporter implements GaugeExporter { - - private final MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); - private final Logger _logger = LoggerFactory.getLogger(CounterMetricJMXExporter.class); - - private final Map _exportedMetrics = new ConcurrentHashMap(); - - private String _createObjectNameForMetric(Metric metric) { - String objName = "ArgusMetrics:type=Counter,scope=" + metric.getScope() + ",metric=" + metric.getMetric(); - if (null != metric.getTags()) { - for (String key : metric.getTags().keySet()) { - objName = objName + "," + (key.equalsIgnoreCase("type")? 
"_type":key) + "=" + metric.getTags().get(key); - } - } - return objName; - } - - @Inject - public CounterMetricJMXExporter() { - _logger.info("CounterMetricJMXExporter created."); - } - - @Override - public void exportGauge(Metric metric, Double value) { - String objectName = this._createObjectNameForMetric(metric); - - synchronized (_exportedMetrics) { - _logger.debug("exportGauge(): +++ set {} to {}", objectName, value); - if (!_exportedMetrics.containsKey(objectName)) { - MutableGauge gauge = new MutableGauge(objectName); - gauge.setValue(value); - _exportedMetrics.put(objectName, gauge); - try { - _logger.debug("exportGauge(): !!!!!! come to register {} to JMX", objectName); - mbeanServer.registerMBean(gauge, new ObjectName(objectName)); - } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException - | MalformedObjectNameException e) { - _logger.error("exportGauge(): failed to register internal counter {} to JMX {}", objectName, e); - } - } else { - _exportedMetrics.get(objectName).setValue(value); - } - } - } - -} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitor.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitor.java deleted file mode 100644 index 3196af229..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitor.java +++ /dev/null @@ -1,130 +0,0 @@ -package com.salesforce.dva.argus.service.monitor; - -import java.text.MessageFormat; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import org.apache.commons.lang.exception.ExceptionUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.service.MailService; -import com.salesforce.dva.argus.service.MetricService; -import com.salesforce.dva.argus.system.SystemConfiguration; - -/* - * This class runs a thread which periodically checks if there is data lag on Argus side. 
- * - */ -public class DataLagMonitor extends Thread{ - - private String _dataLagQueryExpression; - - private long _dataLagThreshold; - - private String _dataLagNotificationEmailId; - - private String _hostName; - - private boolean isDataLagging = false; - - private MetricService _metricService; - - private MailService _mailService; - - private static final Long SLEEP_INTERVAL_MILLIS = 60*1000L; - - private final Logger _logger = LoggerFactory.getLogger(DataLagMonitor.class); - - public DataLagMonitor(SystemConfiguration sysConfig, MetricService metricService, MailService mailService) { - _metricService = metricService; - _mailService = mailService; - _dataLagQueryExpression = sysConfig.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.DATA_LAG_QUERY_EXPRESSION); - _dataLagThreshold = Long.valueOf(sysConfig.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.DATA_LAG_THRESHOLD)); - _dataLagNotificationEmailId = sysConfig.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.DATA_LAG_NOTIFICATION_EMAIL_ADDRESS); - _hostName = sysConfig.getHostname(); - _logger.info("Data lag monitor initialized"); - } - - @Override - public void run() { - _logger.info("Data lag monitor thread started"); - boolean firstTime = true; - while (!isInterrupted()) { - try { - if(!firstTime) { - sleep(SLEEP_INTERVAL_MILLIS); - }else { - // waiting 5 seconds for everything to initialize - sleep(5*1000); - firstTime = false; - } - long currTime = System.currentTimeMillis(); - List metrics = _metricService.getMetrics(_dataLagQueryExpression, currTime); - if(metrics==null || metrics.isEmpty()) { - _logger.info("Data lag detected as metric list is empty"); - if(!isDataLagging) { - isDataLagging=true; - sendDataLagEmailNotification(); - } - continue; - } - - //assuming only one time series in result - Metric currMetric = metrics.get(0); - if(currMetric.getDatapoints()==null || currMetric.getDatapoints().size()==0) { - _logger.info("Data lag detected as data point list is empty"); - if(!isDataLagging) { - isDataLagging=true; - sendDataLagEmailNotification(); - } - continue; - }else { - long lastDataPointTime = 0L; - for(Long dataPointTime : currMetric.getDatapoints().keySet()) { - if(dataPointTime > lastDataPointTime) { - lastDataPointTime = dataPointTime; - } - } - if((currTime - lastDataPointTime)> _dataLagThreshold) { - _logger.info("Data lag detected as the last data point recieved is more than the data threshold of " + _dataLagThreshold + " ms"); - if(!isDataLagging) { - isDataLagging=true; - sendDataLagEmailNotification(); - } - continue; - } - } - if(isDataLagging) { - isDataLagging = false; - sendDataLagEmailNotification(); - } - }catch(Exception e) { - _logger.error("Exception thrown in data lag monitor thread - " + ExceptionUtils.getFullStackTrace(e)); - } - } - } - - private void sendDataLagEmailNotification() { - Set emailAddresseses = new HashSet(); - emailAddresseses.add(_dataLagNotificationEmailId); - String subject = ""; - if(isDataLagging) { - subject = "Alert evaluation on host - "+ _hostName + " has been stopped due to metric data lag"; - }else { - subject = "Alert evaluation on host - "+ _hostName + " has been resumed as the metric data lag has cleared"; - } - - StringBuilder body = new StringBuilder(); - body.append(MessageFormat.format("Evaluated metric expression: {0}
<br>", _dataLagQueryExpression)); - body.append(MessageFormat.format("Configured data lag threshold: {0}<br>
", _dataLagThreshold)); - - _mailService.sendMessage(emailAddresseses, subject, body.toString(), "text/html; charset=utf-8", MailService.Priority.NORMAL); - } - - public boolean isDataLagging() { - return isDataLagging; - } -} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorConsumerOffset.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorConsumerOffset.java new file mode 100644 index 000000000..92f409a82 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorConsumerOffset.java @@ -0,0 +1,460 @@ +/* + * + * * Copyright (c) 2016, Salesforce.com, Inc. + * * All rights reserved. + * * + * * Redistribution and use in source and binary forms, with or without + * * modification, are permitted provided that the following conditions are met: + * * + * * 1. Redistributions of source code must retain the above copyright notice, + * * this list of conditions and the following disclaimer. + * * + * * 2. Redistributions in binary form must reproduce the above copyright notice, + * * this list of conditions and the following disclaimer in the documentation + * * and/or other materials provided with the distribution. + * * + * * 3. Neither the name of Salesforce.com nor the names of its contributors may + * * be used to endorse or promote products derived from this software without + * * specific prior written permission. + * * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * * POSSIBILITY OF SUCH DAMAGE. + * + */ + +/** + * Implements data lag detection on alert client side using consumer offset lag posted by kafka consumers in the upstream. 
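 + * A DC is flagged as lagging only when every offset sample in the queried window is at or
 + * above its trigger threshold (or when no samples can be retrieved at all), and is cleared
 + * only when every sample falls below its clear threshold; mixed windows keep the prior state.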
+ * @author Sudhanshu.Bahety (sudhanshu.bahety@salesforce.com) + * */ +package com.salesforce.dva.argus.service.monitor; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Sets; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MailService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MetricStorageService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.mail.EmailContext; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.ws.rs.NotFoundException; +import java.text.MessageFormat; +import java.util.AbstractMap; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.stream.Collectors; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +public class DataLagMonitorConsumerOffset implements DataLagService { + + private String hostName; + private Map dataLagTriggerThresholdPerDC = new HashMap<>(); + private Map dataLagClearThresholdPerDC = new HashMap<>(); + private Map expressionPerDC = new HashMap<>(); + private Map lagStatePerDC = new HashMap<>(); + private Set dcSet = new HashSet<>(); + private MetricStorageService consumerOffsetMetricService; + private MetricService metricService; + private TSDBService tsdbService; + private MailService mailService; + private Set enforceLagPresentSet; + private final Logger logger = LoggerFactory.getLogger(DataLagMonitorConsumerOffset.class); + private SystemConfiguration sysConfig; + + private static final String TOPIC_TAG = "topic"; + private static final Long SLEEP_INTERVAL_MILLIS = 60 * 1000L; + private static Long datalagInertia = 5 * 60 * 1000L; + + // TODO: Remove this once verification completes. 
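+ // When debug mode is enabled, every lag-state change is emailed to DEFAULT_EMAIL and all
+ // log lines carry DEBUG_PREFIX; both settings are read from configuration in initDebug().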
+ private static Boolean isInDebugMode = false; + private static String DEFAULT_EMAIL = "sudhanshu.bahety@salesforce.com"; + private static final String DEFAULT_SUBJECT = "Data Lag Consumer Offset Method detected a state change"; + private static String DEBUG_PREFIX; + + private final ExecutorCompletionService>> completionService; + + @Inject + public DataLagMonitorConsumerOffset(SystemConfiguration config, MetricStorageService consumerOffsetMetricService, MetricService metricService, TSDBService tsdbService, MailService mailService) { + this.sysConfig = config; + initDebug(); + this.consumerOffsetMetricService = consumerOffsetMetricService; + this.tsdbService = tsdbService; + this.metricService = metricService; + this.mailService = mailService; + this.hostName = SystemConfiguration.getHostname(); + datalagInertia = Long.valueOf(sysConfig.getValue(Property.DATA_LAG_INERTIA.getName(), Property.DATA_LAG_INERTIA.getDefaultValue())); + completionService = new ExecutorCompletionService<>(Executors.newFixedThreadPool(5)); + init(); + this.logger.info(DEBUG_PREFIX + "Data lag consumer offset monitor initialized"); + } + + // TODO: Remove this once verification completes. + private void initDebug() { + isInDebugMode = Boolean.valueOf(sysConfig.getValue(Property.DATA_LAG_DEBUG.getName(), Property.DATA_LAG_DEBUG.getDefaultValue())); + DEFAULT_EMAIL = String.valueOf(sysConfig.getValue(Property.DATA_LAG_EMAIL.getName(), Property.DATA_LAG_EMAIL.getDefaultValue())); + if (isInDebugMode) { + DEBUG_PREFIX = "[DEBUG-DATALAG] "; + } else { + DEBUG_PREFIX = ""; + } + } + + private void init() { + dcSet = Sets.newHashSet(sysConfig.getValue(SystemConfiguration.Property.DC_LIST).split(",")); + enforceLagPresentSet = Sets.newHashSet(sysConfig.getValue(DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST.getName(), DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST.getDefaultValue()).trim().toUpperCase().split(",")); + // Read expression per DC from the config file + String defaultExpression = sysConfig.getValue(Property.DATA_LAG_DEFAULT_EXPRESSION.getName(), Property.DATA_LAG_DEFAULT_EXPRESSION.getDefaultValue()); + try { + JsonObject dataLagQueryExpressions = new JsonParser().parse(sysConfig.getValue(Property.DATA_LAG_QUERY_EXPRESSION.getName(), Property.DATA_LAG_QUERY_EXPRESSION.getDefaultValue())).getAsJsonObject(); + for (Map.Entry entry : dataLagQueryExpressions.entrySet()) { + String currentExpression = entry.getKey().trim(); + JsonArray dcList = entry.getValue().getAsJsonArray(); + for (JsonElement value : dcList) { + try { + String currentDC = value.getAsString().trim().toUpperCase(); + expressionPerDC.put(currentDC, currentExpression.replace("#DC#", currentDC.toLowerCase()));//Note: When we post from AKC, all DCs are in lower case. + dcSet.add(currentDC); + } catch (Exception ex) { + logger.error(DEBUG_PREFIX + "Exception occured while parsing the datalag expression for DC: " + value + ", using default expression. Exception: " + ex); + } + } + } + } catch (Exception ex) { + logger.error(DEBUG_PREFIX + "Exception occured while parsing the datalag expression json list, using default expression. Exception: ", ex); + } finally { + // Fill with default values + for (String dc : dcSet) { + dc = dc.trim().toUpperCase(); + if (!expressionPerDC.containsKey(dc)) { + expressionPerDC.put(dc, defaultExpression); + } + lagStatePerDC.put(dc, false); + } + } + + // Read default thresholds. 
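+ // Illustrative (not actual defaults) shapes of the threshold JSON consumed below — the key
+ // is the threshold value and the value lists the DCs it applies to:
+ //   { "23000": ["DC1"], "30000": ["DC2", "DC3"] }
+ // (the expression list parsed above follows the same shape, keyed by an expression
+ // containing a #DC# placeholder that is substituted per data center)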
+ Long defaultTriggerThreshold = Long.valueOf(sysConfig.getValue(Property.DATA_LAG_DEFAULT_TRIGGER_THRESHOLD.getName(), Property.DATA_LAG_DEFAULT_TRIGGER_THRESHOLD.getDefaultValue())); + Long defaultClearThreshold = Long.valueOf(sysConfig.getValue(Property.DATA_LAG_DEFAULT_CLEAR_THRESHOLD.getName(), Property.DATA_LAG_DEFAULT_CLEAR_THRESHOLD.getDefaultValue())); + // Read DC per threshold from the config file. + readThresholds(sysConfig.getValue(Property.DATA_LAG_TRIGGER_THRESHOLD.getName(), Property.DATA_LAG_TRIGGER_THRESHOLD.getDefaultValue()), dataLagTriggerThresholdPerDC, defaultTriggerThreshold); + readThresholds(sysConfig.getValue(Property.DATA_LAG_CLEAR_THRESHOLD.getName(), Property.DATA_LAG_CLEAR_THRESHOLD.getDefaultValue()), dataLagClearThresholdPerDC, defaultClearThreshold); + } + + private void readThresholds(String thresholdProperty, Map dataLagThresholdPerDC, Long defaultValue) { + requireArgument(dataLagThresholdPerDC != null, "Data lag threshold per dc cannot be null"); + requireArgument(defaultValue != null, "Default threshold value cannot be null"); + JsonObject dataLagThresholdObject = null; + try { + dataLagThresholdObject = new JsonParser().parse(thresholdProperty).getAsJsonObject(); + Set> entriesForThreshold = dataLagThresholdObject.entrySet(); + for (Map.Entry entry : entriesForThreshold) { + Long currentThreshold = Long.valueOf(entry.getKey().trim()); + JsonArray dcList = entry.getValue().getAsJsonArray(); + for (JsonElement value : dcList) { + try { + String currentDC = value.getAsString().trim().toUpperCase(); + dataLagThresholdPerDC.put(currentDC, currentThreshold); + dcSet.add(currentDC); + } catch (Exception ex) { + logger.error(DEBUG_PREFIX + "Exception occured while parsing threshold for DC: " + value + ", using default threshold. Exception: ", ex); + } + } + } + } catch (Exception ex) { + logger.error(DEBUG_PREFIX + "Exception occured while parsing threshold value per dc. 
Exception: ", ex); + } finally { + dcSet.stream() + .filter(dc -> !dataLagThresholdPerDC.containsKey(dc)) + .forEach(dc -> dataLagThresholdPerDC.put(dc, defaultValue)); + } + } + + @Override + public void run() { + logger.info(DEBUG_PREFIX + "Data lag consumer offset monitor thread started"); + while (!Thread.currentThread().isInterrupted()) { + try { + Thread.sleep(SLEEP_INTERVAL_MILLIS); + queryMetricsForDC(dcSet, System.currentTimeMillis()).forEach(this::computeDataLag); + } catch (Exception e) { + logger.error(DEBUG_PREFIX + "Exception thrown in data lag monitor thread: " + e); + } + } + } + + @Override + public Map> queryMetricsForDC(Set dcSet, Long startTime) { + requireArgument(dcSet != null && !dcSet.isEmpty(), "DCs for which data lag is to be queried cannot be null or empty"); + if (startTime == null) { + logger.warn(DEBUG_PREFIX + "Start time from which data lag is to be computed is null, taking current value by default"); + startTime = System.currentTimeMillis(); + } + + Long startTimeFinal = startTime; + + Map> metricsPerDC = new HashMap<>(); + + for (String dc : dcSet) { + completionService.submit(() -> { + List metrics = new ArrayList<>(); + String currentDcExpression = expressionPerDC.get(dc); + List metricQueryList = metricService.parseToMetricQuery(currentDcExpression, startTimeFinal); + try { + metrics = consumerOffsetMetricService.getMetrics(metricQueryList).values().stream().flatMap(Collection::stream).collect(Collectors.toList()); + } catch (Exception e) { + metrics.clear(); + logger.error(DEBUG_PREFIX + "Consumer Offset Metric Service failed to get metric for expression: " + currentDcExpression + " while being queried by DataLagMonitorConsumerOffset, for DC: " + dc + ". Querying TSDB for metrics. Exception: ", e); + } + try { + if (metrics.size() == 0) { + logger.warn(DEBUG_PREFIX + "Cannot retrieve metrics from ES cluster. Querying TSDB for metrics."); + metrics = metricService.getMetrics(currentDcExpression, startTimeFinal).getMetricsList(); + } + } catch (Exception e) { + metrics.clear(); + logger.error(DEBUG_PREFIX + "TSDB Metric Service failed to get metric for expression: " + currentDcExpression + " while being queried by DataLagMonitorConsumerOffset, for DC: " + dc + " Exception: ", e); + } + return new AbstractMap.SimpleEntry<>(dc, metrics); + }); + } + + for (int idx = 0; idx < dcSet.size(); ++idx) { + try { + AbstractMap.SimpleEntry> result = completionService.take().get(); + metricsPerDC.put(result.getKey(), result.getValue()); + } catch (Exception e) { + logger.error(DEBUG_PREFIX + "Exception occured while querying metrics", e); + } + } + + return metricsPerDC; + } + + @VisibleForTesting + protected String getDCFromTopic(String topic) { + requireArgument(topic != null, "Topic for which dc is to be extracted cannot be null"); + String finalTopic = topic.toUpperCase(); + for(String s: dcSet) { + if (finalTopic.contains(s)) { + return s; + } + } + throw new NotFoundException(DEBUG_PREFIX + "No Data center could be inferred from topic: " + topic); + } + + /* + In current implementation, inertia value is same as the look back window for which metric is queried. + This helps in reducing the problem of only checking if all the value returned in time series violates the condition. + TODO: If the look back window is different from the inertia period, the logic has to be changed. 
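+ Worked example (default thresholds: trigger 23000, clear 5000): a window of offsets
+ [24000, 26000] turns the lag flag on, [4000, 3000] turns it off, and a mixed window
+ such as [24000, 4000] leaves the previous state unchanged.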
+ */ + @Override + public Boolean computeDataLag(String dc, List metricList) { + + if (metricList.size() <= 0) { + logger.error(DEBUG_PREFIX + "No Metrics could be obtained for dc: {}, enabling data lag by default.", dc); + lagStatePerDC.put(dc, true); + return true; + } + else if (metricList.size() != 1) { + logger.warn(DEBUG_PREFIX + "More than 1 metrics returned for a single dc: {}, Metric list: {}\nCombining all data points to compute data lag.",dc, metricList); + } + requireArgument(dc != null, "Data center for which data lag is to be computed cannot be null"); + + dc = dc.trim().toUpperCase(); + Map datapoints = new HashMap<>(); + metricList.forEach(m -> datapoints.putAll(m.getDatapoints())); + Long triggeringThreshold = dataLagTriggerThresholdPerDC.get(dc); + Long clearingThreshold = dataLagClearThresholdPerDC.get(dc); + boolean isTriggering = true, isClearing = true, initialState = lagStatePerDC.get(dc); + + if(datapoints.size() == 0) { + logger.warn(DEBUG_PREFIX + "No metrics retrieved for Metrics: {}", metricList); + logger.warn(DEBUG_PREFIX + "Enabling data lag for dc: {}", dc); + lagStatePerDC.put(dc, true); + // If we are unable to retrieve metric for the current minute, we are resorting to the default value of 0. + pushMetric(System.currentTimeMillis(), 0.0, dc); + } else { + for (Double currentValue : datapoints.values()) { + isTriggering &= (currentValue >= triggeringThreshold); + isClearing &= (currentValue < clearingThreshold); + + if (!isTriggering && !isClearing) { + break; + } + } + + if (isTriggering && isClearing) { + logger.error(DEBUG_PREFIX + MessageFormat.format("Both Triggering and Clearing conditions cannot hold true at the same time. datapoints: {0}, Triggering threshold: {1}, Clearing threshold: {2}", datapoints, triggeringThreshold, clearingThreshold)); + lagStatePerDC.put(dc, true); + } + else if (isTriggering) { + lagStatePerDC.put(dc, true); + } else if (isClearing) { + lagStatePerDC.put(dc, false); + } + pushMetric(System.currentTimeMillis(), Collections.max(datapoints.values()), dc); + } + + // TODO: Remove this once verification completes. + if ( isInDebugMode && (initialState ^ lagStatePerDC.get(dc)) ) { // Notify whenever there is a state change. + + StringBuilder message = new StringBuilder(); + String state = isTriggering ? "Triggering" : "Clearing"; + + message.append("
<br><br>Data lag state change was detected by consumer offset method.<br><br>"); + message.append(MessageFormat.format("DC: {0}, State: {1}, Triggering Threshold: {2}, Clearing Threshold: {3}", dc, state, triggeringThreshold, clearingThreshold)); + message.append(MessageFormat.format("<br>
List retrieved by ES: {0}", metricList)); + EmailContext.Builder emailContextBuilder = new EmailContext.Builder() + .withRecipients(Sets.newHashSet(DEFAULT_EMAIL)) + .withSubject(DEFAULT_SUBJECT) + .withEmailBody(message.toString()) + .withContentType("text/html; charset=utf-8") + .withEmailPriority(MailService.Priority.HIGH); + mailService.sendMessage(emailContextBuilder.build()); + } + + return lagStatePerDC.get(dc); + } + + @Override + public Boolean isDataLagging(String currentDC) { + if (currentDC == null) { + return false; + } + currentDC = currentDC.trim().toUpperCase(); + + if (enforceLagPresentSet.contains(currentDC)) { + return true; + } + if (lagStatePerDC.containsKey(currentDC)) { + return lagStatePerDC.get(currentDC); + } + + return lagStatePerDC.values() + .stream() + .reduce((e1, e2) -> (e1 || e2)) + .orElse(false); + } + + @Override + public void pushMetric(Long time, Double value, String dc) { + requireArgument( value != null, "Value of conusmer offset metric cannot be null"); + if (time == null) { + logger.warn("Time when the metric is pushed is null. Using current time"); + time = System.currentTimeMillis(); + } + if (dc == null) { + logger.warn("DC for which metric is pushed is null. Using NO_DC_SPECIFIED as value"); + dc = "NO_DC_SPECIFIED"; + } + + String finalDC = dc; + + Metric trackingMetric = new Metric(MonitorService.Counter.DATALAG_PER_DC_OFFSET_LAG.getScope(), MonitorService.Counter.DATALAG_PER_DC_OFFSET_LAG.getMetric()); + ExecutorService _executorService = Executors.newSingleThreadExecutor(); + Map tags = new HashMap<>(); + + tags.put("dc", dc); + tags.put("host", hostName); + + trackingMetric.setTags(tags); + Map currentDatapoint = new HashMap<>(); + currentDatapoint.put(time, value); + trackingMetric.setDatapoints(currentDatapoint); + + try { + _executorService.submit(()->{ + tsdbService.putMetrics(Collections.singletonList(trackingMetric)); + logger.debug(DEBUG_PREFIX + MessageFormat.format("Pushing datalag metric - hostname:{0}, dc:{1}, offset:{2}", hostName, finalDC, value)); + }); + } catch (Exception ex) { + logger.error(DEBUG_PREFIX + "Exception occurred while pushing datalag metric to tsdb: ", ex); + } finally { + _executorService.shutdown(); + } + } + + /** + * The set of implementation specific configuration properties. + * + */ + public enum Property { + + /** Minute Threshold before you enable data lag */ + DATA_LAG_TRIGGER_THRESHOLD("system.property.data.lag.consumer.offset.trigger.threshold", "thresholdPerDC"), + /** Minute Threshold before you disable data lag */ + DATA_LAG_CLEAR_THRESHOLD("system.property.data.lag.consumer.offset.clear.threshold", "thresholdPerDC"), + /** Expression per dc to determine data lag */ + DATA_LAG_QUERY_EXPRESSION("system.property.data.lag.consumer.offset.expression.list","expressionListPerDC"), + /** Default expression if the expression for dc cannot be queried. */ + DATA_LAG_DEFAULT_EXPRESSION("system.property.data.lag.consumer.offset.default.expression","defaultExpression"), + /** Default threshold if the trigger threshold for dc is not specified. */ + DATA_LAG_DEFAULT_TRIGGER_THRESHOLD("system.property.data.lag.consumer.offset.default.trigger.threshold","23000"), + /** Default threshold if the clear threshold for dc is not specified. */ + DATA_LAG_DEFAULT_CLEAR_THRESHOLD("system.property.data.lag.consumer.offset.default.clear.threshold","5000"), + /** Inertia value for which data lag should continuously hold true. 
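+ * (milliseconds; per the comment above computeDataLag, this currently also serves as the
+ * look-back window of the lag query)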
*/ + DATA_LAG_INERTIA("system.property.data.lag.consumer.offset.default.inertia.millis","300000"), + + // TODO: Remove this once verification completes. + /** Data lag to be run in debug mode to check the behaviour */ + DATA_LAG_DEBUG("system.property.data.lag.consumer.offset.debug.mode", "true"), + /** Default email for debugging purposes */ + DATA_LAG_EMAIL("system.property.data.lag.consumer.offset.debug.email", "sudhanshu.bahety@salesforce.com"); + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorGoldenMetric.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorGoldenMetric.java new file mode 100644 index 000000000..ee2965a80 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DataLagMonitorGoldenMetric.java @@ -0,0 +1,266 @@ +package com.salesforce.dva.argus.service.monitor; + +import com.google.common.collect.Sets; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.MessageFormat; +import java.util.AbstractMap.SimpleEntry; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.concurrent.ExecutorCompletionService; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/* + * This class runs a thread which periodically checks if there is data lag on Argus side. 
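+ * Lag for a DC is measured as the age of the newest datapoint of a per-DC golden metric;
+ * when a query returns no data the recorded lag grows by one sleep interval per cycle, capped
+ * at MAX_LAG_TIME_MILLIS, and the DC is marked lagging once the lag exceeds the threshold.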
+ * + */ +public class DataLagMonitorGoldenMetric implements DataLagService { + + + private String _hostName; + private long _dataLagThreshold; + private Map _isDataLaggingbyDCMap = new TreeMap<>(); + private Map _expressionPerDC = new TreeMap<>(); + private Map _lagPerDC = new TreeMap<>(); + private Set enforceLagPresentSet; + private MetricService _metricService; + private TSDBService _tsdbService; + private static Long SLEEP_INTERVAL_MILLIS = 60*1000L; + private static final Double MAX_LAG_TIME_MILLIS = 4.0*60*60*1000; + private final Logger _logger = LoggerFactory.getLogger(DataLagMonitorGoldenMetric.class); + private SystemConfiguration _sysConfig; + private final ExecutorCompletionService>> _completionService; + + @Inject + public DataLagMonitorGoldenMetric(SystemConfiguration sysConfig, MetricService metricService, TSDBService tsdbService) { + _sysConfig = sysConfig; + _metricService = metricService; + _tsdbService = tsdbService; + _hostName = SystemConfiguration.getHostname(); + init(); + _completionService = new ExecutorCompletionService<>(Executors.newFixedThreadPool(5)); + _logger.info("Data lag golden metric monitor initialized"); + } + + private void init() { + String _defaultExpression = _sysConfig.getValue(Property.DATA_LAG_DEFAULT_EXPRESSION.getName(), Property.DATA_LAG_DEFAULT_EXPRESSION.getDefaultValue()); + _dataLagThreshold = Long.valueOf(_sysConfig.getValue(Property.DATA_LAG_THRESHOLD.getName(), Property.DATA_LAG_THRESHOLD.getDefaultValue())); + try { + JsonObject _dataLagQueryExpressions = new JsonParser().parse(_sysConfig.getValue(Property.DATA_LAG_QUERY_EXPRESSION.getName(), Property.DATA_LAG_QUERY_EXPRESSION.getDefaultValue())).getAsJsonObject(); + Set> entries = _dataLagQueryExpressions.entrySet(); + for (Map.Entry entry : entries) { + String currentExpression = entry.getKey().trim(); + JsonArray dcList = entry.getValue().getAsJsonArray(); + for (JsonElement value : dcList) { + try { + String currentDC = value.getAsString().trim(); + _expressionPerDC.put(currentDC, currentExpression.replace("#DC#", currentDC)); + _isDataLaggingbyDCMap.put(currentDC, false); + } catch (Exception ex) { + _logger.error("Exception occured while parsing the datalag expression for DC: " + value + ", using default expression. Exception: {0}", ex); + } + } + } + } catch (Exception ex) { + _logger.error("Exception occured while parsing the datalag expression json list, using default expression. 
Exception: ", ex); + } + + for (String dc : _sysConfig.getValue(SystemConfiguration.Property.DC_LIST).split(",")) { + if (!_expressionPerDC.containsKey(dc)) { + _expressionPerDC.put(dc, _defaultExpression); + _isDataLaggingbyDCMap.put(dc, false); + } + _lagPerDC.put(dc, 0.0); + } + + enforceLagPresentSet = Sets.newHashSet(_sysConfig.getValue(DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST.getName(), DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST.getDefaultValue()).split(",")); + } + + @Override + public void run() { + _logger.info("Data lag golden metric monitor thread started"); + while (!Thread.currentThread().isInterrupted()) { + try { + Thread.sleep(SLEEP_INTERVAL_MILLIS); + queryMetricsForDC(_expressionPerDC.keySet(), System.currentTimeMillis()).forEach(this::computeDataLag); + } catch (Exception e) { + _logger.error("Exception thrown in data lag golden metric monitor thread: ", e); + } + } + } + + @Override + public Boolean isDataLagging(String currentDC) { + if (currentDC == null) { + return false; + } + currentDC = currentDC.trim().toUpperCase(); + + if (enforceLagPresentSet.contains(currentDC) ) { + return true; + } + if (_isDataLaggingbyDCMap.containsKey(currentDC) ) { + return _isDataLaggingbyDCMap.get(currentDC); + } + return _isDataLaggingbyDCMap.values() + .stream() + .reduce((e1, e2) -> (e1 || e2)) + .orElse(false); + } + + @Override + public Map > queryMetricsForDC(Set dcSet, Long startTime) { + requireArgument(dcSet != null && !dcSet.isEmpty(), "DCs for which data lag is to be queried cannot be null or empty"); + requireArgument(startTime != null, "start time from which query begins cannot be empty"); + + Map> metricsPerDC = new HashMap<>(); + for (String dc : dcSet) { + _completionService.submit(() -> { + List metrics = new ArrayList<>(); + try { + metrics = _metricService.getMetrics(_expressionPerDC.get(dc), startTime).getMetricsList(); + } catch (Exception e) { + metrics.clear(); + _logger.error("Metric Service failed to get metric for expression: " + _expressionPerDC.get(dc) + " while being queried by DataLagMonitorGoldenMetric, for DC: " + dc + " Exception: ", e); + } + + return new SimpleEntry<>(dc, metrics); + }); + } + + for (int idx = 0; idx < dcSet.size(); ++idx) { + try { + Future>> future = _completionService.take(); + SimpleEntry> result = future.get(); + String currentDC = result.getKey(); + List metrics = result.getValue(); + metricsPerDC.put(currentDC, metrics); + } catch (Exception e) { + _logger.error(MessageFormat.format("Exception thrown while evaluating lag time for dc with message: ", e)); + } + } + return metricsPerDC; + } + + @Override + public Boolean computeDataLag(String dc, List metrics) { + requireArgument(dc != null, "Data center for which data lag is to be computed cannot be null"); + + double lagTimeInMillis; + Long currTime = System.currentTimeMillis(); + + if (metrics == null || metrics.isEmpty()) { + _logger.info("Data lag detected as metric list is empty for DC: " + dc); + lagTimeInMillis = Math.min(MAX_LAG_TIME_MILLIS, _lagPerDC.get(dc) + SLEEP_INTERVAL_MILLIS); + } else { + if (metrics.size() > 1) { + _logger.warn("More than 1 metric returned by the metric service while querying for data lag: {}", metrics); + } + //Assuming only one time series in result. 
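+ // e.g. with the default 300000 ms threshold: a newest datapoint 6 minutes old yields
+ // lagTimeInMillis = 360000 and flags the DC; one 2 minutes old yields 120000 and clears it.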
+    @Override
+    public Boolean computeDataLag(String dc, List<Metric> metrics) {
+        requireArgument(dc != null, "Data center for which data lag is to be computed cannot be null");
+
+        double lagTimeInMillis;
+        Long currTime = System.currentTimeMillis();
+
+        if (metrics == null || metrics.isEmpty()) {
+            _logger.info("Data lag detected as metric list is empty for DC: " + dc);
+            lagTimeInMillis = Math.min(MAX_LAG_TIME_MILLIS, _lagPerDC.get(dc) + SLEEP_INTERVAL_MILLIS);
+        } else {
+            if (metrics.size() > 1) {
+                _logger.warn("More than 1 metric returned by the metric service while querying for data lag: {}", metrics);
+            }
+            // Assuming only one time series in the result.
+            Metric currMetric = metrics.get(0);
+            if (currMetric.getDatapoints() == null || currMetric.getDatapoints().size() == 0) {
+                _logger.info("Data lag detected as data point list is empty for DC: " + dc);
+                lagTimeInMillis = Math.min(MAX_LAG_TIME_MILLIS, _lagPerDC.get(dc) + SLEEP_INTERVAL_MILLIS);
+            } else {
+                long lastDataPointTime = Collections.max(currMetric.getDatapoints().keySet());
+                lagTimeInMillis = (currTime - lastDataPointTime);
+            }
+        }
+
+        _lagPerDC.put(dc, lagTimeInMillis);
+        _isDataLaggingbyDCMap.put(dc, lagTimeInMillis > _dataLagThreshold);
+        pushMetric(currTime, lagTimeInMillis, dc);
+
+        return lagTimeInMillis > _dataLagThreshold;
+    }
+
+    @Override
+    public void pushMetric(Long currTime, Double lagTime, String currentDC) {
+        requireArgument(currTime != null, "Time when the metric is pushed should not be null");
+        requireArgument(lagTime != null, "Value of consumer offset metric cannot be null");
+        requireArgument(currentDC != null, "Should specify data center for which offset is being pushed");
+
+        Metric trackingMetric = new Metric(MonitorService.Counter.DATALAG_PER_DC_TIME_LAG.getScope(), MonitorService.Counter.DATALAG_PER_DC_TIME_LAG.getMetric());
+        ExecutorService _executorService = Executors.newSingleThreadExecutor();
+        Map<String, String> tags = new HashMap<>();
+
+        tags.put("dc", currentDC);
+        tags.put("host", _hostName);
+
+        trackingMetric.setTags(tags);
+        Map<Long, Double> currentDatapoint = new HashMap<>();
+        currentDatapoint.put(currTime, lagTime);
+        trackingMetric.setDatapoints(currentDatapoint);
+
+        try {
+            _executorService.submit(() -> {
+                _tsdbService.putMetrics(Collections.singletonList(trackingMetric));
+                _logger.debug(MessageFormat.format("Pushing datalag metric - hostname:{0}, dc:{1}, lagTime:{2}", _hostName, currentDC, lagTime));
+            });
+        } catch (Exception ex) {
+            _logger.error("Exception occurred while pushing datalag metric to tsdb: ", ex);
+        } finally {
+            _executorService.shutdown();
+        }
+    }
+
+    /**
+     * The set of implementation specific configuration properties.
+     */
+    public enum Property {
+
+        /** Threshold, in milliseconds, above which data lag is flagged. */
+        DATA_LAG_THRESHOLD("system.property.data.lag.threshold.millis", "300000"),
+        /** Expression per DC used to determine data lag. */
+        DATA_LAG_QUERY_EXPRESSION("system.property.data.lag.expression.list", "expressionListPerDC"),
+        /** Default expression used when no expression is configured for a DC. */
+        DATA_LAG_DEFAULT_EXPRESSION("system.property.data.lag.default.expression", "defaultExpression");
+
+        private final String _name;
+        private final String _defaultValue;
+
+        Property(String name, String defaultValue) {
+            _name = name;
+            _defaultValue = defaultValue;
+        }
+
+        /**
+         * Returns the property name.
+         *
+         * @return The property name.
+         */
+        public String getName() {
+            return _name;
+        }
+
+        /**
+         * Returns the default value for the property.
+         *
+         * @return The default value.
+         */
+        public String getDefaultValue() {
+            return _defaultValue;
+        }
+    }
+}
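A minimal argus.properties excerpt wiring the keys above, with placeholder values (these are illustrations, not shipped defaults):

    # Flag a DC as lagging once its newest datapoint is older than 5 minutes.
    system.property.data.lag.threshold.millis=300000
    # Per-DC golden-metric expressions; JSON shape as illustrated after init() above.
    system.property.data.lag.expression.list={"<expression with #DC#>": ["DC1", "DC2"]}
    # Fallback for DCs that have no entry in the expression list.
    system.property.data.lag.default.expression=<default expression>
    # DCs for which isDataLagging() must always return true (from DataLagService.Property).
    system.property.data.lag.enforce.dc.list=DC3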
+ * * + * * Redistribution and use in source and binary forms, with or without + * * modification, are permitted provided that the following conditions are met: + * * + * * 1. Redistributions of source code must retain the above copyright notice, + * * this list of conditions and the following disclaimer. + * * + * * 2. Redistributions in binary form must reproduce the above copyright notice, + * * this list of conditions and the following disclaimer in the documentation + * * and/or other materials provided with the distribution. + * * + * * 3. Neither the name of Salesforce.com nor the names of its contributors may + * * be used to endorse or promote products derived from this software without + * * specific prior written permission. + * * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * * POSSIBILITY OF SUCH DAMAGE. + * + */ +package com.salesforce.dva.argus.service.monitor; + +import com.salesforce.dva.argus.entity.Metric; + +import java.util.Map; +import java.util.Set; + +import java.util.List; +/** + * Interface to check for data lag based on various approaches. + * @author sudhanshu.bahety + */ +public interface DataLagService extends Runnable { + /** + * + * @param dcSet List of dc for which we need to make query + * @param startTime start time for the query + * @return Mapping of metric per dc that is to be used to compute data lag + */ + public Map> queryMetricsForDC(Set dcSet, Long startTime); + + /** + * + * @param dc name of the data centre + * @param metricList List of metrics for the specific data centre + * @return Status based on hypothesis whether data is lagging in dc or not + */ + public Boolean computeDataLag(String dc, List metricList); + + /** + * + * @param dc name of the data centre + * @return if data is lagging in the dc + */ + public Boolean isDataLagging(String dc); + + /** + * + * @param time time when the lag metric is pushed + * @param value value of the lag metric + * @param dc dc corresponding to the lag metric + */ + public void pushMetric(Long time, Double value, String dc); + + /** + * The set of implementation specific configuration properties. 
+ * + */ + public enum Property { + + /** Flag to enable/disable monitoring */ + DATA_LAG_MONITOR_ENABLED("system.property.monitor.data.lag", "false"), + /** Whitelist scopes for which data lag always evaluates to false*/ + DATA_LAG_WHITE_LISTED_SCOPES("system.property.data.lag.whitelisted.scopes", "whiteListedScope"), + /** Whitelist scope of user for which data lag always evaluates to false*/ + DATA_LAG_WHITE_LISTED_USERS("system.property.data.lag.whitelisted.username", "default"), + /** List of DC for which data lag present should always evaluate to true*/ + DATA_LAG_ENFORCE_DC_LIST("system.property.data.lag.enforce.dc.list", "dcList"); + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. + */ + public String getDefaultValue() { + return _defaultValue; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DefaultMonitorService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DefaultMonitorService.java index 33b3b81fe..c570156be 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DefaultMonitorService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/DefaultMonitorService.java @@ -43,16 +43,15 @@ import com.salesforce.dva.argus.entity.ServiceManagementRecord.Service; import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AlertService; import com.salesforce.dva.argus.service.DashboardService; import com.salesforce.dva.argus.service.MailService; import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MetricStorageService; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.ServiceManagementService; import com.salesforce.dva.argus.service.TSDBService; import com.salesforce.dva.argus.service.UserService; -import com.salesforce.dva.argus.service.alert.DefaultAlertService; import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; import com.salesforce.dva.argus.service.jpa.DefaultJPAService; import com.salesforce.dva.argus.service.metric.transform.TransformFactory.Function; @@ -62,7 +61,14 @@ import com.sun.management.OperatingSystemMXBean; import com.sun.management.UnixOperatingSystemMXBean; import org.slf4j.Logger; - +import org.slf4j.LoggerFactory; + +import javax.management.InstanceAlreadyExistsException; +import javax.management.MBeanRegistrationException; +import javax.management.MBeanServer; +import javax.management.MalformedObjectNameException; +import javax.management.NotCompliantMBeanException; +import javax.management.ObjectName; import java.lang.management.GarbageCollectorMXBean; import java.lang.management.ManagementFactory; import java.lang.management.MemoryPoolMXBean; @@ -104,21 +110,24 @@ public class DefaultMonitorService extends DefaultJPAService implements MonitorS //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; - private final 
TSDBService _tsdbService; + private final Logger _logger = LoggerFactory.getLogger(DefaultMonitorService.class); + private TSDBService _tsdbService; private final UserService _userService; private final AlertService _alertService; private final ServiceManagementService _serviceManagementService; private final DashboardService _dashboardService; private final MetricService _metricService; private final MailService _mailService; - private final GaugeExporter _gaugeExporter; - private final Map _metrics = new ConcurrentHashMap<>(); + private final Map _metrics; + private final Map _registeredMetrics; private final PrincipalUser _adminUser; private final SystemConfiguration _sysConfig; + private final MBeanServer _mbeanServer; private Thread _monitorThread; - private DataLagMonitor _dataLagMonitorThread; + private Thread _dataLagMonitorThread; + private DataLagService _dataLagService; + // TODO: remove this after data lag consumer offset service verification is over. + private MetricStorageService _metricStorageService; //~ Constructors ********************************************************************************************************************************* @@ -137,8 +146,8 @@ public class DefaultMonitorService extends DefaultJPAService implements MonitorS */ @Inject public DefaultMonitorService(TSDBService tsdbService, UserService userService, AlertService alertService, - ServiceManagementService serviceManagementService, DashboardService dashboardService, MetricService metricService, MailService mailService, - GaugeExporter gaugeExporter, SystemConfiguration sysConfig) { + ServiceManagementService serviceManagementService, DashboardService dashboardService, MetricService metricService, MailService mailService, + SystemConfiguration sysConfig, DataLagService dataLagService, MetricStorageService metricStorageService) { super(null, sysConfig); requireArgument(tsdbService != null, "TSDB service cannot be null."); requireArgument(userService != null, "User service cannot be null."); @@ -154,7 +163,12 @@ public DefaultMonitorService(TSDBService tsdbService, UserService userService, A _metricService = metricService; _mailService = mailService; _adminUser = _userService.findAdminUser(); - _gaugeExporter = gaugeExporter; + _mbeanServer = ManagementFactory.getPlatformMBeanServer(); + _metrics = new ConcurrentHashMap<>(); + _registeredMetrics = new ConcurrentHashMap<>(); + _dataLagService = dataLagService; + // TODO: remove this after data lag consumer offset service verification is over. + _metricStorageService = metricStorageService; } //~ Methods ************************************************************************************************************************************** @@ -208,23 +222,26 @@ public synchronized void startRecordingCounters() { _checkAlertExistence(true); _monitorThread = new MonitorThread("system-monitor"); - _monitorThread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { - - @Override - public void uncaughtException(Thread t, Throwable e) { - _logger.error("Uncaught exception occurred while pushing monitor counters for {}. Reason: {}", HOSTNAME, e.getMessage()); - t.interrupt(); + _monitorThread.setUncaughtExceptionHandler((t, e) -> { + _logger.error("Uncaught exception occurred while pushing monitor counters for {}. 
Reason: {}", HOSTNAME, e.getMessage()); + t.interrupt(); } - }); + ); _monitorThread.start(); _logger.info("System monitor thread started."); - if (Boolean.valueOf(_sysConfig.getValue(com.salesforce.dva.argus.system.SystemConfiguration.Property.DATA_LAG_MONITOR_ENABLED))) { + // TODO: remove this after data lag consumer offset service verification is over. + if (Boolean.valueOf(_sysConfig.getValue(DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getName(), DataLagService.Property.DATA_LAG_MONITOR_ENABLED.getDefaultValue()))) { _logger.info("Starting data lag monitor thread."); - _dataLagMonitorThread = new DataLagMonitor(_sysConfig, _metricService, _mailService); + _dataLagMonitorThread = new Thread(_dataLagService, "datalag-monitor-thread-" + _dataLagService.getClass().getSimpleName()); _dataLagMonitorThread.start(); _logger.info("Data lag monitor thread started."); + + if(Boolean.valueOf(_sysConfig.getValue(DataLagMonitorConsumerOffset.Property.DATA_LAG_DEBUG.getName(), DataLagMonitorConsumerOffset.Property.DATA_LAG_DEBUG.getDefaultValue()))) { + _logger.info("Starting data lag consumer offset monitor thread in debug mode"); + new Thread(new DataLagMonitorConsumerOffset(_sysConfig, _metricStorageService, _metricService, _tsdbService, _mailService)).start(); + } } } } @@ -272,9 +289,8 @@ public void updateCustomCounter(String name, double value, Map t requireArgument(name != null && !name.isEmpty(), "Cannot update a counter with null or empty name."); Metric metric = _constructCounterKey(name, tags); - _gaugeExporter.exportGauge(metric, value); _logger.debug("Updating {} counter for {} to {}.", name, tags, value); - _metrics.put(metric, value); + _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).setValue(value); } @Override @@ -282,7 +298,13 @@ public void updateCounter(Counter counter, double value, Map tag requireNotDisposed(); requireArgument(counter != null, "Cannot update a null counter."); requireArgument(!"argus.jvm".equalsIgnoreCase(counter.getScope()), "Cannot update JVM counters"); - updateCustomCounter(counter.getMetric(), value, tags); + Metric metric = _constructCounterKey(counter.getMetric(), tags); + if (Counter.MetricType.COUNTER.equals(counter.getMetricType())) { + _metrics.computeIfAbsent(metric, k -> _getCounterMXBeanInstance(k, counter)).setValue(value); + } else { + _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).setValue(value); + } + _logger.debug("Updating {} counter for {} to {}.", metric.getMetric(), metric.getTags(), value); } @Override @@ -290,15 +312,12 @@ public double modifyCustomCounter(String name, double delta, Map requireNotDisposed(); SystemAssert.requireArgument(name != null && !name.isEmpty(), "Cannot modify a counter with null or empty name."); - Metric key = _constructCounterKey(name, tags); + Metric metric = _constructCounterKey(name, tags); synchronized (_metrics) { - Double value = _metrics.get(key); - double newValue = value == null ? 
delta : value + delta; + double newValue = _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).addValue(delta); - _logger.debug("Modifying {} counter from {} to {}.", name, value, newValue); - _metrics.put(key, newValue); - _gaugeExporter.exportGauge(key, newValue); + _logger.debug("Modifying {} counter adding delta {} to get new sum {}.", name, delta, newValue); return newValue; } } @@ -308,7 +327,15 @@ public double modifyCounter(Counter counter, double delta, Map t requireNotDisposed(); requireArgument(counter != null, "Cannot modify a null counter."); requireArgument(!"argus.jvm".equalsIgnoreCase(counter.getScope()), "Cannot modify JVM counters"); - return modifyCustomCounter(counter.getMetric(), delta, tags); + Metric metric = _constructCounterKey(counter.getMetric(), tags); + double v; + if (Counter.MetricType.COUNTER.equals(counter.getMetricType())) { + v = _metrics.computeIfAbsent(metric, k -> _getCounterMXBeanInstance(k, counter)).addValue(delta); + } else { + v = _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).addValue(delta); + } + _logger.debug("Modifying {} counter {} adding delta {} to new sum {}.", metric.getMetric(), metric.getTags(), delta, v); + return v; } @Override @@ -326,12 +353,10 @@ public double getCustomCounter(String name, Map tags) { Double value; synchronized (_metrics) { - value = _metrics.get(metric); - if (value == null) { - value = Double.NaN; - } + GaugeMetric b = _metrics.get(metric); + value = b != null ? b.getCurrentGaugeAdderValue() : Double.NaN; } - _logger.debug("Value for {} counter having tags {} is {}.", name, tags, value); + _logger.debug("Value for {} counter having tags {} is {}.", metric.getMetric(), metric.getTags(), value); return value; } @@ -378,14 +403,30 @@ public synchronized void dispose() { } @Override - public boolean isDataLagging() { - if(_dataLagMonitorThread!=null) { - return _dataLagMonitorThread.isDataLagging(); + public boolean isDataLagging(String dataCenter) { + if(_dataLagMonitorThread != null) { + return _dataLagService.isDataLagging(dataCenter); }else { return false; } } + @Override + public void exportMetric(Metric metric, Double value) { + requireNotDisposed(); + Counter counter = Counter.fromMetricName(metric.getMetric()); + if (counter != null && Counter.MetricType.COUNTER.equals(counter.getMetricType())) { + _metrics.computeIfAbsent(metric, k -> _getCounterMXBeanInstance(k, counter)).setValue(value); + } else { + _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).setValue(value); + } + _logger.debug("Exporting JMX counter {} {} new value {}.", metric.getMetric(), metric.getTags(), value); + } + + public void setTSDBService(TSDBService tsdbService) { + _tsdbService = tsdbService; + } + private void _setServiceEnabled(boolean enabled) { synchronized (_serviceManagementService) { ServiceManagementRecord record = _serviceManagementService.findServiceManagementRecord(Service.MONITORING); @@ -545,6 +586,12 @@ private void _updateJVMStatsCounters() { case DAEMON_THREADS: value = (double) ManagementFactory.getThreadMXBean().getDaemonThreadCount(); break; + case MBEANSERVER_MBEAN_TOTAL: + value = _mbeanServer.getMBeanCount().doubleValue(); + Metric metric = _constructCounterKey(counter.getMetric(), Collections.emptyMap()); + metric.setUnits(units); + _metrics.computeIfAbsent(metric, k -> _getGaugeMXBeanInstance(k)).setValue(value); + continue; default: throw new IllegalArgumentException("Unexpected Counter: This should never happen"); } // end switch @@ -552,7 +599,7 @@ private 
void _updateJVMStatsCounters() { Metric metric = _constructCounterKey(counter.getMetric(), Collections.emptyMap()); metric.setUnits(units); - _metrics.put(metric, value); + _metrics.computeIfAbsent(metric, k -> new GaugeMetric(k)).setValue(value); } } // end if } // end for @@ -576,6 +623,39 @@ private Dashboard _getDashboardForScope(String name, String scope) { return dashboard; } + private CounterMetric _getCounterMXBeanInstance(Metric m, Counter c) { + _logger.debug("Get CounterMetric=" + m.getMetric() + m.getTags()); + return (CounterMetric)_registeredMetrics.computeIfAbsent(m, k -> _createAndRegisterCounterMXBean(k, c)); + } + + private CounterMetric _createAndRegisterCounterMXBean(Metric m, Counter c) { + CounterMetric b = new CounterMetric(m, c); + _registerMBean(b); + _logger.debug("Created and registered CounterMetric=" + b.getObjectName()); + return b; + } + + private GaugeMetric _getGaugeMXBeanInstance(Metric m) { + _logger.debug("Get GaugeMetric=" + m.getMetric() + m.getTags()); + return (GaugeMetric)_registeredMetrics.computeIfAbsent(m, k -> _createAndRegisterGaugeMXBean(k)); + } + + private GaugeMetric _createAndRegisterGaugeMXBean(Metric m) { + GaugeMetric b = new GaugeMetric(m); + _registerMBean(b); + _logger.debug("Created and registered GaugeMetric=" + b.getObjectName()); + return b; + } + + private void _registerMBean(MetricMXBean b) { + try { + _mbeanServer.registerMBean(b, new ObjectName(b.getObjectName())); + } catch (InstanceAlreadyExistsException | MBeanRegistrationException | NotCompliantMBeanException + | MalformedObjectNameException e) { + _logger.error("Error registering MetricMXBean with name={}:", b.getObjectName(), e); + } + } + /** * Determines if an alert exists, creates it if it doesn't and then sets it to be enabled or disabled, as required. * @@ -633,7 +713,7 @@ protected synchronized void _checkAlertExistence(boolean enabled) { default: throw new SystemException("Attempting to create an unsupported monitoring alert" + alertName); } - requireArgument(metricExpression != null && triggerType != null & triggerName != null, "Unsupported monitor alert " + alertName); + requireArgument(metricExpression != null && triggerType != null && triggerName != null, "Unsupported monitor alert " + alertName); Alert alert = new Alert(_adminUser, _adminUser, _constructAlertName(alertName), metricExpression, "0 * * * *"); Notification notification = new Notification(NOTIFICATION_NAME, alert, AuditNotifier.class.getName(), new ArrayList(), @@ -682,7 +762,7 @@ public void run() { try { _pushCounters(); } catch (Exception ex) { - _logger.error("Error occurred while pushing monitor counters for {}. Reason: {}", HOSTNAME, ex.getMessage()); + _logger.error("Error occurred while pushing monitor counters for {}. 
Reason: {}", HOSTNAME, ex); } } } @@ -692,7 +772,7 @@ private void _pushCounters() { int sizeJVMMetrics = 0; _logger.debug("Pushing monitor service counters for {}.", HOSTNAME); - Map counters = new HashMap<>(); + Map counters = new HashMap<>(); _updateJVMStatsCounters(); @@ -711,12 +791,13 @@ private void _pushCounters() { long timestamp = (System.currentTimeMillis() / 60000) * 60000L; - for (Entry entry : counters.entrySet()) { - Map dataPoints = new HashMap<>(1); + for (Entry entry : counters.entrySet()) { + GaugeMetric gauge = entry.getValue(); + double value = gauge.computeNewGaugeValueAndResetGaugeAdder(); - dataPoints.put(timestamp, entry.getValue()); + Map dataPoints = new HashMap<>(1); + dataPoints.put(timestamp, value); entry.getKey().setDatapoints(dataPoints); - _gaugeExporter.exportGauge(entry.getKey(), entry.getValue()); } if (!isDisposed()) { _logger.info("Pushing {} monitoring metrics to TSDB.", counters.size()); @@ -735,10 +816,5 @@ private void _sleepForPollPeriod() { } } - @Override - public void exportMetric(Metric metric, Double value) { - _gaugeExporter.exportGauge(metric, value); - } - } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeMetric.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeMetric.java new file mode 100644 index 000000000..30ac22500 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/GaugeMetric.java @@ -0,0 +1,103 @@ +package com.salesforce.dva.argus.service.monitor; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MonitorService; + +import java.util.concurrent.atomic.DoubleAdder; + +public class GaugeMetric implements MetricMXBean { + + /** + * This is the adder used to keep track of the sum of deltas being added to the counter + */ + protected final DoubleAdder adder; + /** + * This is the current value of the gauge + */ + protected volatile Double value; + /** + * Metric associated with the counter value + */ + private final Metric metric; + /** + * Name associated with the metric in JMX + */ + private final String jmxName; + + public GaugeMetric(Metric metric) { + this(metric, _createJMXObjectNameForMetric(metric, "", MetricType.GAUGE_METRIC)); + } + + protected GaugeMetric(Metric metric, String jmxName) { + this.metric = metric; + this.jmxName = jmxName; + this.value = 0.0; + this.adder = new DoubleAdder(); + } + + @Override + public String getObjectName() { + return jmxName; + } + + /** + * @return the current gauge value + */ + @Override + public Double getValue() { + return value; + } + + /** + * @return the current gauge sum since the last time the gauge adder reset + */ + public Double getCurrentGaugeAdderValue() { + return adder.doubleValue(); + } + + /** + * Add a new delta to the adder + * @param delta + * @return the current gauge sum since the last time the gauge adder reset + */ + public Double addValue(Double delta) { + adder.add(delta); + return adder.doubleValue(); + } + + /** + * The old value in the adder will be cleared and set to the new value. + * + * @param value the adder will be set to this value + */ + public void setValue(Double value) { + adder.reset(); + adder.add(value); + } + + /** + * Compute the new value of the gauge. Reset the gauge adder. 
+    /**
+     * Compute the new value of the gauge and reset the gauge adder.
+     *
+     * @return the new value of the gauge
+     */
+    public Double computeNewGaugeValueAndResetGaugeAdder() {
+        value = adder.sumThenReset();
+        return value;
+    }
+
+    protected static String _createJMXObjectNameForMetric(Metric metric, String jmxMetricNameSuffix, MetricType metricType) {
+        String objName = "ArgusMetrics:type=" + metricType.getName() + ",scope=" + metric.getScope() + ",metric=" + metric.getMetric() + jmxMetricNameSuffix;
+        if (null != metric.getTags()) {
+            for (String key : metric.getTags().keySet()) {
+                objName = objName + "," + (key.equalsIgnoreCase("type") || key.equalsIgnoreCase("scope")
+                        || key.equalsIgnoreCase("metric") ? "_" + key : key) + "=" + metric.getTags().get(key);
+            }
+        }
+        return objName;
+    }
+
+    protected static String _createJMXObjectNameForMetric(Metric metric, MonitorService.Counter counter) {
+        return _createJMXObjectNameForMetric(metric, counter.getJMXMetricNameSuffix(), MetricType.GAUGE_METRIC);
+    }
+
+}
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/GaugeMXBean.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/MetricMXBean.java
similarity index 87%
rename from ArgusCore/src/main/java/com/salesforce/dva/argus/entity/GaugeMXBean.java
rename to ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/MetricMXBean.java
index 2d366e797..00d27ec49 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/entity/GaugeMXBean.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/monitor/MetricMXBean.java
@@ -29,19 +29,19 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
 
-package com.salesforce.dva.argus.entity;
+package com.salesforce.dva.argus.service.monitor;
 
 /**
  * This is a standard MXBean interface. It will be used to build MXBeans
  * that export internal metrics to JMX.
  *
  * This one provides a read-only interface, to prevent anyone from changing
 * the metric through the JMX interface.
 *
 * @author taozhang
 *
 */
-public interface GaugeMXBean {
+public interface MetricMXBean {
 
    /**
     * This is expected to return the object name of the bean. It will
     * be used when the bean is registered with JMX.
     * @return the JMX object name
     */
    String getObjectName();
 
    /**
     * This should return the gauge value of the internal metric.
     * @return the current gauge value
     */
    Double getValue();
 
+    public enum MetricType {
+        GAUGE_METRIC("Gauge"),
+        COUNTER_METRIC("Counter");
+
+        private String type;
+
+        MetricType(String type) {
+            this.type = type;
+        }
+
+        public String getName() {
+            return type;
+        }
+    }
+
 }
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Consumer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Consumer.java
index a9535aae2..95a3b9822 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Consumer.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Consumer.java
@@ -1,403 +1,14 @@
-/*
- * Copyright (c) 2016, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3.
Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - package com.salesforce.dva.argus.service.mq.kafka; import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.salesforce.dva.argus.service.mq.kafka.KafkaMessageService.Property; -import com.salesforce.dva.argus.system.SystemConfiguration; -import kafka.consumer.ConsumerConfig; -import kafka.consumer.ConsumerIterator; -import kafka.consumer.KafkaStream; -import kafka.consumer.Whitelist; -import kafka.javaapi.consumer.ConsumerConnector; -import kafka.message.MessageAndMetadata; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.IOException; + import java.io.Serializable; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; - -/** - * The Kafka client consumer. - * - * @author Tom Valine (tvaline@salesforce.com) - */ -public class Consumer { - - //~ Instance fields ****************************************************************************************************************************** - - private final int MAX_BUFFER_SIZE; - private final Logger _logger = LoggerFactory.getLogger(getClass()); - private final SystemConfiguration _configuration; - private final Map _topics = new HashMap<>(); - private final AtomicLong count = new AtomicLong(0); - private final ObjectMapper _mapper; - - //~ Constructors ********************************************************************************************************************************* - - /** - * Creates a new Consumer object. - * - * @param configuration The system configuration. Cannot be null. 
- */ - public Consumer(SystemConfiguration configuration) { - this._configuration = configuration; - this._mapper = new ObjectMapper(); - MAX_BUFFER_SIZE = Integer.parseInt(_configuration.getValue(Property.KAFKA_CONSUMER_MESSAGES_TO_BUFFER.getName(), - Property.KAFKA_CONSUMER_MESSAGES_TO_BUFFER.getDefaultValue())); - } - - //~ Methods ************************************************************************************************************************************** - - /** - * This method creates Kafka streams for a topic so that messages can be streamed to the local buffer. If the streams for the given topic have - * already been initialized the returns. Information about a particular topic is stored in a HashMap. This method uses double-checked locking to - * make sure only one client thread can initialize streams for a topic. Moreover, it also helps subsequent calls, to check if the topic has been - * initialized, be not synchronized and hence return faster. - * - * @param topic The topic to initialize. - */ - public void initializeTopic(String topic) { - if (_topics.get(topic) == null) { - synchronized (this) { - if (_topics.get(topic) == null) { - _logger.info("Initializing streams for topic: {}", topic); - - Properties props = new Properties(); - - props.setProperty("zookeeper.connect", - _configuration.getValue(Property.ZOOKEEPER_CONNECT.getName(), Property.ZOOKEEPER_CONNECT.getDefaultValue())); - props.setProperty("group.id", - _configuration.getValue(Property.KAFKA_CONSUMER_GROUPID.getName(), Property.KAFKA_CONSUMER_GROUPID.getDefaultValue())); - props.setProperty("auto.offset.reset", _configuration.getValue(Property.KAFKA_CONSUMER_OFFSET_RESET.getName(), Property.KAFKA_CONSUMER_OFFSET_RESET.getDefaultValue())); - props.setProperty("auto.commit.interval.ms", "60000"); - props.setProperty("fetch.message.max.bytes", "2000000"); - - ConsumerConnector consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(props)); - List> streams = _createStreams(consumer, topic); - Topic t = new Topic(topic, consumer, streams.size()); - - _topics.put(topic, t); - _startStreamingMessages(topic, streams); - } - } - } - } - - private List> _createStreams(ConsumerConnector consumer, String topicName) { - int numStreams = Math.max(Integer.parseInt( - _configuration.getValue(Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getName(), - Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getDefaultValue())), 2); - - return consumer.createMessageStreamsByFilter(new Whitelist(topicName), numStreams); - } - - /** - * Retrieves the executor service for the given topic from the map of topics and submits a KafkaConsumer task for each stream in the list of - * streams. - * - * @param topic The topic to start streaming messages from. - * @param streams The streams for those messages. - */ - private void _startStreamingMessages(String topic, List> streams) { - ExecutorService executorService = _topics.get(topic).getStreamExecutorService(); - - for (final KafkaStream stream : streams) { - executorService.submit(new KafkaConsumer(stream)); - } - } - - /** - * Dequeues messages from the local buffer as specified by the limit. If no messages are available to dequeue, then waits for at most timeout - * milliseconds before returning. - * - * @param The result type. - * @param topic The topic to dequeue messages from. - * @param type The type that each message should be converted to. - * @param timeout The max amount of time in milliseconds that the function can take to dequeue limit number of messages. 
If number of dequeued - * messages is less than limit, then only those messages are returned. - * @param limit The max number of messages to dequeue. - * - * @return Messages of the given type belonging to the given topic. Empty list if no such topic exists or the method times out. - */ - public List dequeueFromBuffer(String topic, Class type, int timeout, int limit) { - List result = new ArrayList(); - long cutoff = System.currentTimeMillis() + timeout; - BlockingQueue queue = _topics.get(topic).getMessages(); - - while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { - if (Thread.currentThread().isInterrupted()) { - break; - } - try { - String message = queue.poll(timeout, TimeUnit.MILLISECONDS); - - if (message != null && !message.isEmpty()) { - if (String.class.isAssignableFrom(type)) { - result.add(type.cast(message)); - } else { - result.add(_mapper.readValue(message, type)); - } - if (result.size() % 1000 == 0) { - _logger.debug("Dequeued {} messages from local buffer.", result.size()); - } - } - } catch (InterruptedException e) { - _logger.warn("Interrupted while waiting for poll() to return a message."); - Thread.currentThread().interrupt(); - } catch (IOException e) { - _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); - } - } - return result; - } - - /** - * Dequeues messages from the local buffer as specified by the limit. If no messages are available to dequeue, then waits for at most timeout - * milliseconds before returning. - * - * @param The result type. - * @param topic The topic to dequeue messages from. - * @param type The type that each message should be converted to. - * @param timeout The max amount of time in milliseconds that the function can take to dequeue limit number of messages. If number of dequeued - * messages is less than limit, then only those messages are returned. - * @param limit The max number of messages to dequeue. - * - * @return Messages of the given type belonging to the given topic. Empty list if no such topic exists or the method times out. - */ - public List dequeueFromBuffer(String topic, JavaType type, int timeout, int limit) { - List result = new ArrayList<>(); - long cutoff = System.currentTimeMillis() + timeout; - BlockingQueue queue = _topics.get(topic).getMessages(); - - while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { - if (Thread.currentThread().isInterrupted()) { - break; - } - try { - String message = queue.poll(timeout, TimeUnit.MILLISECONDS); - - if (message != null && !message.isEmpty()) { - T object = _mapper.readValue(message, type); - - result.add(object); - if (result.size() % 1000 == 0) { - _logger.debug("Dequeued {} messages from local buffer.", result.size()); - } - } - } catch (InterruptedException e) { - _logger.warn("Interrupted while waiting for poll() to return a message."); - Thread.currentThread().interrupt(); - } catch (IOException e) { - _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); - } - } - return result; - } - - /** Enqueue un-flushed messages back on to Kafka. 
*/ - public void shutdown() { - for (Topic topic : _topics.values()) { - if (topic.getConsumerConnector() != null) { - topic.getConsumerConnector().shutdown(); - } - topic.getStreamExecutorService().shutdownNow(); - try { - topic.getStreamExecutorService().awaitTermination(60, TimeUnit.SECONDS); - } catch (InterruptedException e) { - _logger.warn("Stream executor service was interrupted while awaiting termination. This should never happen."); - } - } - _logger.debug("Pushing unflushed messages back to Kafka."); - - Producer producer = new Producer(_configuration); - - for (Map.Entry entry : _topics.entrySet()) { - String topicName = entry.getKey(); - Topic topic = entry.getValue(); - List unflushedMessages = new ArrayList(); - - if (!topic.getMessages().isEmpty()) { - topic.getMessages().drainTo(unflushedMessages); - producer.enqueue(topicName, unflushedMessages); - } - _logger.debug("{} messages for topic {} enqueued on Kafka queue", unflushedMessages.size(), topicName); - } - producer.shutdown(); - } - - //~ Inner Classes ******************************************************************************************************************************** - - /** - * The Kafka consumer worker. - * - * @author Bhinav Sura (bhinav.sura@salesforce.com) - */ - private class KafkaConsumer implements Runnable { - - private final KafkaStream _stream; - - /** - * Creates a new Consumer object. - * - * @param stream The Kafka stream to consume. - */ - public KafkaConsumer(KafkaStream stream) { - _logger.debug("Creating a new stream"); - _stream = stream; - } - - @Override - public void run() { - ConsumerIterator it = _stream.iterator(); - - while (it.hasNext()) { - Thread.yield(); - if (Thread.currentThread().isInterrupted()) { - _logger.info("Interrupted... Will exit now."); - break; - } - - MessageAndMetadata m = it.next(); - - try { - String message = new String(m.message()); - String topic = m.topic(); - - if (message != null) { - _topics.get(topic).getMessages().put(message); - - long c = count.incrementAndGet(); - - if (c % 50000 == 0) { - _logger.debug("Read {} messages.", count.get()); - } - if (_topics.get(topic).getMessages().size() % 1000 == 0) { - _logger.debug("Message queued. Queue size = {}", _topics.get(topic).getMessages().size()); - } - } - } catch (InterruptedException ie) { - _logger.debug("Interrupted while consuming message."); - Thread.currentThread().interrupt(); - } - } - } - } - - /** - * Inner class that contains information about a particular topic. It includes a kafka consumer connector, an executor service for maintaining the - * streaming threads and a blocking queue to store the actual messages. - * - * @author Bhinav Sura (bhinav.sura@salesforce.com) - */ - private class Topic { - - private ConsumerConnector _consumerConnector; - private ExecutorService _streamExecutorService; - private BlockingQueue _messages; - - /** - * Creates a new Topic object. - * - * @param name The topic name. - * @param consumerConnector The Kafka consumer connector. - * @param numStreams The number of streams with which to consume the topic. - */ - public Topic(String name, ConsumerConnector consumerConnector, int numStreams) { - this(name, consumerConnector, numStreams, new LinkedBlockingQueue(MAX_BUFFER_SIZE)); - } - - /** - * Creates a new Topic object. - * - * @param name The topic name. - * @param consumerConnector The Kafka consumer connector. - * @param numStreams The number of streams with which to consume the topic. - * @param messages The queue into which messages will be consumed. 
- */ - public Topic(final String name, ConsumerConnector consumerConnector, int numStreams, BlockingQueue messages) { - _consumerConnector = consumerConnector; - _messages = messages; - _streamExecutorService = Executors.newFixedThreadPool(numStreams, new ThreadFactory() { - - AtomicInteger id = new AtomicInteger(0); - - @Override - public Thread newThread(Runnable r) { - return new Thread(r, MessageFormat.format("{0}-stream-{1}", name, id.getAndIncrement())); - } - }); - } - /** - * Returns the consumer connector. - * - * @return The consumer connector. - */ - public ConsumerConnector getConsumerConnector() { - return _consumerConnector; - } +public interface Consumer { + List dequeueFromBuffer(String topic, Class type, int timeout, int limit); - /** - * Returns the stream executor service. - * - * @return The stream executor service. - */ - public ExecutorService getStreamExecutorService() { - return _streamExecutorService; - } + List dequeueFromBuffer(String topic, JavaType type, int timeout, int limit); - /** - * Returns the dequeued messages. - * - * @return The dequeued messages. - */ - public BlockingQueue getMessages() { - return _messages; - } - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ + void shutdown(); +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultConsumer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultConsumer.java new file mode 100644 index 000000000..dcc87904f --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultConsumer.java @@ -0,0 +1,472 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.service.mq.kafka; + +import com.fasterxml.jackson.databind.JavaType; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.salesforce.dva.argus.service.mq.kafka.KafkaMessageService.Property; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.clients.consumer.CommitFailedException; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRebalanceListener; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.errors.WakeupException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.validation.constraints.Null; +import java.io.IOException; +import java.io.Serializable; +import java.text.MessageFormat; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +/** + * The Kafka client consumer. + * + * @author Tom Valine (tvaline@salesforce.com) + */ +public class DefaultConsumer implements Consumer { + + //~ Instance fields ****************************************************************************************************************************** + + private final int MAX_BUFFER_SIZE; + // Number of abnormal exceptions allowed per X time-window minutes, within a topic's consumers, before the Consumer worker-thread stops altogether + private final int ALLOWED_EXCEPTION_COUNT; + private final int ALLOWED_EXCEPTION_TIME_WINDOW; + private final Logger _logger = LoggerFactory.getLogger(getClass()); + private final SystemConfiguration _configuration; + private final Map _topics = new HashMap<>(); + private final AtomicLong count = new AtomicLong(0); + private final ObjectMapper _mapper; + + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new DefaultConsumer object. + * + * @param configuration The system configuration. Cannot be null. 
+     */
+    public DefaultConsumer(SystemConfiguration configuration) {
+        this._configuration = configuration;
+        this._mapper = new ObjectMapper();
+        MAX_BUFFER_SIZE = Integer.parseInt(_configuration.getValue(Property.KAFKA_CONSUMER_MESSAGES_TO_BUFFER.getName(),
+                Property.KAFKA_CONSUMER_MESSAGES_TO_BUFFER.getDefaultValue()));
+        ALLOWED_EXCEPTION_COUNT = Integer.parseInt(_configuration.getValue(Property.KAFKA_CONSUMER_ALLOWED_EXCEPTION_COUNT_PER_TIME_WINDOW.getName(),
+                Property.KAFKA_CONSUMER_ALLOWED_EXCEPTION_COUNT_PER_TIME_WINDOW.getDefaultValue()));
+        ALLOWED_EXCEPTION_TIME_WINDOW = Integer.parseInt(_configuration.getValue(Property.KAFKA_CONSUMER_ALLOWED_EXCEPTION_TIME_WINDOW_MINUTES.getName(),
+                Property.KAFKA_CONSUMER_ALLOWED_EXCEPTION_TIME_WINDOW_MINUTES.getDefaultValue()));
+        _logger.info("MQ DefaultConsumer tolerating no more than {} abnormal exceptions per {} minutes", ALLOWED_EXCEPTION_COUNT, ALLOWED_EXCEPTION_TIME_WINDOW);
+    }
+
+    @VisibleForTesting
+    protected DefaultConsumer(SystemConfiguration configuration, ObjectMapper mapper, int maxBufferSize) {
+        _configuration = configuration;
+        _mapper = mapper;
+        MAX_BUFFER_SIZE = maxBufferSize;
+        ALLOWED_EXCEPTION_COUNT = 3;
+        ALLOWED_EXCEPTION_TIME_WINDOW = 3;
+    }
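The two ALLOWED_EXCEPTION_* knobs drive a time-windowed circuit breaker in ConsumerWorker further down. A minimal sketch of that pattern, assuming Guava's cache API (the counts mirror the test constructor above, not production settings):

    // Record each abnormal exception under its timestamp; entries expire after the window.
    Cache<Long, Exception> exceptionLog = CacheBuilder.newBuilder()
            .expireAfterWrite(3, TimeUnit.MINUTES)  // ALLOWED_EXCEPTION_TIME_WINDOW
            .build();
    // In the poll loop's catch-all:
    exceptionLog.put(System.currentTimeMillis(), ex);
    exceptionLog.cleanUp(); // evict expired entries so size() reflects the window
    if (exceptionLog.size() > 3) { // ALLOWED_EXCEPTION_COUNT
        // too many abnormal exceptions inside the window: stop this worker
    }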
+ */ + protected void initializeTopic(String topic) { + if (_topics.get(topic) == null) { + synchronized (this) { + if (_topics.get(topic) == null) { + _logger.info("Initializing streams for topic: {}", topic); + + Properties props = new Properties(); + // Set basic Kafka props + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, + _configuration.getValue(Property.KAFKA_BROKERS.getName(), Property.KAFKA_BROKERS.getDefaultValue())); + props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, + _configuration.getValue(Property.KAFKA_CONSUMER_GROUPID.getName(), Property.KAFKA_CONSUMER_GROUPID.getDefaultValue())); + props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, _configuration.getValue(Property.KAFKA_CONSUMER_OFFSET_RESET.getName(), Property.KAFKA_CONSUMER_OFFSET_RESET.getDefaultValue())); + props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); + // Set security props + props.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, + _configuration.getValue(Property.KAFKA_SECURITY_PROTOCOL.getName(), Property.KAFKA_SECURITY_PROTOCOL.getDefaultValue())); + props.setProperty(SslConfigs.SSL_PROVIDER_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_PROVIDER.getName(), Property.KAFKA_SSL_PROVIDER.getDefaultValue())); + + props.setProperty(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_TYPE.getName(), Property.KAFKA_SSL_KEYSTORE_TYPE.getDefaultValue())); + props.setProperty(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_LOCATION.getName(), Property.KAFKA_SSL_KEYSTORE_LOCATION.getDefaultValue())); + props.setProperty(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_PASSWORD.getName(), Property.KAFKA_SSL_KEYSTORE_PASSWORD.getDefaultValue())); + props.setProperty(SslConfigs.SSL_KEY_PASSWORD_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEY_PASSWORD.getName(), Property.KAFKA_SSL_KEY_PASSWORD.getDefaultValue())); + + props.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_TYPE.getName(), Property.KAFKA_SSL_TRUSTSTORE_TYPE.getDefaultValue())); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_LOCATION.getName(), Property.KAFKA_SSL_TRUSTSTORE_LOCATION.getDefaultValue())); + props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_PASSWORD.getName(), Property.KAFKA_SSL_TRUSTSTORE_PASSWORD.getDefaultValue())); + int numStreams = Math.max(Integer.parseInt(_configuration.getValue(Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getName(), + Property.KAFKA_CONSUMER_STREAMS_PER_TOPIC.getDefaultValue())), + 2); + Topic t = new Topic(topic, props, numStreams); + _topics.put(topic, t); + } + } + } + } + + /** + * Dequeues messages from the local buffer as specified by the limit. If no messages are available to dequeue, then waits for at most timeout + * milliseconds before returning. + * + * @param The result type. + * @param topic The topic to dequeue messages from. + * @param type The type that each message should be converted to. + * @param timeout The max amount of time in milliseconds that the function can take to dequeue limit number of messages. 
If number of dequeued + * messages is less than limit, then only those messages are returned. + * @param limit The max number of messages to dequeue. + * + * @return Messages of the given type belonging to the given topic. Empty list if no such topic exists or the method times out. + */ + public List dequeueFromBuffer(String topic, Class type, int timeout, int limit) { + initializeTopic(topic); + + List result = new ArrayList(); + long cutoff = System.currentTimeMillis() + timeout; + BlockingQueue queue = _topics.get(topic).getMessages(); + + while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { + if (Thread.currentThread().isInterrupted()) { + break; + } + try { + String message = queue.poll(timeout, TimeUnit.MILLISECONDS); + + if (message != null && !message.isEmpty()) { + if (String.class.isAssignableFrom(type)) { + result.add(type.cast(message)); + } else { + result.add(deserialize(message, type)); + } + if (result.size() % 1000 == 0) { + _logger.debug("Dequeued {} messages from local buffer.", result.size()); + } + } + } catch (InterruptedException e) { + _logger.warn("Interrupted while waiting for poll() to return a message."); + Thread.currentThread().interrupt(); + } catch (IOException e) { + _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); + } + } + return result; + } + + /** + * Dequeues messages from the local buffer as specified by the limit. If no messages are available to dequeue, then waits for at most timeout + * milliseconds before returning. + * + * @param The result type. + * @param topic The topic to dequeue messages from. + * @param type The type that each message should be converted to. + * @param timeout The max amount of time in milliseconds that the function can take to dequeue limit number of messages. If number of dequeued + * messages is less than limit, then only those messages are returned. + * @param limit The max number of messages to dequeue. + * + * @return Messages of the given type belonging to the given topic. Empty list if no such topic exists or the method times out. + */ + public List dequeueFromBuffer(String topic, JavaType type, int timeout, int limit) { + initializeTopic(topic); + List result = new ArrayList<>(); + long cutoff = System.currentTimeMillis() + timeout; + BlockingQueue queue = _topics.get(topic).getMessages(); + + while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { + if (Thread.currentThread().isInterrupted()) { + break; + } + try { + String message = queue.poll(timeout, TimeUnit.MILLISECONDS); + + if (message != null && !message.isEmpty()) { + T object = _mapper.readValue(message, type); + + result.add(object); + if (result.size() % 1000 == 0) { + _logger.debug("Dequeued {} messages from local buffer.", result.size()); + } + } + } catch (InterruptedException e) { + _logger.warn("Interrupted while waiting for poll() to return a message."); + Thread.currentThread().interrupt(); + } catch (IOException e) { + _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); + } + } + return result; + } + + /** Enqueue un-flushed messages back on to Kafka. 
*/ + public void shutdown() { + for (Topic topic : _topics.values()) { + for (ConsumerWorker worker: topic.getWorkers()) { + worker.shutdown(); + } + topic.getExecutorService().shutdownNow(); + try { + topic.getExecutorService().awaitTermination(60, TimeUnit.SECONDS); + } catch (InterruptedException e) { + _logger.warn("Stream executor service was interrupted while awaiting termination. This should never happen."); + } + } + _logger.debug("Pushing unflushed messages back to Kafka."); + + DefaultProducer producer = new DefaultProducer(_configuration); + + for (Map.Entry entry : _topics.entrySet()) { + String topicName = entry.getKey(); + Topic topic = entry.getValue(); + List unflushedMessages = new ArrayList(); + + if (!topic.getMessages().isEmpty()) { + topic.getMessages().drainTo(unflushedMessages); + producer.enqueue(topicName, unflushedMessages); + } + _logger.debug("{} messages for topic {} enqueued on Kafka queue", unflushedMessages.size(), topicName); + } + producer.shutdown(); + } + + @VisibleForTesting + protected T deserialize(String message, Class type) throws IOException { + return _mapper.readValue(message, type); + } + + //~ Inner Classes ******************************************************************************************************************************** + + /** + * The Kafka consumer worker. + * + * @author Bhinav Sura (bhinav.sura@salesforce.com) + */ + private class ConsumerWorker implements Runnable { + private final AtomicBoolean closed = new AtomicBoolean(false); + private Properties consumerProps; + private KafkaConsumer consumer; + private Cache exceptionLog = CacheBuilder.newBuilder() + .expireAfterWrite(ALLOWED_EXCEPTION_TIME_WINDOW, TimeUnit.MINUTES).build(); + private final String topic; + + /** + * Creates a new DefaultConsumer object. + * + */ + public ConsumerWorker(Properties consumerProps, String topic) { + _logger.debug("Creating a new stream"); + this.consumerProps = consumerProps; + this.topic = topic; + } + + public void shutdown() { + closed.set(true); + _logger.error("ConsumerWorker received shutdown call"); + consumer.wakeup(); + } + + @Override + public void run() { + while (!closed.get() && exceptionLog.size() <= ALLOWED_EXCEPTION_COUNT) { + _logger.info("Constructing KafkaConsumer"); + consumer = new KafkaConsumer<>(consumerProps); + try { + consumer.subscribe(Arrays.asList(topic), new ConsumerRebalanceListener() { + @Override + public void onPartitionsRevoked(Collection partitions) { + for (TopicPartition tp : partitions) + _logger.info("Partitions revoked for topic=" + tp.topic() + " and partition=" + tp.partition()); + } + + @Override + public void onPartitionsAssigned(Collection partitions) { + for (TopicPartition tp : partitions) + _logger.info("Partitions assigned for topic=" + tp.topic() + " and partition=" + tp.partition()); + } + }); + while (!closed.get()) { + if (Thread.currentThread().isInterrupted()) { + _logger.info("Interrupted... Will exit now."); + break; + } + try { + ConsumerRecords records = consumer.poll(Duration.ofMillis(Long.MAX_VALUE)); + for (ConsumerRecord record : records) { + String message = record.value(); + String topic = record.topic(); + + if (message != null) { + _topics.get(topic).getMessages().put(message); + + long c = count.incrementAndGet(); + + if (c % 50000 == 0) { + _logger.debug("Read {} messages.", count.get()); + } + if (_topics.get(topic).getMessages().size() % 1000 == 0) { + _logger.debug("Message queued. 
Queue size = {}", _topics.get(topic).getMessages().size()); + } + } + } + } catch (InterruptedException ex) { + _logger.debug("Interrupted while consuming message."); + Thread.currentThread().interrupt(); + } catch (CommitFailedException ex) { + _logger.error("Commit failed, continuing polls: ", ex); + } catch (WakeupException e) { + throw e; + } catch (Exception ex) { + _logger.error("Abnormal exception encountered in poll loop. Exiting loop: ", ex); + exceptionLog.put(System.currentTimeMillis(), ex); + exceptionLog.cleanUp(); + break; + } + } + } catch (WakeupException e) { + // Ignore exception if closing + if (!closed.get()) throw e; + } finally { + consumer.close(); + _logger.info("Consumer closed"); + } + } + if (exceptionLog.size() > ALLOWED_EXCEPTION_COUNT) { + _logger.error("ConsumerWorker exiting because there were more than {} abnormal exceptions within {} minutes", ALLOWED_EXCEPTION_COUNT, ALLOWED_EXCEPTION_TIME_WINDOW); + } + _logger.info("ConsumerWorker finished"); + } + } + + /** + * Inner class that contains information about a particular topic. It includes a kafka consumer connector, an executor service for maintaining the + * streaming threads and a blocking queue to store the actual messages. + * + * @author Bhinav Sura (bhinav.sura@salesforce.com) + */ + private class Topic { + ExecutorService executorService; + BlockingQueue messages; + List workers = new ArrayList<>(); + + /** + * Creates a new Topic object. + * + * @param topicName + * @param consumerProps + * @param numStreams The number of streams with which to consume the topic. + */ + public Topic(final String topicName, Properties consumerProps, int numStreams) { + messages = new LinkedBlockingQueue<>(MAX_BUFFER_SIZE); + executorService = Executors.newFixedThreadPool(numStreams, new ThreadFactory() { + AtomicInteger id = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, MessageFormat.format("{0}-consumerworker-{1}", topicName, id.getAndIncrement())); + } + }); + for (int i = 0; i < numStreams; i++) { + ConsumerWorker worker = new ConsumerWorker(consumerProps, topicName); + workers.add(worker); + executorService.submit(worker); + } + } + + /** + * Returns the consumer connector. + * + * @return The consumer connector. + */ + public List getWorkers() { + return workers; + } + + /** + * Returns the stream executor service. + * + * @return The stream executor service. + */ + public ExecutorService getExecutorService(){ + return executorService; + } + + /** + * Returns the dequeued messages. + * + * @return The dequeued messages. + */ + public BlockingQueue getMessages() { + return messages; + } + } +} +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultProducer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultProducer.java new file mode 100644 index 000000000..f474ccc21 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/DefaultProducer.java @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.mq.kafka; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.annotations.VisibleForTesting; +import com.salesforce.dva.argus.service.mq.kafka.KafkaMessageService.Property; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.kafka.clients.CommonClientConfigs; +import org.apache.kafka.clients.producer.BufferExhaustedException; +import org.apache.kafka.clients.producer.Callback; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.serialization.StringSerializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.Serializable; +import java.text.MessageFormat; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Produces events onto the Kafka broker. + * + * @author Tom Valine (tvaline@salesforce.com) + */ +public class DefaultProducer implements Producer { + + //~ Instance fields ****************************************************************************************************************************** + + private final Logger _logger = LoggerFactory.getLogger(getClass()); + protected SystemConfiguration _configuration; + protected KafkaProducer _producer; + protected ExecutorService _executorService; + private final ObjectMapper _mapper; + + //~ Constructors ********************************************************************************************************************************* + + protected DefaultProducer() { + _mapper = new ObjectMapper(); + } + + /** + * Creates a new DefaultProducer object. + * + * @param config The system configuration. 
+ */ + public DefaultProducer(SystemConfiguration config) { + this(); + _configuration = config; + _producer = createProducer(); + _executorService = createExecutorService(); + } + + @VisibleForTesting + protected DefaultProducer(SystemConfiguration config, KafkaProducer producer, ExecutorService executorService, ObjectMapper mapper) { + _configuration = config; + _producer = producer; + _executorService = executorService; + _mapper = mapper; + } + + //~ Methods ************************************************************************************************************************************** + + protected ExecutorService createExecutorService() { + int producerConnections = Math.max(Integer.valueOf( + _configuration.getValue(Property.KAFKA_PRODUCER_CONNECTIONS.getName(), Property.KAFKA_PRODUCER_CONNECTIONS.getDefaultValue())), 10); + + return Executors.newFixedThreadPool(producerConnections, new ThreadFactory() { + + AtomicInteger id = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, MessageFormat.format("kafka-producer-worker-{0}", id.getAndIncrement())); + } + }); + } + + protected KafkaProducer createProducer() { + Map producerConfig = new HashMap(); + + producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + _configuration.getValue(Property.KAFKA_BROKERS.getName(), Property.KAFKA_BROKERS.getDefaultValue())); + producerConfig.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy"); + producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "argus.producer"); + producerConfig.put(ProducerConfig.BUFFER_MEMORY_CONFIG, + Long.parseLong( + _configuration.getValue(Property.KAFKA_PRODUCER_BUFFER_MEMORY.getName(), Property.KAFKA_PRODUCER_BUFFER_MEMORY.getDefaultValue()))); + producerConfig.put(ProducerConfig.BATCH_SIZE_CONFIG, + Integer.parseInt( + _configuration.getValue(Property.KAFKA_PRODUCER_BATCH_SIZE.getName(), Property.KAFKA_PRODUCER_BATCH_SIZE.getDefaultValue()))); + // Set security props + producerConfig.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, + _configuration.getValue(Property.KAFKA_SECURITY_PROTOCOL.getName(), Property.KAFKA_SECURITY_PROTOCOL.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_PROVIDER_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_PROVIDER.getName(), Property.KAFKA_SSL_PROVIDER.getDefaultValue())); + + producerConfig.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_TYPE.getName(), Property.KAFKA_SSL_KEYSTORE_TYPE.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_LOCATION.getName(), Property.KAFKA_SSL_KEYSTORE_LOCATION.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEYSTORE_PASSWORD.getName(), Property.KAFKA_SSL_KEYSTORE_PASSWORD.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_KEY_PASSWORD.getName(), Property.KAFKA_SSL_KEY_PASSWORD.getDefaultValue())); + + producerConfig.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_TYPE.getName(), Property.KAFKA_SSL_TRUSTSTORE_TYPE.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + _configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_LOCATION.getName(), Property.KAFKA_SSL_TRUSTSTORE_LOCATION.getDefaultValue())); + producerConfig.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + 
_configuration.getValue(Property.KAFKA_SSL_TRUSTSTORE_PASSWORD.getName(), Property.KAFKA_SSL_TRUSTSTORE_PASSWORD.getDefaultValue())); + return new KafkaProducer(producerConfig, new StringSerializer(), new StringSerializer()); + } + + /** + * Adds the messages to the DefaultProducer Buffer which will later be batched by Kafka and sent to the brokers. + * + * @param The value type. + * @param topic The topic to produce onto. + * @param objects The list of objects to enqueue. + * + * @return The number of objects that were successfully added to the DefaultProducer Buffer. + */ + public int enqueue(final String topic, List objects) { + int messagesBuffered = 0; + + for (T object : objects) { + final String value; + + if (String.class.isAssignableFrom(object.getClass())) { + value = String.class.cast(object); + } else { + try { + value = serialize(object); + } catch (JsonProcessingException e) { + _logger.warn("Exception while serializing the object to a string. Skipping this object.", e); + continue; + } + } + try { + boolean addedToBuffer = _executorService.submit(new ProducerWorker(topic, value)).get(); + + if (addedToBuffer) { + messagesBuffered++; + } + } catch (InterruptedException e) { + _logger.warn("Enqueue operation was interrupted by calling code."); + Thread.currentThread().interrupt(); + } catch (ExecutionException e) { + throw new SystemException(e); + } + } + return messagesBuffered; + } + + /** Shuts down the producer. */ + public void shutdown() { + if (_producer != null) { + _producer.close(); + } + _executorService.shutdown(); + try { + if (!_executorService.awaitTermination(10, TimeUnit.SECONDS)) { + _logger.warn("Shutdown of Kafka executor service timed out after 10 seconds."); + _executorService.shutdownNow(); + } + } catch (InterruptedException ex) { + _logger.warn("Shutdown of executor service was interrupted."); + Thread.currentThread().interrupt(); + } + } + + @VisibleForTesting + protected String serialize(T obj) throws JsonProcessingException { + return _mapper.writeValueAsString(obj); + } + + //~ Inner Classes ******************************************************************************************************************************** + + /** + * The worker used by the producer executor service. + * + * @author Bhinav Sura (bhinav.sura@salesforce.com) + */ + private class ProducerWorker implements Callable { + + private final String _topic; + private final String _message; + + /** + * Creates a new DefaultProducer object. + * + * @param topic The topic to produce onto. + * @param message The message to enqueue. + */ + public ProducerWorker(String topic, String message) { + this._topic = topic; + this._message = message; + } + + @Override + public Boolean call() { + ProducerRecord record = new ProducerRecord<>(_topic, _message); + + try { + _producer.send(record, new Callback() { + @Override + public void onCompletion(RecordMetadata metaData, Exception exception) { + if (exception != null) { + _logger.warn("Exception while sending message: {}", exception); + } else { + _logger.trace("Message sent to partition {} with offset {}.", metaData.partition(), metaData.offset()); + } + } + }); + } catch (BufferExhaustedException e) { + _logger.warn("Buffer exhausted on kafka producer. Skipping this message.", e); + return false; + } catch (Exception e) { + _logger.warn("Exception occurred when executing producer send(). ", e); + throw new SystemException(e); + } + return true; + } + } +} +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/KafkaMessageService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/KafkaMessageService.java index 713259ff9..655a2e901 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/KafkaMessageService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/KafkaMessageService.java @@ -37,7 +37,9 @@ import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.MQService; import com.salesforce.dva.argus.system.SystemConfiguration; + import java.io.Serializable; +import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.List; import java.util.Properties; @@ -54,9 +56,9 @@ public class KafkaMessageService extends DefaultService implements MQService { //~ Instance fields ****************************************************************************************************************************** - private Producer _producer = null; private Consumer _consumer = null; + private final SystemConfiguration _config; //~ Constructors ********************************************************************************************************************************* @@ -70,11 +72,20 @@ public KafkaMessageService(SystemConfiguration config) { super(config); requireArgument(config != null, "System configuration cannot be null."); if (!Boolean.parseBoolean(config.getValue(Property.KAFKA_DISABLE_PRODUCER.getName(), Property.KAFKA_DISABLE_PRODUCER.getDefaultValue()))) { - _producer = new Producer(config); + try { + _producer = (Producer) Class.forName(config.getValue(SystemConfiguration.Property.MQ_SERVICE_PRODUCER_IMPL_CLASS)).getConstructor(SystemConfiguration.class).newInstance(config); + } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException ex) { + _producer = new DefaultProducer(config); + } } if (!Boolean.parseBoolean(config.getValue(Property.KAFKA_DISABLE_CONSUMER.getName(), Property.KAFKA_DISABLE_CONSUMER.getDefaultValue()))) { - _consumer = new Consumer(config); + try { + _consumer = (Consumer) Class.forName(config.getValue(SystemConfiguration.Property.MQ_SERVICE_CONSUMER_IMPL_CLASS)).getConstructor(SystemConfiguration.class).newInstance(config); + } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException ex) { + _consumer = new DefaultConsumer(config); + } } + _config = config; } //~ Methods ************************************************************************************************************************************** @@ -87,6 +98,23 @@ private void requireConsumerEnabled() { requireState(_consumer != null, "Cannot perform this action when Consumer is disabled"); } + /* + * Transform an MQService MQQueue name to the configured topic name if redefined in .properties file + */ + private String toKafkaTopic(String topic) { + if (topic.equals(MQQueue.ALERT.getQueueName())) { + return _config.getValue(Property.KAFKA_ALERTS_TOPIC.getName(), Property.KAFKA_ALERTS_TOPIC.getDefaultValue()); + } else if (topic.equals(MQQueue.ANNOTATION.getQueueName())) { + return _config.getValue(Property.KAFKA_ANNOTATIONS_TOPIC.getName(), Property.KAFKA_ANNOTATIONS_TOPIC.getDefaultValue()); + } else if (topic.equals(MQQueue.METRIC.getQueueName())) { + return _config.getValue(Property.KAFKA_METRICS_TOPIC.getName(), 
Property.KAFKA_METRICS_TOPIC.getDefaultValue()); + } else if (topic.equals(MQQueue.HISTOGRAM.getQueueName())) { + return _config.getValue(Property.KAFKA_HISTOGRAMS_TOPIC.getName(), Property.KAFKA_HISTOGRAMS_TOPIC.getDefaultValue()); + } else { + return topic; + } + } + @Override public void enqueue(String topic, T object) { requireNotDisposed(); @@ -105,7 +133,7 @@ public void enqueue(final String topic, List objects requireProducerEnabled(); requireArgument(topic != null && !topic.trim().isEmpty(), "Topic name cannot be null or empty."); requireArgument(objects != null, "The list of objects to enqueue cannot be null."); - _producer.enqueue(topic, objects); + _producer.enqueue(toKafkaTopic(topic), objects); } @Override @@ -140,7 +168,7 @@ public List dequeue(String topic, Class type, int requireArgument(type != null, "Result object runtime type cannot be null."); requireArgument(timeout > 0, "Timeout in milliseconds must be greater than zero."); requireArgument(limit > 0, "Limit must be non-negative."); - _consumer.initializeTopic(topic); + topic = toKafkaTopic(topic); return _consumer.dequeueFromBuffer(topic, type, timeout, limit); } @@ -152,7 +180,7 @@ public List dequeue(String topic, JavaType type, int requireArgument(type != null, "Result object runtime type cannot be null."); requireArgument(timeout > 0, "Timeout in milliseconds must be greater than zero."); requireArgument(limit > 0, "Limit must be non-negative."); - _consumer.initializeTopic(topic); + topic = toKafkaTopic(topic); return _consumer.dequeueFromBuffer(topic, type, timeout, limit); } @@ -190,8 +218,6 @@ public enum Property { KAFKA_BROKERS("service.property.mq.kafka.brokers", "localhost:9093"), /** Specifies the number of producer connections. Default is 10. */ KAFKA_PRODUCER_CONNECTIONS("service.property.mq.kafka.producer.connections", "10"), - /** Indicates whether the producer should block when the buffer is full. Default is false. */ - KAFKA_PRODUCER_BLOCK_ON_BUFFER_FULL("service.property.mq.kafka.producer.block.on.buffer.full", "false"), /** Specifies the allocated producer buffer memory in bytes. Default is 134217728. */ KAFKA_PRODUCER_BUFFER_MEMORY("service.property.mq.kafka.producer.buffer.memory", "134217728"), /** Specifies the batch size. Default is 102400. */ @@ -202,12 +228,28 @@ public enum Property { KAFKA_CONSUMER_MESSAGES_TO_BUFFER("service.property.mq.kafka.consumer.messages.to.buffer", "1"), /** Specifies the default consumer group ID. */ KAFKA_CONSUMER_GROUPID("service.property.mq.kafka.consumer.groupid", "argus-consumer-unit"), - /** Specifies the default consumer group ID. */ - KAFKA_CONSUMER_OFFSET_RESET("service.property.mq.kafka.consumer.auto.offset.reset", "smallest"), - /** Specifies the Kafka ZooKeeper connection endpoint. */ - ZOOKEEPER_CONNECT("service.property.mq.zookeeper.connect", "localhost:2185"), - /** Specifies the Kafka Zookeeper connection timeout in milliseconds. Default is 10000. 
*/ - ZOOKEEPER_CONNECTION_TIMEOUT_MS("service.property.mq.zookeeper.connection.timeout.ms", "10000"), + /** Where to start a new group.id from */ + KAFKA_CONSUMER_OFFSET_RESET("service.property.mq.kafka.consumer.auto.offset.reset", "earliest"), + /** Number of abnormal exceptions allowed per X minutes before the Consumer worker-thread stops altogether */ + KAFKA_CONSUMER_ALLOWED_EXCEPTION_COUNT_PER_TIME_WINDOW("service.property.mq.kafka.consumer.allowed.exception.count", "3"), + KAFKA_CONSUMER_ALLOWED_EXCEPTION_TIME_WINDOW_MINUTES("service.property.mq.kafka.consumer.allowed.exception.time.window.mins", "3"), + /** Customize Kafka topic names to produce to / consume from */ + KAFKA_ALERTS_TOPIC("service.property.mq.kafka.alerts.topic", "argusAlertQueue"), + KAFKA_ANNOTATIONS_TOPIC("service.property.mq.kafka.annotations.topic", "argusAnnotationQueue"), + KAFKA_METRICS_TOPIC("service.property.mq.kafka.metrics.topic", "argusMetricQueue"), + KAFKA_HISTOGRAMS_TOPIC("service.property.mq.kafka.histograms.topic", "argusHistogramQueue"), + /** Kafka Client SSL properties */ + KAFKA_SECURITY_PROTOCOL("service.property.mq.kafka.security.protocol", "PLAINTEXT"), + KAFKA_SSL_PROVIDER("service.property.mq.kafka.ssl.provider", ""), + KAFKA_SSL_KEYSTORE_TYPE("service.property.mq.kafka.ssl.keystore.type", ""), + KAFKA_SSL_KEYSTORE_LOCATION("service.property.mq.kafka.ssl.keystore.location", ""), + KAFKA_SSL_KEYSTORE_PASSWORD("service.property.mq.kafka.ssl.keystore.password", ""), + KAFKA_SSL_KEY_PASSWORD("service.property.mq.kafka.ssl.key.password", ""), + KAFKA_SSL_TRUSTSTORE_TYPE("service.property.mq.kafka.ssl.truststore.type", ""), + KAFKA_SSL_TRUSTSTORE_LOCATION("service.property.mq.kafka.ssl.truststore.location", ""), + KAFKA_SSL_TRUSTSTORE_PASSWORD("service.property.mq.kafka.ssl.truststore.password", ""), + + /** Whether to disable KafkaProducer instances from being created */ KAFKA_DISABLE_PRODUCER("service.property.mq.kafka.producer.disable", "false"), /** Whether to disable KafkaConsumer instances from being created */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Producer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Producer.java index ccf3e9ff9..91f8f0200 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Producer.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/mq/kafka/Producer.java @@ -1,236 +1,10 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - package com.salesforce.dva.argus.service.mq.kafka; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.salesforce.dva.argus.service.mq.kafka.KafkaMessageService.Property; -import com.salesforce.dva.argus.system.SystemConfiguration; -import com.salesforce.dva.argus.system.SystemException; -import org.apache.kafka.clients.producer.BufferExhaustedException; -import org.apache.kafka.clients.producer.Callback; -import org.apache.kafka.clients.producer.KafkaProducer; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.apache.kafka.clients.producer.ProducerRecord; -import org.apache.kafka.clients.producer.RecordMetadata; -import org.apache.kafka.common.serialization.StringSerializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.Serializable; -import java.text.MessageFormat; -import java.util.HashMap; import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -/** - * Produces events onto the Kafka broker. - * - * @author Tom Valine (tvaline@salesforce.com) - */ -public class Producer { +public interface Producer { + int enqueue(final String topic, List objects); - //~ Instance fields ****************************************************************************************************************************** - - private final Logger _logger = LoggerFactory.getLogger(getClass()); - private final SystemConfiguration _configuration; - private KafkaProducer _producer; - private final ExecutorService _executorService; - private final ObjectMapper _mapper; - - //~ Constructors ********************************************************************************************************************************* - - /** - * Creates a new Producer object. - * - * @param config The system configuration. 
- */ - public Producer(SystemConfiguration config) { - _configuration = config; - _producer = _createProducer(); - _executorService = _createExecutorService(); - _mapper = new ObjectMapper(); - } - - //~ Methods ************************************************************************************************************************************** - - private ExecutorService _createExecutorService() { - int producerConnections = Math.max(Integer.valueOf( - _configuration.getValue(Property.KAFKA_PRODUCER_CONNECTIONS.getName(), Property.KAFKA_PRODUCER_CONNECTIONS.getDefaultValue())), 10); - - return Executors.newFixedThreadPool(producerConnections, new ThreadFactory() { - - AtomicInteger id = new AtomicInteger(0); - - @Override - public Thread newThread(Runnable r) { - return new Thread(r, MessageFormat.format("kafka-producer-worker-{0}", id.getAndIncrement())); - } - }); - } - - private KafkaProducer _createProducer() { - Map producerConfig = new HashMap(); - - producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, - _configuration.getValue(Property.KAFKA_BROKERS.getName(), Property.KAFKA_BROKERS.getDefaultValue())); - producerConfig.put(ProducerConfig.ACKS_CONFIG, "1"); - producerConfig.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy"); - producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "argus.producer"); - producerConfig.put(ProducerConfig.BLOCK_ON_BUFFER_FULL_CONFIG, - Boolean.parseBoolean( - _configuration.getValue(Property.KAFKA_PRODUCER_BLOCK_ON_BUFFER_FULL.getName(), - Property.KAFKA_PRODUCER_BLOCK_ON_BUFFER_FULL.getDefaultValue()))); - producerConfig.put(ProducerConfig.BUFFER_MEMORY_CONFIG, - Long.parseLong( - _configuration.getValue(Property.KAFKA_PRODUCER_BUFFER_MEMORY.getName(), Property.KAFKA_PRODUCER_BUFFER_MEMORY.getDefaultValue()))); - producerConfig.put(ProducerConfig.BATCH_SIZE_CONFIG, - Integer.parseInt( - _configuration.getValue(Property.KAFKA_PRODUCER_BATCH_SIZE.getName(), Property.KAFKA_PRODUCER_BATCH_SIZE.getDefaultValue()))); - return new KafkaProducer(producerConfig, new StringSerializer(), new StringSerializer()); - } - - /** - * Adds the messages to the Producer Buffer which will later be batched by Kafka and sent to the brokers. - * - * @param The value type. - * @param topic The topic to produce onto. - * @param objects The list of objects to enqueue. - * - * @return The number of objects that were successfully added to the Producer Buffer. - */ - public int enqueue(final String topic, List objects) { - int messagesBuffered = 0; - - for (T object : objects) { - final String value; - - if (String.class.isAssignableFrom(object.getClass())) { - value = String.class.cast(object); - } else { - try { - value = _mapper.writeValueAsString(object); - } catch (JsonProcessingException e) { - _logger.warn("Exception while serializing the object to a string. Skipping this object.", e); - continue; - } - } - try { - boolean addedToBuffer = _executorService.submit(new ProducerWorker(topic, value)).get(); - - if (addedToBuffer) { - messagesBuffered++; - } - } catch (InterruptedException e) { - _logger.warn("Enqueue operation was interrupted by calling code."); - Thread.currentThread().interrupt(); - } catch (ExecutionException e) { - throw new SystemException(e); - } - } - return messagesBuffered; - } - - /** Shuts down the producer. 
*/ - public void shutdown() { - if (_producer != null) { - _producer.close(); - } - _executorService.shutdown(); - try { - if (!_executorService.awaitTermination(10, TimeUnit.SECONDS)) { - _logger.warn("Shutdown of Kafka executor service timed out after 10 seconds."); - _executorService.shutdownNow(); - } - } catch (InterruptedException ex) { - _logger.warn("Shutdown of executor service was interrupted."); - Thread.currentThread().interrupt(); - } - } - - //~ Inner Classes ******************************************************************************************************************************** - - /** - * The worker used by the producer executor service. - * - * @author Bhinav Sura (bhinav.sura@salesforce.com) - */ - private class ProducerWorker implements Callable { - - private final String _topic; - private final String _message; - - /** - * Creates a new Producer object. - * - * @param topic The topic to produce onto. - * @param message The message to enqueue. - */ - public ProducerWorker(String topic, String message) { - this._topic = topic; - this._message = message; - } - - @Override - public Boolean call() { - ProducerRecord record = new ProducerRecord<>(_topic, _message); - - try { - _producer.send(record, new Callback() { - - @Override - public void onCompletion(RecordMetadata metaData, Exception exception) { - if (exception != null) { - _logger.warn("Exception while sending message. ", exception); - } else { - _logger.trace("Message sent to partition {} with offset {}.", metaData.partition(), metaData.offset()); - } - } - }); - } catch (BufferExhaustedException e) { - _logger.warn("Buffer exhausted on kafka producer. Skipping this message.", e); - return false; - } catch (Exception e) { - _logger.warn("Exception occurred when executing producer send(). ", e); - throw new SystemException(e); - } - return true; - } - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ + void shutdown(); +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreService.java new file mode 100644 index 000000000..974538465 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreService.java @@ -0,0 +1,941 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.querystore; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableMap; +import com.google.common.hash.BloomFilter; +import com.google.common.hash.Funnels; +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.QueryStoreRecord; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.QueryStoreService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.http.HttpEntity; +import org.apache.http.HttpHost; +import org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; +import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.UnknownHostException; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +/** + * Implementation of User Query Store using ElasticSearch along with Bloom Filter. 
+ * + * @author ChandraVyas Annakula (cannakula@salesforce.com) + */ +@Singleton +public class ElasticSearchQueryStoreService extends DefaultService implements QueryStoreService { + + private static Logger logger = LoggerFactory.getLogger(ElasticSearchQueryStoreService.class); + + /** Bloom Filter properties */ + private static final long QS_BLOOM_POLL_INTERVAL_MS = 10 * 60 * 1000L; + private static final int QS_BLOOM_DAY_IN_SECONDS = 24 * 60 * 60; + + /** Global ES properties */ + private static final int QUERY_STORE_INDEX_MAX_RESULT_WINDOW = 10000; + private static final int QUERY_STORE_MAX_RETRY_TIMEOUT = 300 * 1000; + private static final String FIELD_TYPE_TEXT = "text"; + private static final String FIELD_TYPE_DATE ="date"; + private RestClient esRestClient; + private final int bulkIndexingSize; + private QueryStoreRecordList.HashAlgorithm idgenHashAlgo; + + /** Query Store index properties */ + private static String QUERY_STORE_INDEX_TEMPLATE_NAME; + private static String QUERY_STORE_INDEX_TEMPLATE_PATTERN_START; + private static String QUERY_STORE_INDEX_NAME; + private static String QUERY_STORE_TYPE_NAME; + private final ObjectMapper queryStoreMapper; + private final int replicationFactorForQueryStoreIndex; + private final int numShardsForQueryStoreIndex; + + protected static BloomFilter createdBloom; + + protected final MonitorService monitorService; + + private int createdBloomExpectedNumberInsertions; + private double createdBloomErrorRate; + + private final String qsRandomBloomAppend; + private final Thread bloomFilterMonitorThread; + private final Map bloomFilterMonitorTags; + private final SystemConfiguration config; + private int bloomFilterFlushHourToStartAt; + private ScheduledExecutorService scheduledExecutorService; + private String createdBloomFileName; + protected final boolean bloomFileWritingEnabled; + + + @Inject + public ElasticSearchQueryStoreService(SystemConfiguration config, MonitorService monitorService) { + super(config); + this.config = config; + this.monitorService =monitorService; + + /** Setup Bloom Filter related items */ + + String appendValue; + try { + appendValue = Integer.toString(Math.abs(InetAddress.getLocalHost().getHostName().hashCode())); + } catch (IOException io) { + appendValue = "123456"; + logger.error("Failed to create qsRandomBloomAppend for querystore bloom, using {}. {}", appendValue, io); + } + qsRandomBloomAppend = appendValue; + bloomFileWritingEnabled = Boolean.parseBoolean(config.getValue(Property.QUERY_STORE_BLOOM_FILE_WRITING_ENABLED.getName(), Property.QUERY_STORE_BLOOM_FILE_WRITING_ENABLED.getDefaultValue())); + String bfStateBaseDir = config.getValue(Property.QUERY_STORE_BF_STATE_BASE_DIR.getName(), Property.QUERY_STORE_BF_STATE_BASE_DIR.getDefaultValue()); + createdBloomFileName = bfStateBaseDir + "/querystore_created_bloom.state." 
+ + config.getValue(SystemConfiguration.ARGUS_INSTANCE_ID, "noid"); + bloomFilterMonitorTags = new ImmutableMap.Builder() + .put("instanceId", config.getValue(SystemConfiguration.ARGUS_INSTANCE_ID, "noid")) + .build(); + createdBloomExpectedNumberInsertions = Integer.parseInt(config.getValue(Property.QUERY_STORE_CREATED_BLOOM_EXPECTED_NUMBER_INSERTIONS.getName(), Property.QUERY_STORE_CREATED_BLOOM_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); + createdBloomErrorRate = Double.parseDouble(config.getValue(Property.QUERY_STORE_CREATED_BLOOM_ERROR_RATE.getName(), Property.QUERY_STORE_CREATED_BLOOM_ERROR_RATE.getDefaultValue())); + createdBloom = createOrReadBloomFilter(createdBloomFileName, createdBloomExpectedNumberInsertions, createdBloomErrorRate); + bloomFilterMonitorThread = new Thread(new ElasticSearchQueryStoreService.BloomFilterMonitorThread(), "bloom-filter-monitor"); + bloomFilterMonitorThread.start(); + bloomFilterFlushHourToStartAt = getBloomFilterFlushHourToStartAt(); + createScheduledExecutorService(bloomFilterFlushHourToStartAt); + + /** Setup Global ES stuff */ + String algorithm = config.getValue(Property.QUERY_STORE_ES_IDGEN_HASH_ALGO.getName(), Property.QUERY_STORE_ES_IDGEN_HASH_ALGO.getDefaultValue()); + try { + idgenHashAlgo = QueryStoreRecordList.HashAlgorithm.fromString(algorithm); + } catch(IllegalArgumentException e) { + logger.warn("{} is not supported by this service. Valid values are: {}.", algorithm, Arrays.asList(QueryStoreRecordList.HashAlgorithm.values())); + idgenHashAlgo = QueryStoreRecordList.HashAlgorithm.MD5; + } + logger.info("Using {} for Elasticsearch document id generation.", idgenHashAlgo); + bulkIndexingSize = Integer.parseInt( + config.getValue(Property.QUERY_STORE_ES_INDEXING_BATCH_SIZE.getName(), Property.QUERY_STORE_ES_INDEXING_BATCH_SIZE.getDefaultValue())); + + String[] nodes = config.getValue(Property.QUERY_STORE_ES_ENDPOINT.getName(), Property.QUERY_STORE_ES_ENDPOINT.getDefaultValue()).split(","); + HttpHost[] httpHosts = new HttpHost[nodes.length]; + for(int i=0; i { + try { + int connCount = Integer.parseInt(config.getValue(Property.QUERY_STORE_ES_CONNECTION_COUNT.getName(), + Property.QUERY_STORE_ES_CONNECTION_COUNT.getDefaultValue())); + PoolingNHttpClientConnectionManager connMgr = + new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); + connMgr.setMaxTotal(connCount); + connMgr.setDefaultMaxPerRoute(connCount / httpHosts.length); + httpClientBuilder.setConnectionManager(connMgr); + return httpClientBuilder; + } catch(Exception e) { + throw new SystemException(e); + } + }; + RestClientBuilder.RequestConfigCallback requestConfigCallback = requestConfigBuilder -> { + int connTimeout = Integer.parseInt(config.getValue(Property.QUERY_STORE_ES_ENDPOINT_CONNECTION_TIMEOUT.getName(), + Property.QUERY_STORE_ES_ENDPOINT_CONNECTION_TIMEOUT.getDefaultValue())); + int socketTimeout = Integer.parseInt(config.getValue(Property.QUERY_STORE_ES_ENDPOINT_SOCKET_TIMEOUT.getName(), + Property.QUERY_STORE_ES_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); + requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); + + logger.info("esRestClient set connTimeoutMillis {} socketTimeoutMillis {}", + connTimeout, socketTimeout); + + return requestConfigBuilder; + }; + esRestClient = RestClient.builder(httpHosts) + .setHttpClientConfigCallback(clientConfigCallback) + .setRequestConfigCallback(requestConfigCallback) + .setMaxRetryTimeoutMillis(QUERY_STORE_MAX_RETRY_TIMEOUT) + .build(); + logger.info("esRestClient set 
MaxRetryTimeoutsMillis {}", QUERY_STORE_MAX_RETRY_TIMEOUT); + + /** Set up querystore index stuff */ + queryStoreMapper = getQueryStoreObjectMapper(new QueryStoreRecordList.IndexSerializer()); + QUERY_STORE_TYPE_NAME = config.getValue(Property.QUERY_STORE_ES_INDEX_TYPE.getName(), + Property.QUERY_STORE_ES_INDEX_TYPE.getDefaultValue()); + QUERY_STORE_INDEX_TEMPLATE_NAME = config.getValue(Property.QUERY_STORE_ES_INDEX_TEMPLATE_NAME.getName(), + Property.QUERY_STORE_ES_INDEX_TEMPLATE_NAME.getDefaultValue()); + QUERY_STORE_INDEX_TEMPLATE_PATTERN_START = config.getValue(Property.QUERY_STORE_ES_INDEX_TEMPLATE_PATTERN_START.getName(), + Property.QUERY_STORE_ES_INDEX_TEMPLATE_PATTERN_START.getDefaultValue()); + QUERY_STORE_INDEX_NAME = config.getValue(Property.QUERY_STORE_ES_INDEX_NAME.getName(), + Property.QUERY_STORE_ES_INDEX_NAME.getDefaultValue()); + replicationFactorForQueryStoreIndex = Integer.parseInt( + config.getValue(Property.QUERY_STORE_ES_NUM_REPLICAS.getName(), Property.QUERY_STORE_ES_NUM_REPLICAS.getDefaultValue())); + numShardsForQueryStoreIndex = Integer.parseInt( + config.getValue(Property.QUERY_STORE_ES_SHARDS_COUNT.getName(), Property.QUERY_STORE_ES_SHARDS_COUNT.getDefaultValue())); + createQueryStoreIndexTemplate(QUERY_STORE_INDEX_TEMPLATE_NAME, replicationFactorForQueryStoreIndex, numShardsForQueryStoreIndex, + () -> createQueryStoreMappingsNode()); + + } + + + protected int getNumSecondsUntilTargetHour(int targetHour){ + logger.info("Initialized bloom filter flushing out, at {} hour of day", targetHour); + Calendar calendar = Calendar.getInstance(); + int hour = calendar.get(Calendar.HOUR_OF_DAY); + int secondsPastHour = calendar.get(Calendar.MINUTE) * 60; + int hoursUntil = hour < targetHour ? (targetHour - hour) : (targetHour + 24 - hour); + return hoursUntil * 60 * 60 - secondsPastHour; + } + + /* + * Have a different flush start hour for schema committers based on hostname, to prevent thundering herd problem. 
+ */ + private int getBloomFilterFlushHourToStartAt() { + int bloomFilterFlushHourToStartAt = 0; + try { + String toHash = InetAddress.getLocalHost().getHostName() + config.getValue(config.ARGUS_INSTANCE_ID, "noid"); + HashFunction hf = Hashing.murmur3_128(); + bloomFilterFlushHourToStartAt = Math.abs(hf.newHasher().putString(toHash, Charset.defaultCharset()).hash().asInt() % 24); + } catch (UnknownHostException e) { + logger.warn("BloomFilter UnknownHostException", e); + } + logger.info("BloomFilter flush hour to start at {}th hour of day", bloomFilterFlushHourToStartAt); + return bloomFilterFlushHourToStartAt; + } + + private BloomFilter createOrReadBloomFilter(String filename, int expectedNumberInsertions, double errorRate) { + File bfFile = new File(filename); + if (bloomFileWritingEnabled && bfFile.exists()) { + logger.info("Bloomfilter state file {} exists, using it to pre-populate bloom", filename); + try (InputStream inputStream = new FileInputStream(bfFile)) { + return BloomFilter.readFrom(inputStream, Funnels.stringFunnel(Charset.defaultCharset())); + } catch (IOException io) { + logger.error("Bloomfilter state file {} read error, not using prev state: {}", filename, io); + return BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), expectedNumberInsertions, errorRate); + } + } + logger.info("Bloomfilter state file {} NOT present or bloomFileWritingEnabled is false, starting fresh bloom", filename); + return BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), expectedNumberInsertions, errorRate); + } + + private void writeBloomsToFile() { + if (!bloomFileWritingEnabled) { + return; + } + + File createdBloomFile = new File(this.createdBloomFileName); + if (!createdBloomFile.getParentFile().exists()) { + createdBloomFile.getParentFile().mkdirs(); + } + try (OutputStream out = new FileOutputStream(createdBloomFile)) { + createdBloom.writeTo(out); + logger.info("Successfully wrote created-metrics bloomfilter to file {}", this.createdBloomFileName); + } catch (IOException io) { + logger.error("Failed to write to createdBloom file", io); + } + + } + + + private void createScheduledExecutorService(int targetHourToStartAt){ + scheduledExecutorService = Executors.newScheduledThreadPool(1); + int initialDelayInSeconds = getNumSecondsUntilTargetHour(targetHourToStartAt); + ElasticSearchQueryStoreService.BloomFilterFlushThread bloomFilterFlushThread = new ElasticSearchQueryStoreService.BloomFilterFlushThread(); + scheduledExecutorService.scheduleAtFixedRate(bloomFilterFlushThread, initialDelayInSeconds, QS_BLOOM_DAY_IN_SECONDS, TimeUnit.SECONDS); + } + + private void shutdownScheduledExecutorService(){ + logger.info("Shutting down scheduled bloom filter flush executor service"); + scheduledExecutorService.shutdown(); + try { + scheduledExecutorService.awaitTermination(10, TimeUnit.SECONDS); + } catch (InterruptedException ex) { + logger.warn("Shutdown of executor service was interrupted."); + Thread.currentThread().interrupt(); + } + } + + private void shutdownElasticSearchService(){ + try { + esRestClient.close(); + logger.info("Shutdown of ElasticSearch RESTClient complete"); + } catch (IOException e) { + logger.warn("ElasticSearch RestClient failed to shutdown properly.", e); + } + } + + + @Override + public void putArgusWsQueries(List metrics) { + requireNotDisposed(); + requireArgument(metrics != null, "Metric list cannot be null."); + if (metrics.size()==0) { + return; + } + Set recordsToAdd = new HashSet<>(metrics.size()); + + for(Metric metric : metrics) 
{ + String key = QueryStoreRecord.constructBloomKey(metric.getScope(),metric.getMetric()); + if (!createdBloom.mightContain(key)) { + recordsToAdd.add(metric); + } + } + insertRecordsToES(recordsToAdd); + } + + /** + * Inserts User Query Records to ES + * @param recordsToAdd The metrics for which to store query records for. Cannot be null, but may be empty. + */ + + protected void insertRecordsToES(Set recordsToAdd) { + requireArgument(recordsToAdd != null, "Metrics list cannot be null."); + if (recordsToAdd.size()==0){ + return; + } + + int totalCount = 0; + long start = System.currentTimeMillis(); + List> fracturedQueryStoreRecordList = fractureQueryStoreRecords(recordsToAdd); + for(Set records : fracturedQueryStoreRecordList) { + if(!records.isEmpty()) { + Set failedRecords = upsertQueryStoreRecords(records); + records.removeAll(failedRecords); + addQueryRecordsToCreatedBloom(records); + totalCount += records.size(); + } + } + monitorService.modifyCounter(MonitorService.Counter.QUERYSTORE_RECORDS_WRITTEN, totalCount, null); + monitorService.modifyCounter(MonitorService.Counter.QUERYSTORE_RECORDS_WRITE_LATENCY, + (System.currentTimeMillis() - start), + null); + + logger.info("{} new metrics sent to ES in {} ms.", totalCount, System.currentTimeMillis()-start); + } + + /* Converts the given list of metrics to a list of QueryStore records. At the same time, fracture the records list + * if its size is greater than QUERY_STORE_ES_INDEXING_BATCH_SIZE. + */ + protected List> fractureQueryStoreRecords(Set metrics) { + List> fracturedList = new ArrayList<>(); + + Set records = new HashSet<>(bulkIndexingSize); + for(Metric metric : metrics) { + records.add(new QueryStoreRecord(metric.getScope(),metric.getMetric())); + if(records.size() == bulkIndexingSize) { + fracturedList.add(records); + records = new HashSet<>(bulkIndexingSize); + } + } + + if(!records.isEmpty()) { + fracturedList.add(records); + } + + return fracturedList; + } + + /** + * @param records Set of records to insert + * @return List of records that failed + */ + protected Set upsertQueryStoreRecords(Set records) { + + String requestUrl = String.format("/%s/%s/_bulk", QUERY_STORE_INDEX_NAME, QUERY_STORE_TYPE_NAME); + String strResponse; + ObjectMapper localObjectMapper = new ObjectMapper(); + QueryStoreRecordList indexQueryStoreRecordList = new QueryStoreRecordList(records, idgenHashAlgo); + + try { + String requestBody = queryStoreMapper.writeValueAsString(indexQueryStoreRecordList); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + strResponse = extractResponse(response); + } + catch (IOException e) + { + throw new SystemException("Failed to insert query store record to ES. ", e); + } + + try { + Set failedRecords = new HashSet<>(); + PutResponse putResponse = localObjectMapper.readValue(strResponse, PutResponse.class); + + if(putResponse.errors) { + for(PutResponse.Item item : putResponse.items) { + if (item.index !=null && item.index.status != HttpStatus.SC_CREATED) { + logger.warn("Failed to add record {} to index {}. 
Reason: {}", indexQueryStoreRecordList.getRecord(item.index._id), QUERY_STORE_INDEX_NAME, localObjectMapper.writeValueAsString(item.index.error)); + failedRecords.add(indexQueryStoreRecordList.getRecord(item.index._id)); + } + } + + if (failedRecords.size() != 0) { + logger.warn("{} records were not written to index {}", failedRecords.size(), QUERY_STORE_INDEX_NAME); + } + } + + return failedRecords; + } catch (IOException e) { + throw new SystemException("Failed to parse response of querystore ES records insertion. The response was: " + strResponse, e); + } + } + + protected void addQueryRecordsToCreatedBloom(Set records) { + for (QueryStoreRecord record : records) { + createdBloom.put(record.toBloomFilterKey()); + } + } + + /** Helper to process the response.

+ * Throws IllegalArgumentException when the http status code is in the 400 range
+ * Throws SystemException when the http status code is outsdie of the 200 and 400 range + * @param response ES response + * @return Stringified response + */ + protected String extractResponse(Response response) { + requireArgument(response != null, "HttpResponse object cannot be null."); + + return doExtractResponse(response.getStatusLine().getStatusCode(), response.getEntity()); + } + + /** + * testable version of {@link ElasticSearchQueryStoreService#extractResponse(Response)} + * @param statusCode + * @param entity + * @return + */ + @VisibleForTesting + static String doExtractResponse(int statusCode, HttpEntity entity) { + String message = null; + + if (entity != null) { + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + entity.writeTo(baos); + message = baos.toString("UTF-8"); + } + catch (IOException ex) { + throw new SystemException(ex); + } + } + + //if the response is in the 400 range, use IllegalArgumentException, which currently translates to a 400 error + if (statusCode>= HttpStatus.SC_BAD_REQUEST && statusCode < HttpStatus.SC_INTERNAL_SERVER_ERROR) { + throw new IllegalArgumentException("Status code: " + statusCode + " . Error occurred. " + message); + } + //everything else that's not in the 200 range, use SystemException, which translates to a 500 error. + if ((statusCode < HttpStatus.SC_OK) || (statusCode >= HttpStatus.SC_MULTIPLE_CHOICES)) { + throw new SystemException("Status code: " + statusCode + " . Error occurred. " + message); + } else { + return message; + } + } + + @VisibleForTesting + static ObjectMapper getQueryStoreObjectMapper(JsonSerializer serializer) { + ObjectMapper mapper = new ObjectMapper(); + + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + SimpleModule module = new SimpleModule(); + module.addSerializer(QueryStoreRecordList.class, serializer); + module.addDeserializer(QueryStoreRecordList.class, new QueryStoreRecordList.Deserializer()); + mapper.registerModule(module); + + return mapper; + } + + /* Method to change the rest client. Used for testing. */ + protected void setESRestClient(RestClient restClient) + { + this.esRestClient = restClient; + } + + private void createQueryStoreIndexTemplate(String templateName, int replicationFactor, int numShards, + Supplier createIndexTemplateMappingsNode) { + try { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode rootNode = mapper.createObjectNode(); + String templatePattern = String.format("%s*", QUERY_STORE_INDEX_TEMPLATE_PATTERN_START); + rootNode.put("template",templatePattern); + rootNode.set("settings", createQueryStoreIndexTemplateSettingsNode(replicationFactor, numShards)); + rootNode.set("mappings", createIndexTemplateMappingsNode.get()); + String requestBody = rootNode.toString(); + String requestUrl = new StringBuilder().append("/_template/").append(templateName).toString(); + Request request = new Request(HttpMethod.PUT.getName(), requestUrl); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + extractResponse(response); + } catch (Exception e) { + logger.error("Failed to check/create {} index template. 
Reason: {}", + templateName, e); + } + } + + private ObjectNode createQueryStoreIndexTemplateSettingsNode(int replicationFactor, int numShards) { + ObjectMapper mapper = new ObjectMapper(); + + ObjectNode indexNode = mapper.createObjectNode(); + indexNode.put("max_result_window", QUERY_STORE_INDEX_MAX_RESULT_WINDOW); + indexNode.put("number_of_replicas", replicationFactor); + indexNode.put("number_of_shards", numShards); + + ObjectNode settingsNode = mapper.createObjectNode(); + settingsNode.set("index", indexNode); + + return settingsNode; + } + + private ObjectNode createQueryStoreMappingsNode() { + ObjectMapper mapper = new ObjectMapper(); + + ObjectNode propertiesNode = mapper.createObjectNode(); + propertiesNode.set(QueryStoreRecordList.QueryStoreRecordType.SCOPE.getName(), createQueryStoreKeywordFieldNode(FIELD_TYPE_TEXT)); + propertiesNode.set(QueryStoreRecordList.QueryStoreRecordType.METRIC.getName(), createQueryStoreKeywordFieldNode(FIELD_TYPE_TEXT)); + propertiesNode.set("sourcehost", createQueryStoreKeywordFieldNode(FIELD_TYPE_TEXT)); + propertiesNode.set("mts", createQueryStoreFieldNodeNoAnalyzer(FIELD_TYPE_DATE)); + ObjectNode typeNode = mapper.createObjectNode(); + typeNode.set("properties", propertiesNode); + + ObjectNode mappingsNode = mapper.createObjectNode(); + mappingsNode.set(QUERY_STORE_TYPE_NAME, typeNode); + + return mappingsNode; + } + + private ObjectNode createQueryStoreKeywordFieldNode(String type) { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode fieldNode = mapper.createObjectNode(); + fieldNode.put("type", type); + ObjectNode keywordNode = mapper.createObjectNode(); + keywordNode.put("type", "keyword"); + ObjectNode fieldsNode = mapper.createObjectNode(); + fieldsNode.set("raw", keywordNode); + fieldNode.set("fields", fieldsNode); + return fieldNode; + } + + private ObjectNode createQueryStoreFieldNodeNoAnalyzer(String type) { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode fieldNode = mapper.createObjectNode(); + fieldNode.put("type", type); + return fieldNode; + } + + @Override + public void dispose() { + requireNotDisposed(); + if (bloomFilterMonitorThread != null && bloomFilterMonitorThread.isAlive()) { + logger.info("Stopping bloom filter monitor thread."); + bloomFilterMonitorThread.interrupt(); + logger.info("Bloom filter monitor thread interrupted."); + try { + logger.info("Waiting for bloom filter monitor thread to terminate."); + bloomFilterMonitorThread.join(); + } catch (InterruptedException ex) { + logger.warn("Bloom filter monitor thread was interrupted while shutting down."); + } + logger.info("System monitoring stopped."); + } else { + logger.info("Requested shutdown of bloom filter monitor thread aborted, as it is not yet running."); + } + shutdownScheduledExecutorService(); + shutdownElasticSearchService(); + } + + @Override + public Properties getServiceProperties() { + Properties serviceProps = new Properties(); + + for (Property property : Property.values()) { + serviceProps.put(property.getName(), property.getDefaultValue()); + } + return serviceProps; + } + + //~ Inner Classes ******************************************************************************************************************************** + + /** + * Bloom Filter monitoring thread. 
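+ * Summarizing the loop below: the thread sleeps for QS_BLOOM_POLL_INTERVAL_MS, then
+ * reports createdBloom.approximateElementCount() to the
+ * QUERY_STORE_BLOOM_CREATED_APPROXIMATE_ELEMENT_COUNT counter and logs the expected
+ * false-positive probability from createdBloom.expectedFpp().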
+ * + */ + private class BloomFilterMonitorThread implements Runnable { + @Override + public void run() { + logger.info("Initialized qsRandomBloomAppend for bloom filter key = {}", qsRandomBloomAppend); + while (!Thread.currentThread().isInterrupted()) { + _sleepForPollPeriod(); + if (!Thread.currentThread().isInterrupted()) { + try { + _checkBloomFilterUsage(); + } catch (Exception ex) { + logger.warn("Exception occurred while checking bloom filter usage.", ex); + } + } + } + } + + private void _checkBloomFilterUsage() { + monitorService.modifyCounter(MonitorService.Counter.QUERY_STORE_BLOOM_CREATED_APPROXIMATE_ELEMENT_COUNT, createdBloom.approximateElementCount(), bloomFilterMonitorTags); + logger.info("Bloom for created-timestamp expected error rate = {}", createdBloom.expectedFpp()); + } + + private void _sleepForPollPeriod() { + try { + logger.info("Sleeping for {}s before checking bloom filter statistics.", QS_BLOOM_POLL_INTERVAL_MS / 1000); + Thread.sleep(QS_BLOOM_POLL_INTERVAL_MS); + } catch (InterruptedException ex) { + logger.warn("ElasticSearchQueryStoreService memory monitor thread was interrupted."); + Thread.currentThread().interrupt(); + } + } + } + + /** + * Writes the createdBloom to disk. + */ + private class BloomFilterFlushThread implements Runnable { + @Override + public void run() { + try { + _flushBloomFilter(); + } catch (Exception ex) { + logger.warn("Exception occurred while flushing bloom filter.", ex); + } + } + + private void _flushBloomFilter() { + logger.info("Flushing out bloom filter entries"); + writeBloomsToFile(); + } + } + + + /** + * Enumeration of supported HTTP methods. + * + */ + private enum HttpMethod { + + /** POST operation. */ + POST("POST"), + /** PUT operation. */ + PUT("PUT"), + /** HEAD operation. */ + HEAD("HEAD"); + + private String name; + + HttpMethod(String name) { + this.setName(name); + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + + /** + * The set of implementation specific configuration properties. + * + */ + public enum Property { + + QUERY_STORE_BLOOM_FILE_WRITING_ENABLED("service.property.querystore.bloom.file.writing.enabled", "false"), + QUERY_STORE_BF_STATE_BASE_DIR("service.property.querystore.bf.state.base.dir", "bloomstate"), + + /* + * (Comment from old bloom filter pattern) + * Estimated Filter Size using bloomFilter 1 million entries + * https://hur.st/bloomfilter/?n=1000000&p=1.0E-5&m=&k= 2.86MiB + * Storing in a Set 100K entries with avg length of 15 chars would be 100K * 15 * 2 B = 30B * 100K = 3 MB + * If # of entries is 1 million, then it would be 30 MB resulting in savings in space. + */ + QUERY_STORE_CREATED_BLOOM_EXPECTED_NUMBER_INSERTIONS("service.property.querystore.bloomfilter.created.expected.number.insertions", "40"), + QUERY_STORE_CREATED_BLOOM_ERROR_RATE("service.property.querystore.bloomfilter.created.error.rate", "0.00001"), + + QUERY_STORE_ES_ENDPOINT("service.property.querystore.elasticsearch.endpoint", "http://localhost:9200,http://localhost:9201"), + /** Connection timeout for ES REST client. */ + QUERY_STORE_ES_ENDPOINT_CONNECTION_TIMEOUT("service.property.querystore.elasticsearch.endpoint.connection.timeout", "10000"), + /** Socket connection timeout for ES REST client. */ + QUERY_STORE_ES_ENDPOINT_SOCKET_TIMEOUT("service.property.querystore.elasticsearch.endpoint.socket.timeout", "10000"), + /** Connection count for ES REST client. 
*/ + QUERY_STORE_ES_CONNECTION_COUNT("service.property.querystore.elasticsearch.connection.count", "10"), + /** The no. of records to batch for bulk indexing requests. + * https://www.elastic.co/guide/en/elasticsearch/guide/current/indexing-performance.html#_using_and_sizing_bulk_requests + */ + QUERY_STORE_ES_INDEXING_BATCH_SIZE("service.property.querystore.elasticsearch.indexing.batch.size", "10000"), + /** The hashing algorithm to use for generating document id. */ + QUERY_STORE_ES_IDGEN_HASH_ALGO("service.property.querystore.elasticsearch.idgen.hash.algo", "MD5"), + + + /** Replication factor for query store */ + QUERY_STORE_ES_NUM_REPLICAS("service.property.querystore.elasticsearch.num.replicas", "1"), + /** Shard count for query store */ + QUERY_STORE_ES_SHARDS_COUNT("service.property.querystore.elasticsearch.shards.count", "6"), + /** Query store index type */ + QUERY_STORE_ES_INDEX_TYPE("service.property.querystore.elasticsearch.index.type", "argus-query_type"), + /** Query store index template name */ + QUERY_STORE_ES_INDEX_TEMPLATE_NAME("service.property.querystore.elasticsearch.indextemplate.name", "argus-querystore-template"), + /** Query store index template pattern match */ + QUERY_STORE_ES_INDEX_TEMPLATE_PATTERN_START("service.property.querystore.elasticsearch.indextemplate.patternstart", "argusqs"), + /** Query store index name */ + QUERY_STORE_ES_INDEX_NAME("service.property.querystore.elasticsearch.index.name", "argusqs-v1"); + + + private final String _name; + private final String _defaultValue; + + Property(String name, String defaultValue) { + _name = name; + _defaultValue = defaultValue; + } + + /** + * Returns the property name. + * + * @return The property name. + */ + public String getName() { + return _name; + } + + /** + * Returns the default value for the property. + * + * @return The default value. 
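+ * Callers typically resolve these entries with the idiom
+ * {@code config.getValue(property.getName(), property.getDefaultValue())},
+ * the same {@code config.getValue(name, default)} pattern used elsewhere in this change
+ * (for example in AbstractSchemaService).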
+ */ + public String getDefaultValue() { + return _defaultValue; + } + } + + /** + * Used for constructing Elastic Search Response object + */ + static class PutResponse { + private int took; + private boolean errors; + private List items; + + public PutResponse() {} + + public int getTook() { + return took; + } + + public void setTook(int took) { + this.took = took; + } + + public boolean isErrors() { + return errors; + } + + public void setErrors(boolean errors) { + this.errors = errors; + } + + public List getItems() { + return items; + } + + public void setItems(List items) { + this.items = items; + } + + @JsonIgnoreProperties(ignoreUnknown = true) + static class Item { + private CreateItem create; + private CreateItem index; + private CreateItem update; + + public Item() {} + + public CreateItem getCreate() { + return create; + } + + public void setCreate(CreateItem create) { + this.create = create; + } + + public CreateItem getIndex() { + return index; + } + + public void setIndex(CreateItem index) { + this.index = index; + } + + public CreateItem getUpdate() { + return update; + } + + public void setUpdate(CreateItem update) { + this.update = update; + } + } + + @JsonIgnoreProperties(ignoreUnknown = true) + static class CreateItem { + private String _index; + private String _type; + private String _id; + private int status; + private int _version; + private Error error; + + public CreateItem() {} + + public String get_index() { + return _index; + } + + public void set_index(String _index) { + this._index = _index; + } + + public String get_type() { + return _type; + } + + public void set_type(String _type) { + this._type = _type; + } + + public String get_id() { + return _id; + } + + public void set_id(String _id) { + this._id = _id; + } + + public int get_version() { + return _version; + } + + public void set_version(int _version) { + this._version = _version; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + public Error getError() { + return error; + } + + public void setError(Error error) { + this.error = error; + } + } + + @JsonIgnoreProperties(ignoreUnknown = true) + static class Error { + private String type; + private String reason; + + public Error() {} + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + } + } + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/NoOperationQueryStoreService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/NoOperationQueryStoreService.java new file mode 100644 index 000000000..1754040ac --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/NoOperationQueryStoreService.java @@ -0,0 +1,25 @@ +package com.salesforce.dva.argus.service.querystore; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.QueryStoreService; +import com.salesforce.dva.argus.system.SystemConfiguration; + +import java.util.List; + +/** + * Query Store service that does absolutely nothing. Meant as an available QueryStoreService binding for dependents + * of ArgusCore that do not need a QueryStoreService. 
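+ * <p>A hypothetical wiring sketch for illustration (the binding below is an assumption,
+ * not part of this change):
+ * <pre>{@code
+ * // e.g., inside a Guice module's configure():
+ * bind(QueryStoreService.class).to(NoOperationQueryStoreService.class);
+ * }</pre>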
+ */ +public class NoOperationQueryStoreService extends DefaultService implements QueryStoreService { + + @Inject + public NoOperationQueryStoreService(SystemConfiguration config) { + super(config); + } + + @Override + public void putArgusWsQueries(List metrics){ return;} + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/QueryStoreRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/QueryStoreRecordList.java new file mode 100644 index 000000000..181f5e6dc --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/querystore/QueryStoreRecordList.java @@ -0,0 +1,229 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.service.querystore; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.salesforce.dva.argus.entity.QueryStoreRecord; +import net.openhft.hashing.LongHashFunction; +import org.apache.commons.codec.digest.DigestUtils; + +import java.io.IOException; +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Stores map of QueryStoreRecords and also useful to serialize and deserialize QueryStoreRecord map + * + */ +public class QueryStoreRecordList { + + private Map _idToQueryStoreRecordMap = new HashMap<>(); + private String _scrollID; + + public QueryStoreRecordList(List records, String scrollID) { + int count = 0; + for(QueryStoreRecord record : records) { + _idToQueryStoreRecordMap.put(String.valueOf(count++), record); + } + setScrollID(scrollID); + } + + public QueryStoreRecordList(Set records, HashAlgorithm algorithm) { + for(QueryStoreRecord record : records) { + String id = null; + String qsrKey = QueryStoreRecord.getFieldsAsString(record); + if(HashAlgorithm.MD5.equals(algorithm)) { + id = DigestUtils.md5Hex(qsrKey); + } else { + id = String.valueOf(LongHashFunction.xx().hashChars(qsrKey)); + } + _idToQueryStoreRecordMap.put(id, record); + } + } + + public List getRecords() { + return new ArrayList<>(_idToQueryStoreRecordMap.values()); + } + + public String getScrollID() { + return _scrollID; + } + + public void setScrollID(String scrollID) { + this._scrollID = scrollID; + } + + public QueryStoreRecord getRecord(String id) { + return _idToQueryStoreRecordMap.get(id); + } + + public static class IndexSerializer extends JsonSerializer { + + @Override + public void serialize(QueryStoreRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + + for(Map.Entry entry : list._idToQueryStoreRecordMap.entrySet()) { + String fieldsData = mapper.writeValueAsString(entry.getValue()); + jgen.writeRaw("{ \"index\" : {\"_id\" : \"" + entry.getKey() + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + String hostName; + try { + hostName = InetAddress.getLocalHost().getHostName(); + } catch (Exception e) + { + hostName = "nohostname"; + } + String sourceHost= "\"sourcehost\":" + "\""+hostName+"\""; + long currentTimeMillis = System.currentTimeMillis(); + String updateTimeStampField = "\"mts\":" + currentTimeMillis; + jgen.writeRaw(fieldsData.substring(0, fieldsData.length()-1) + "," +sourceHost+"," + updateTimeStampField + "}"); + jgen.writeRaw(System.lineSeparator()); + } + } + } + + + public static class Deserializer extends JsonDeserializer { + + @Override + public QueryStoreRecordList deserialize(JsonParser jp, DeserializationContext context) + throws IOException { + + String scrollID = null; + List 
records = Collections.emptyList(); + + JsonNode rootNode = jp.getCodec().readTree(jp); + if(rootNode.has("_scroll_id")) { + scrollID = rootNode.get("_scroll_id").asText(); + } + JsonNode hits = rootNode.get("hits").get("hits"); + + if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { + records = new ArrayList<>(hits.size()); + Iterator iter = hits.elements(); + while(iter.hasNext()) { + JsonNode hit = iter.next(); + JsonNode source = hit.get("_source"); + + JsonNode scopeNode = source.get(QueryStoreRecordType.SCOPE.getName()); + JsonNode metricNode = source.get(QueryStoreRecordType.METRIC.getName()); + + records.add(new QueryStoreRecord(scopeNode.asText(), metricNode.asText())); + } + } + + return new QueryStoreRecordList(records, scrollID); + } + } + + public enum HashAlgorithm { + MD5, + XXHASH; + + public static QueryStoreRecordList.HashAlgorithm fromString(String str) throws IllegalArgumentException { + for(QueryStoreRecordList.HashAlgorithm algo : QueryStoreRecordList.HashAlgorithm.values()) { + if(algo.name().equalsIgnoreCase(str)) { + return algo; + } + } + + throw new IllegalArgumentException(str + " does not match any of the available algorithms."); + } + } + + /** + * Indicates the Query Store record field to be used for matching. + * + */ + public static enum QueryStoreRecordType { + + /** Match against the scope field. */ + SCOPE("scope"), + /** Match against the metric field. */ + METRIC("metric"); + + private String _name; + + private QueryStoreRecordType(String name) { + _name = name; + } + + /** + * Returns a given record type corresponding to the given name. + * + * @param name The case sensitive name to match against. Cannot be null. + * + * @return The corresponding record type or null if no matching record type exists. + */ + @JsonCreator + public static QueryStoreRecordType fromName(String name) { + for (QueryStoreRecordType type : QueryStoreRecordType.values()) { + if (type.getName().equalsIgnoreCase(name)) { + return type; + } + } + + throw new IllegalArgumentException("Illegal record type: " + name); + } + + /** + * Returns the record type name. + * + * @return The record type name. 
+ */ + public String getName() { + return _name; + } + } + +} + diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DefaultSchedulingService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DefaultSchedulingService.java index 2b9ab6f70..091d3b2af 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DefaultSchedulingService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DefaultSchedulingService.java @@ -39,7 +39,6 @@ import com.salesforce.dva.argus.entity.JPAEntity; import com.salesforce.dva.argus.entity.ServiceManagementRecord; import com.salesforce.dva.argus.entity.ServiceManagementRecord.Service; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AlertService; import com.salesforce.dva.argus.service.AuditService; import com.salesforce.dva.argus.service.DefaultService; @@ -52,6 +51,7 @@ import com.salesforce.dva.argus.system.SystemException; import it.sauronsoftware.cron4j.Scheduler; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.text.MessageFormat; import java.util.ArrayList; import java.util.Arrays; @@ -78,8 +78,7 @@ public class DefaultSchedulingService extends DefaultService implements Scheduli //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; + private final Logger _logger = LoggerFactory.getLogger(DefaultSchedulingService.class); private final AlertService _alertService; private final GlobalInterlockService _globalInterlockService; private final UserService _userService; @@ -100,7 +99,7 @@ public class DefaultSchedulingService extends DefaultService implements Scheduli * @param config */ @Inject - DefaultSchedulingService(AlertService alertService, GlobalInterlockService globalInterlockService, UserService userService, + public DefaultSchedulingService(AlertService alertService, GlobalInterlockService globalInterlockService, UserService userService, ServiceManagementService serviceManagementRecordService, AuditService auditService, SystemConfiguration config) { super(config); requireArgument(alertService != null, "Alert service cannot be null."); @@ -246,9 +245,11 @@ public void run() { * * @author Raj Sarkapally (rsarkapally@salesforce.com) */ - private class SchedulingThread extends Thread { + class SchedulingThread extends Thread { private final LockType lockType; + Scheduler scheduler = null; + String key = null; /** * Creates a new SchedulingThread object. 
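The visibility changes in this hunk (public constructor; package-private SchedulingThread with the extracted doSchedule, refreshMaster, and disposeScheduler members) appear aimed at unit testing. A minimal sketch of how a same-package test might drive a single scheduling pass, assuming Mockito is available, SystemConfiguration is mockable here, and SchedulingThread keeps the (String, LockType) constructor used by the sibling scheduler classes:

    // Hypothetical test sketch; the names below are assumptions, not part of this change.
    // import static org.mockito.Mockito.mock;
    SystemConfiguration config = mock(SystemConfiguration.class);
    DefaultSchedulingService service = new DefaultSchedulingService(
            mock(AlertService.class), mock(GlobalInterlockService.class), mock(UserService.class),
            mock(ServiceManagementService.class), mock(AuditService.class), config);
    DefaultSchedulingService.SchedulingThread thread =
            service.new SchedulingThread("schedule-alerts", LockType.ALERT_SCHEDULING);
    thread.doSchedule();       // one master-election and schedule-refresh pass, run inline
    thread.disposeScheduler(); // stops any cron4j Scheduler the pass created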
@@ -283,30 +284,29 @@ protected List getEnabledJobs() { @Override public void run() { - Scheduler scheduler = null; - String key = null; - while (!isInterrupted()) { - if (_isSchedulingServiceEnabled()) { - if (key == null) { - key = _becomeMaster(); - } - while (!isInterrupted() && key != null && _isSchedulingServiceEnabled()) { - scheduler = _refreshJobSchedule(scheduler); - key = _refreshMaster(key); - } - } - + doSchedule(); boolean interrupted = interrupted(); - - _releaseLock(key); if (!interrupted) { _sleepForMasterPollPeriod(); } else { interrupt(); } } - _disposeScheduler(scheduler); + disposeScheduler(); + } + + void doSchedule() { + if (_isSchedulingServiceEnabled()) { + if (key == null) { + key = _becomeMaster(); + } + while (!isInterrupted() && key != null && _isSchedulingServiceEnabled()) { + scheduler = _refreshJobSchedule(); + key = refreshMaster(key); + } + } + _releaseLock(key); } /** @@ -329,7 +329,7 @@ protected String _becomeMaster() { } } - private String _refreshMaster(String oldKey) { + String refreshMaster(String oldKey) { assert oldKey != null : "Can only refresh a key that already exists."; try { _logger.info("Sleeping for {}s before next attempt refreshing {} schedule.", GLOBAL_LOCK_REFRESH_PERIOD_MS / 1000, lockType); @@ -356,34 +356,36 @@ private String _refreshMaster(String oldKey) { /** * Refreshes the job schedule with the current list of enabled jobs of the type to be scheduled. * - * @param scheduler The scheduler to update. - * * @return The updated scheduler. */ - protected Scheduler _refreshJobSchedule(Scheduler scheduler) { - _disposeScheduler(scheduler); + protected Scheduler _refreshJobSchedule() { + disposeScheduler(); - Scheduler result = new Scheduler(); + scheduler = new Scheduler(); _logger.info("Refreshing job schedule."); for (CronJob job : getEnabledJobs()) { - _logger.debug("Adding job to scheduler: {}", job); - try { - result.schedule(job.getCronEntry(), new RunnableJob(lockType, job)); - } catch (Exception ex) { - String msg = "Failed to schedule job {0} : {1}"; - JPAEntity entity = JPAEntity.class.cast(job); - - _auditService.createAudit(msg, entity, entity, ex.getMessage()); - _logger.error("Failed to schedule job {} : {}", job, ex.getMessage()); - } + doScheduleJob(scheduler, job); } - result.start(); + scheduler.start(); _logger.info("Job schedule refreshed."); - return result; + return scheduler; + } + + void doScheduleJob(Scheduler scheduler, CronJob job) { + _logger.debug("Adding job to scheduler: {}", job); + try { + scheduler.schedule(job.getCronEntry(), new RunnableJob(lockType, job)); + } catch (Exception ex) { + String msg = "Failed to schedule job {0} : {1}"; + JPAEntity entity = JPAEntity.class.cast(job); + + _auditService.createAudit(msg, entity, entity, ex.getMessage()); + _logger.error("Failed to schedule job {} : {}", job, ex.getMessage()); + } } - private void _disposeScheduler(Scheduler scheduler) { + void disposeScheduler() { if (scheduler != null) { scheduler.stop(); } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedDatabaseSchedulingService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedDatabaseSchedulingService.java index 7547232b2..9de8ebbf5 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedDatabaseSchedulingService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedDatabaseSchedulingService.java @@ -30,6 +30,24 @@ */ package 
com.salesforce.dva.argus.service.schedule; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang.exception.ExceptionUtils; +import org.slf4j.Logger; + import com.google.inject.Inject; import com.google.inject.Singleton; import com.google.inject.persist.Transactional; @@ -38,43 +56,23 @@ import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.Notification; import com.salesforce.dva.argus.entity.ServiceManagementRecord; -import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.entity.ServiceManagementRecord.Service; +import com.salesforce.dva.argus.entity.Trigger; import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; import com.salesforce.dva.argus.service.AlertService; import com.salesforce.dva.argus.service.AuditService; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.DistributedSchedulingLockService; -import com.salesforce.dva.argus.service.MetricService; import com.salesforce.dva.argus.service.GlobalInterlockService.LockType; +import com.salesforce.dva.argus.service.MetricService; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.SchedulingService; import com.salesforce.dva.argus.service.ServiceManagementService; import com.salesforce.dva.argus.service.TSDBService; import com.salesforce.dva.argus.service.UserService; import com.salesforce.dva.argus.service.alert.AlertDefinitionsCache; -import com.salesforce.dva.argus.service.monitor.GaugeExporter; import com.salesforce.dva.argus.system.SystemConfiguration; - -import org.apache.commons.lang.exception.ExceptionUtils; -import org.slf4j.Logger; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; +import org.slf4j.LoggerFactory; /** * Implementation of Distributed scheduling using database @@ -88,8 +86,7 @@ public class DistributedDatabaseSchedulingService extends DefaultService impleme //~ Instance fields ****************************************************************************************************************************** - @SLF4JTypeListener.InjectLogger - private Logger _logger; + private final Logger _logger = LoggerFactory.getLogger(DistributedDatabaseSchedulingService.class); private final AlertService _alertService; private final UserService _userService; private final ServiceManagementService _serviceManagementRecordService; @@ -301,7 +298,7 @@ private class SchedulingThread extends Thread { * Creates a new SchedulingThread object. * * @param name The name of the thread. 
- * @param Schedulingtype Type of the schedule. Cannot be null. + * @param lockType The type of lock to acquire. TODO: this isn't used? */ public SchedulingThread(String name, LockType lockType) { super(name); @@ -446,7 +443,7 @@ public void run() { notification.setTriggers(alert.getTriggers()); } alert = _alertService.updateAlert(alert); - Metric trackerMetric = new Metric("argus.core", "alerts.kpi"); + Metric trackerMetric = new Metric(MonitorService.Counter.ALERTS_KPI.getScope(), MonitorService.Counter.ALERTS_KPI.getMetric()); trackerMetric.setTag("host",SystemConfiguration.getHostname()); Map datapoints = new HashMap<>(); datapoints.put(fiveMinuteStartTime, 1.0); @@ -454,6 +451,7 @@ public void run() { //sleeping for a minute to make sure the new alert is updated in cache sleep(60*1000); + _alertService.exportMetric(trackerMetric, 1.0); try { _tsdbService.putMetrics(Arrays.asList(new Metric[] {trackerMetric})); } catch (Exception ex) { @@ -466,27 +464,26 @@ public void run() { boolean alertEvaluated = false; while(System.currentTimeMillis() < currCycleEndTime) { try { - List metrics = _metricService.getMetrics("-5m:notifications.sent:alert-"+alert.getId().intValue()+":zimsum:1m-sum"); - if(metrics!=null && !metrics.isEmpty()) { - for(Metric metric : metrics) { - if(metric.getDatapoints()!=null && metric.getDatapoints().keySet().size()>0) { - List notificationTimestamps = new ArrayList(metric.getDatapoints().keySet()); - Collections.sort(notificationTimestamps); - long notificationSentTime = notificationTimestamps.get(0); - long alertEvaluationTime = notificationSentTime - metricPublishTime; - alertEvaluated = true; - publishKPIMetric(fiveMinuteStartTime, new Double(alertEvaluationTime)); - } - } + List metrics = _metricService.getMetrics("-5m:argus.alerts:notifications.sent{alertId="+alert.getId().intValue()+"}:zimsum:1m-sum").getMetricsList(); + if(metrics != null && !metrics.isEmpty()) { + _logger.info("AlertEvaluationKPIReporter: Found notifications.sent.alert for dedicated test alert:{}", alert.getId()); + + long notificationSeenTime = System.currentTimeMillis(); + long alertEvaluationTime = notificationSeenTime - metricPublishTime; + alertEvaluated = true; + publishKPIMetric(fiveMinuteStartTime, new Double(alertEvaluationTime)); + break; }else { sleep(10*1000); } - if(alertEvaluated){ - break; - } }catch(Exception ex) { - _logger.info("Exception occured when getting notification related datapoints - "+ ex.getMessage()); - sleep(10*1000); + //NOTE: do not log an error for missing expected metrics, as they may take time to become available + if (!ex.getMessage().startsWith("Failed to get metrics")){ + _logger.error("AlertEvaluationKPIReporter: Hit exception when generating alert.evaluation.kpi metrics - "+ ex.getMessage()); + } else { + _logger.warn("AlertEvaluationKPIReporter: Expected test alert {}:{} has not been sent yet, waiting 10 more seconds...", alert.getId(), alert.getName()); + } + sleep(10*1000); } } @@ -497,7 +494,7 @@ public void run() { } catch(Exception e) { _logger.error("Exception occured when computing alert evaluation kpi metric - "+ ExceptionUtils.getFullStackTrace(e)); } finally { - _logger.error("marking alert with name {} and id {} for deletion", alert.getName(), alert.getId() == null? null: alert.getId().intValue()); + _logger.info("marking alert with name {} and id {} for deletion", alert.getName(), alert.getId() == null? 
null: alert.getId().intValue()); _alertService.markAlertForDeletion(alert.getName(), _userService.findAdminUser()); } } @@ -518,4 +515,4 @@ private void publishKPIMetric(long timestamp, Double kpiValue) { } } } -/* Copyright (c) 2018, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2018, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedZookeeperSchedulingService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedZookeeperSchedulingService.java deleted file mode 100644 index 2d07c8eeb..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schedule/DistributedZookeeperSchedulingService.java +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ - -package com.salesforce.dva.argus.service.schedule; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Properties; - -import org.apache.zookeeper.KeeperException; -import org.quartz.CronScheduleBuilder; -import org.quartz.CronTrigger; -import org.quartz.JobBuilder; -import org.quartz.JobDetail; -import org.quartz.Scheduler; -import org.quartz.SchedulerException; -import org.quartz.SchedulerFactory; -import org.quartz.TriggerBuilder; -import org.quartz.impl.StdSchedulerFactory; -import org.slf4j.Logger; - -import com.google.inject.Inject; -import com.google.inject.Singleton; -import com.google.inject.persist.Transactional; -import com.salesforce.dva.argus.entity.CronJob; -import com.salesforce.dva.argus.entity.JPAEntity; -import com.salesforce.dva.argus.entity.ServiceManagementRecord; -import com.salesforce.dva.argus.entity.ServiceManagementRecord.Service; -import com.salesforce.dva.argus.inject.SLF4JTypeListener; -import com.salesforce.dva.argus.service.AlertService; -import com.salesforce.dva.argus.service.AuditService; -import com.salesforce.dva.argus.service.DefaultService; -import com.salesforce.dva.argus.service.GlobalInterlockService.LockType; -import com.salesforce.dva.argus.service.SchedulingService; -import com.salesforce.dva.argus.service.ServiceManagementService; -import com.salesforce.dva.argus.service.UserService; -import com.salesforce.dva.argus.system.SystemConfiguration; -import com.salesforce.dva.argus.util.zookeeper.Barrier; -import com.salesforce.dva.argus.util.zookeeper.ClientNode; -import com.salesforce.dva.argus.util.zookeeper.ClientNode.ClientsResult; - -/** - * Distributed scheduling using zookeeper - * Each node/server will schedule upto a fixed configured number of jobs. - * When one worker node goes down, another spare do nothing node will take its place. If there are not enough worker nodes, then system will - * not schedule any alerts since it waits for minimum number of worker nodes to be available. - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - */ -@Singleton -public class DistributedZookeeperSchedulingService extends DefaultService implements SchedulingService { - - //~ Static fields/initializers ******************************************************************************************************************* - - private static final long SCHEDULER_REFRESH_JOBS_PERIOD_MS = 1000L * 60L * 15L; - private static final String QUARTZ_THREADPOOL_COUNT = "org.quartz.threadPool.threadCount"; - private static final String QUARTZ_THREAD_PRIORITY = "org.quartz.threadPool.threadPriority"; - private static final String QUARTZ_THREAD_PRIORITY_VALUE = "3"; - - //~ Instance fields ****************************************************************************************************************************** - - @SLF4JTypeListener.InjectLogger - private Logger _logger; - private final AlertService _alertService; - private final UserService _userService; - private final ServiceManagementService _serviceManagementRecordService; - private final AuditService _auditService; - private Thread _alertSchedulingThread; - private SystemConfiguration _configuration; - - //~ Constructors ********************************************************************************************************************************* - - /** - * Creates a new QuartzSchedulingZookeeperService object. - * - * @param alertService The alert service instance to use. 
Cannot be null. - * @param userService The user service instance to use. Cannot be null. - * @param serviceManagementRecordService The serviceManagementRecordService instance to use. Cannot be null. - * @param auditService The audit service. Cannot be null. - * @param config The system configuration used to configure the service. - */ - @Inject - DistributedZookeeperSchedulingService(AlertService alertService, UserService userService, - ServiceManagementService serviceManagementRecordService, AuditService auditService, SystemConfiguration config) { - super(config); - requireArgument(alertService != null, "Alert service cannot be null."); - requireArgument(userService != null, "User service cannot be null."); - requireArgument(serviceManagementRecordService != null, "Service management record service cannot be null."); - requireArgument(auditService != null, "Audit service cannot be null."); - requireArgument(config != null, "System configuration cannot be null."); - _alertService = alertService; - _userService = userService; - _serviceManagementRecordService = serviceManagementRecordService; - _auditService = auditService; - _configuration = config; - } - - //~ Methods ************************************************************************************************************************************** - - @Override - @Transactional - public synchronized void startAlertScheduling() { - requireNotDisposed(); - if (_alertSchedulingThread != null && _alertSchedulingThread.isAlive()) { - _logger.info("Request to start alert scheduling aborted as it is already running."); - } else { - _logger.info("Starting alert scheduling thread."); - _alertSchedulingThread = new SchedulingThread("schedule-alerts", LockType.ALERT_SCHEDULING); - _alertSchedulingThread.start(); - _logger.info("Alert scheduling thread started."); - } - } - - @Override - public synchronized void dispose() { - stopAlertScheduling(); - super.dispose(); - _serviceManagementRecordService.dispose(); - _alertService.dispose(); - _userService.dispose(); - } - - @Override - public synchronized void stopAlertScheduling() { - requireNotDisposed(); - if (_alertSchedulingThread != null && _alertSchedulingThread.isAlive()) { - _logger.info("Stopping alert scheduling"); - _alertSchedulingThread.interrupt(); - _logger.info("Alert scheduling thread interrupted."); - try { - _logger.info("Waiting for alert scheduling thread to terminate."); - _alertSchedulingThread.join(); - } catch (InterruptedException ex) { - _logger.warn("Alert job scheduler was interrupted while shutting down."); - } - _logger.info("Alert job scheduling stopped."); - } else { - _logger.info("Requested shutdown of alert scheduling aborted as it is not yet running."); - } - } - - @Override - @Transactional - public synchronized void enableScheduling() { - requireNotDisposed(); - _logger.info("Globally enabling all scheduling."); - _setServiceEnabled(true); - _logger.info("All scheduling globally enabled."); - } - - @Override - @Transactional - public synchronized void disableScheduling() { - requireNotDisposed(); - _logger.info("Globally disabling all scheduling."); - _setServiceEnabled(false); - _logger.info("All scheduling globally disabled."); - } - - @Transactional - private boolean _isSchedulingServiceEnabled() { - synchronized (_serviceManagementRecordService) { - return _serviceManagementRecordService.isServiceEnabled(Service.SCHEDULING); - } - } - - /** - * Enables the scheduling service. - * - * @param enabled True to enable, false to disable. 
- */ - @Transactional - protected void _setServiceEnabled(boolean enabled) { - synchronized (_serviceManagementRecordService) { - ServiceManagementRecord record = _serviceManagementRecordService.findServiceManagementRecord(Service.SCHEDULING); - - if (record == null) { - record = new ServiceManagementRecord(_userService.findAdminUser(), Service.SCHEDULING, enabled); - } - record.setEnabled(enabled); - _serviceManagementRecordService.updateServiceManagementRecord(record); - } - } - - //~ Enums **************************************************************************************************************************************** - - /** - * The implementation specific configuration properties. - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - */ - public enum Property { - - /** Specifies the number of threads used for scheduling. Defaults to 1. */ - QUARTZ_THREADPOOL_COUNT("service.property.scheduling.quartz.threadPool.threadCount", "1"), - ZOOKEEPER_CONNECT("service.property.scheduling.zookeeper.connect", "localhost:2185"), - BARRIER_ROOT_PATH("service.property.scheduling.zookeeper.barrierRootPath", "/barrier"), - ELECTION_ROOT_PATH("service.property.scheduling.zookeeper.electionRootPath", "/election"), - QUARTZ_MAX_JOBS_PER_SCHEDULER("service.property.scheduling.zookeeper.maxJobsPerScheduler", "4000"); - - private final String _name; - private final String _defaultValue; - - private Property(String name, String defaultValue) { - _name = name; - _defaultValue = defaultValue; - } - - /** - * Returns the name of the property. - * - * @return The name of the property. - */ - public String getName() { - return _name; - } - - /** - * Returns the default property value. - * - * @return The default property value. - */ - public String getDefaultValue() { - return _defaultValue; - } - } - - //~ Inner Classes ******************************************************************************************************************************** - - /** - * Job scheduler. - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - */ - public class SchedulingThread extends Thread { - - private final LockType lockType; - ClientNode clientNode; - - /** - * Creates a new SchedulingThread object. - * - * @param name The name of the thread. - * @param lockType Type of the lock. Cannot be null. - */ - public SchedulingThread(String name, LockType lockType) { - super(name); - this.lockType = lockType; - } - - /** - * Fetch specified number of enabled CRON jobs from database, for this node. - * - * @return returns specified number enabled jobs for the given job type. 
- */ - protected List getEnabledJobs() { - List result = new ArrayList<>(); - - if (!isDisposed()) { - if (LockType.ALERT_SCHEDULING.equals(lockType)) { - int maxJobsPerSchedulerInstance = Integer.parseInt(_configuration.getValue(Property.QUARTZ_MAX_JOBS_PER_SCHEDULER.getName(), - Property.QUARTZ_MAX_JOBS_PER_SCHEDULER.getDefaultValue())); - ClientsResult clientsResult = clientNode.calculateAllNodesResult(); - _logger.info("Retreiving upto {} enabled alerts to schedule for this node {}.", maxJobsPerSchedulerInstance, - clientsResult.getNodePosition()); - int offset; - synchronized (_alertService) { - if(clientsResult.getNodePosition() == 0) { - offset = 0; - } else { - offset = clientsResult.getNodePosition() * maxJobsPerSchedulerInstance + 1; - } - result.addAll(_alertService.findAlertsByLimitOffsetStatus(maxJobsPerSchedulerInstance, offset,true)); - } - _logger.info("Retrieved {} alerts from offset {}.", result.size(), offset); - } - } - return result; - } - - @Override - public void run() { - Scheduler scheduler = null; - int maxJobsPerSchedulerInstance = Integer.parseInt(_configuration.getValue(Property.QUARTZ_MAX_JOBS_PER_SCHEDULER.getName(), - Property.QUARTZ_MAX_JOBS_PER_SCHEDULER.getDefaultValue())); - - while (!isInterrupted()) { - if (_isSchedulingServiceEnabled()) { - String zookeeperURL = _configuration.getValue(Property.ZOOKEEPER_CONNECT.getName(), Property.ZOOKEEPER_CONNECT.getDefaultValue()); - String electionRootPath = _configuration.getValue(Property.ELECTION_ROOT_PATH.getName(), Property.ELECTION_ROOT_PATH.getDefaultValue()); - - if (clientNode == null) { - try { - clientNode = new ClientNode(zookeeperURL, electionRootPath); - clientNode.run(); - } catch (IOException | IllegalStateException ex) { - _logger.error("Failed to connect client node to zookeeper service. Retrying", ex.getMessage()); - continue; - } - } - - while (!isInterrupted() && clientNode != null && _isSchedulingServiceEnabled()) { - Barrier barrier = null; - try{ - int totalEnabledJobs = _alertService.alertCountByStatus(true); - int nodesRequired = (totalEnabledJobs + maxJobsPerSchedulerInstance -1) / maxJobsPerSchedulerInstance; - String barrierRootPath = _configuration.getValue(Property.BARRIER_ROOT_PATH.getName(), Property.BARRIER_ROOT_PATH.getDefaultValue()); - try{ - barrier = new Barrier(zookeeperURL, barrierRootPath, nodesRequired); - _logger.info("Waiting for {} nodes to enter barrier, to schedule {} jobs", nodesRequired, totalEnabledJobs); - boolean flag = barrier.enter(); - _logger.info("Entered barrier"); - if(!flag) { - _logger.error("Error when entering the barrier"); - } - } catch (KeeperException e){ - _logger.error("Error when entering the barrier. 
Retry {}", e); - Barrier.setZookeeper(null); - continue; - } catch (InterruptedException e){ - _logger.error("Error when entering the barrier: {}",e); - } - - scheduler = _refreshJobSchedule(scheduler); - } catch(IllegalStateException e) { - _logger.error("Failed to refresh jobs due to zookeeper exception: {}", e); - clientNode = null; - continue; - } finally { - if (Barrier.getZooKeeper() != null) { - try{ - _logger.info("Waiting to leave barrier"); - if(barrier != null) barrier.leave(); - } catch (KeeperException e){ - _logger.error("Exception {}",e); - } catch (InterruptedException e){ - _logger.error("Exception {}",e); - } - _logger.info("Left barrier"); - } - } - sleepBeforeRefreshScheduler(); - } - } - } - _disposeScheduler(scheduler); - } - - /** - * Refreshes the job schedule with the current list of enabled jobs of the type to be scheduled. - * - * @param scheduler The scheduler to update. - * - * @return The updated scheduler. - * @throws IllegalStateException On illegal state - */ - protected Scheduler _refreshJobSchedule(Scheduler scheduler) throws IllegalStateException { - _disposeScheduler(scheduler); - - String schedulerName = null; - Properties props = new Properties(); - - // Set quartz worker thread properties - props.put(QUARTZ_THREADPOOL_COUNT, - _configuration.getValue(Property.QUARTZ_THREADPOOL_COUNT.getName(), Property.QUARTZ_THREADPOOL_COUNT.getDefaultValue())); - props.put(QUARTZ_THREAD_PRIORITY, QUARTZ_THREAD_PRIORITY_VALUE); - props.put(StdSchedulerFactory.PROP_SCHED_SCHEDULER_THREADS_INHERIT_CONTEXT_CLASS_LOADER_OF_INITIALIZING_THREAD, true); - - /* Have multiple scheduler instances for different job types, so that when - * we stop the previous instance of a scheduler during the refresh cycle it does not affect another scheduler. 
- */ - switch (Thread.currentThread().getName()) { - case "schedule-alerts": - default: - schedulerName = "AlertScheduler"; - } - props.put(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, schedulerName); - - SchedulerFactory schedulerFactory; - Scheduler result = null; - - try { - schedulerFactory = new StdSchedulerFactory(props); - result = schedulerFactory.getScheduler(); - } catch (Exception e) { - _logger.error("Exception in setting up scheduler: {}", e); - return result; - } - for (CronJob job : getEnabledJobs()) { - _logger.debug("Adding job to scheduler: {}", job); - try { - // Convert from linux cron to quartz cron expression - String quartzCronEntry = "0 " + job.getCronEntry().substring(0, job.getCronEntry().length() - 1) + "?"; - JobDetail jobDetail = JobBuilder.newJob(RunnableJob.class).build(); - CronTrigger cronTrigger = TriggerBuilder.newTrigger().withSchedule(CronScheduleBuilder.cronSchedule(quartzCronEntry)).build(); - - // Pass parameter to quartz worker threads - jobDetail.getJobDataMap().put(RunnableJob.CRON_JOB, job); - jobDetail.getJobDataMap().put(RunnableJob.LOCK_TYPE, lockType); - jobDetail.getJobDataMap().put("AlertService", _alertService); - jobDetail.getJobDataMap().put("AuditService", _auditService); - result.scheduleJob(jobDetail, cronTrigger); - } catch (Exception ex) { - String msg = "Failed to schedule job {0} : {1}"; - JPAEntity entity = JPAEntity.class.cast(job); - - _auditService.createAudit(msg, entity, entity, ex.getMessage()); - _logger.error("Failed to schedule job {} : {}", job, ex.getMessage()); - } - } - try { - result.start(); - } catch (SchedulerException e) { - _logger.error("Exception in starting scheduler: {}", e); - } - _logger.info("Job schedule refreshed."); - return result; - } - - private void sleepBeforeRefreshScheduler() { - try { - _logger.info("Sleeping for {}s before refreshing scheduler jobs", SCHEDULER_REFRESH_JOBS_PERIOD_MS / 1000, lockType); - sleep(SCHEDULER_REFRESH_JOBS_PERIOD_MS); - } catch (InterruptedException ex) { - _logger.warn("Scheduling was interrupted."); - interrupt(); - } - } - - private void _disposeScheduler(Scheduler scheduler) { - if (scheduler != null) { - try { - scheduler.shutdown(); - - /* Add a small sleep so Tomcat does not complain - the web application has started a thread, - * but has failed to stop it.This is very likely to create a memory leak. - */ - Thread.sleep(2000); - } catch (SchedulerException e) { - _logger.error("Quartz failed to shutdown {}", e); - } catch (InterruptedException e) { - _logger.warn("Shutdown of quartz scheduler was interrupted."); - Thread.currentThread().interrupt(); - } - } - } - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AbstractSchemaService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AbstractSchemaService.java index e6f4fe83f..f0686ad19 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AbstractSchemaService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AbstractSchemaService.java @@ -1,48 +1,45 @@ package com.salesforce.dva.argus.service.schema; +import com.google.common.collect.ImmutableMap; +import com.google.common.hash.BloomFilter; +import com.google.common.hash.Funnels; +import com.google.common.hash.HashFunction; +import com.google.common.hash.Hashing; +import com.salesforce.dva.argus.entity.AbstractSchemaRecord; +import com.salesforce.dva.argus.entity.KeywordQuery; +import com.salesforce.dva.argus.entity.MetatagsRecord; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.MetricSchemaRecord; +import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.system.SystemAssert; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.HashSet; import java.util.List; -import java.util.Map.Entry; +import java.util.Map; import java.util.Properties; +import java.util.Random; import java.util.Set; -import java.util.HashMap; -import java.util.Map; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.File; - import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang3.tuple.Pair; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.common.hash.BloomFilter; -import com.google.common.hash.Funnels; - -import com.salesforce.dva.argus.entity.KeywordQuery; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.entity.MetricSchemaRecord; -import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; -import com.salesforce.dva.argus.entity.MetatagsRecord; -import com.salesforce.dva.argus.service.DefaultService; -import com.salesforce.dva.argus.service.MonitorService; -import com.salesforce.dva.argus.service.SchemaService; -import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.system.SystemConfiguration; - /** * Implementation of the abstract schema service class * @@ -50,86 +47,53 @@ */ public abstract class AbstractSchemaService extends DefaultService implements SchemaService { private static final long POLL_INTERVAL_MS = 10 * 60 * 1000L; - private static final int DAY_IN_SECONDS = 24 * 60 * 60; - private static final int HOUR_IN_SECONDS = 60 * 60; - - /* Have three separate bloom filters one for metrics schema, one only for scope names schema 
and one only for scope name and metric name schema. - * Since scopes will continue to repeat more often on subsequent kafka batch reads, we can easily check this from the bloom filter for scopes only. - * Hence we can avoid the extra call to populate scopenames index on ES in subsequent Kafka reads. - * The same logic applies to scope name and metric name schema. - */ - protected static BloomFilter bloomFilter; - protected static BloomFilter bloomFilterScopeOnly; - protected static BloomFilter bloomFilterScopeAndMetricOnly; - protected static BloomFilter bloomFilterMetatags; + static BloomFilter bloomFilter; //this used to be called modifiedBloom, but that name could mislead readers of the code protected final MonitorService _monitorService; - private int randomBloomAppend; - private int bloomFilterExpectedNumberInsertions; - private double bloomFilterErrorRate; - private int bloomFilterScopeOnlyExpectedNumberInsertions; - private double bloomFilterScopeOnlyErrorRate; - private int bloomFilterScopeAndMetricOnlyExpectedNumberInsertions; - private double bloomFilterScopeAndMetricOnlyErrorRate; - private int bloomFilterMetatagsExpectedNumberInsertions; - private double bloomFilterMetatagsErrorRate; + + + private int modifiedBloomExpectedNumberInsertions; + private double modifiedBloomErrorRate; + private final Logger _logger = LoggerFactory.getLogger(getClass()); private final Thread _bloomFilterMonitorThread; - protected final boolean _syncPut; - private int bloomFilterFlushHourToStartAt; + private final Map bloomFilterMonitorTags; + private final SystemConfiguration config; + final boolean _syncPut; private ScheduledExecutorService scheduledExecutorService; - private String bfTagsStateFilename; - protected final boolean bloomFileWritingEnabled; - + private String modifiedBloomFileName; + private final boolean bloomFileWritingEnabled; + private boolean modifiedBloomClearingEnabled; + private int modifiedBloomClearingPeriodHours; + private int modifiedBloomFlushPeriodHours; protected AbstractSchemaService(SystemConfiguration config, MonitorService monitorService) { super(config); - + this.config = config; _monitorService = monitorService; - try { - randomBloomAppend = Math.abs(InetAddress.getLocalHost().getHostName().hashCode()); - } catch (IOException io) { - _logger.error("failed to create randomBloomAppend", io); - randomBloomAppend = 12345; - } bloomFileWritingEnabled = Boolean.parseBoolean(config.getValue(Property.BLOOM_FILE_WRITING_ENABLED.getName(), - Property.BLOOM_FILE_WRITING_ENABLED.getDefaultValue())); - - String bfStateBaseDir = config.getValue(Property.BF_STATE_BASE_DIR.getName(), - Property.BF_STATE_BASE_DIR.getDefaultValue()); - bfTagsStateFilename = bfStateBaseDir + "/bloomfilter_tags.state." 
+ - config.getValue(SystemConfiguration.ARGUS_INSTANCE_ID, "noid"); - - bloomFilterExpectedNumberInsertions = Integer.parseInt(config.getValue(Property.BLOOMFILTER_EXPECTED_NUMBER_INSERTIONS.getName(), - Property.BLOOMFILTER_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); - bloomFilterErrorRate = Double.parseDouble(config.getValue(Property.BLOOMFILTER_ERROR_RATE.getName(), - Property.BLOOMFILTER_ERROR_RATE.getDefaultValue())); - - bloomFilterScopeOnlyExpectedNumberInsertions = Integer.parseInt(config.getValue(Property.BLOOMFILTER_SCOPE_ONLY_EXPECTED_NUMBER_INSERTIONS.getName(), - Property.BLOOMFILTER_SCOPE_ONLY_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); - bloomFilterScopeOnlyErrorRate = Double.parseDouble(config.getValue(Property.BLOOMFILTER_SCOPE_ONLY_ERROR_RATE.getName(), - Property.BLOOMFILTER_SCOPE_ONLY_ERROR_RATE.getDefaultValue())); - - bloomFilterScopeAndMetricOnlyExpectedNumberInsertions = Integer.parseInt(config.getValue(Property.BLOOMFILTER_SCOPE_AND_METRIC_ONLY_EXPECTED_NUMBER_INSERTIONS.getName(), - Property.BLOOMFILTER_SCOPE_AND_METRIC_ONLY_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); - bloomFilterScopeAndMetricOnlyErrorRate = Double.parseDouble(config.getValue(Property.BLOOMFILTER_SCOPE_AND_METRIC_ONLY_ERROR_RATE.getName(), - Property.BLOOMFILTER_SCOPE_AND_METRIC_ONLY_ERROR_RATE.getDefaultValue())); - - bloomFilterMetatagsExpectedNumberInsertions = - Integer.parseInt(config.getValue(Property.BLOOMFILTER_METATAGS_EXPECTED_NUMBER_INSERTIONS.getName(), - Property.BLOOMFILTER_METATAGS_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); - bloomFilterMetatagsErrorRate = - Double.parseDouble(config.getValue(Property.BLOOMFILTER_METATAGS_ERROR_RATE.getName(), - Property.BLOOMFILTER_METATAGS_ERROR_RATE.getDefaultValue())); - createOrReadBloomFilter(); - - bloomFilterScopeOnly = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), bloomFilterScopeOnlyExpectedNumberInsertions , bloomFilterScopeOnlyErrorRate); - bloomFilterScopeAndMetricOnly = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterScopeAndMetricOnlyExpectedNumberInsertions , bloomFilterScopeAndMetricOnlyErrorRate); - bloomFilterMetatags = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterMetatagsExpectedNumberInsertions , bloomFilterMetatagsErrorRate); + Property.BLOOM_FILE_WRITING_ENABLED.getDefaultValue())); + modifiedBloomClearingEnabled = Boolean.parseBoolean(config.getValue(Property.MODIFIED_BLOOM_CLEARING_ENABLED.getName(), + Property.MODIFIED_BLOOM_CLEARING_ENABLED.getDefaultValue())); + modifiedBloomClearingPeriodHours = Integer.parseInt(config.getValue(Property.MODIFIED_BLOOM_CLEARING_PERIOD_HOURS.getName(), + Property.MODIFIED_BLOOM_CLEARING_PERIOD_HOURS.getDefaultValue())); + modifiedBloomFlushPeriodHours = Integer.parseInt(config.getValue(Property.MODIFIED_BLOOM_FLUSH_PERIOD_HOURS.getName(), + Property.MODIFIED_BLOOM_FLUSH_PERIOD_HOURS.getDefaultValue())); + + String bfStateBaseDir = config.getValue(Property.BF_STATE_BASE_DIR.getName(), + Property.BF_STATE_BASE_DIR.getDefaultValue()); + modifiedBloomFileName = bfStateBaseDir + "/modified_bloom.state." 
+ + config.getValue(SystemConfiguration.ARGUS_INSTANCE_ID, "noid"); + bloomFilterMonitorTags = new ImmutableMap.Builder() + .put("instanceId", config.getValue(SystemConfiguration.ARGUS_INSTANCE_ID, "noid")) + .build(); + modifiedBloomExpectedNumberInsertions = Integer.parseInt(config.getValue(Property.MODIFIED_BLOOM_EXPECTED_NUMBER_INSERTIONS.getName(), + Property.MODIFIED_BLOOM_EXPECTED_NUMBER_INSERTIONS.getDefaultValue())); + modifiedBloomErrorRate = Double.parseDouble(config.getValue(Property.MODIFIED_BLOOM_ERROR_RATE.getName(), + Property.MODIFIED_BLOOM_ERROR_RATE.getDefaultValue())); + bloomFilter = createOrReadBloomFilter(modifiedBloomFileName, modifiedBloomExpectedNumberInsertions, modifiedBloomErrorRate); _syncPut = Boolean.parseBoolean( config.getValue(Property.SYNC_PUT.getName(), Property.SYNC_PUT.getDefaultValue())); @@ -137,8 +101,11 @@ protected AbstractSchemaService(SystemConfiguration config, MonitorService monit _bloomFilterMonitorThread = new Thread(new BloomFilterMonitorThread(), "bloom-filter-monitor"); _bloomFilterMonitorThread.start(); - bloomFilterFlushHourToStartAt = getBloomFilterFlushHourToStartAt(); - createScheduledExecutorService(bloomFilterFlushHourToStartAt); + createScheduledExecutorService(); + } + + void clearBlooms() { + bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), modifiedBloomExpectedNumberInsertions, modifiedBloomErrorRate); } @Override @@ -153,83 +120,59 @@ public void put(List metrics) { requireNotDisposed(); SystemAssert.requireArgument(metrics != null, "Metric list cannot be null."); - // Create a list of metricsToPut that do not exist on the BLOOMFILTER and then call implementation - // specific put with only those subset of metricsToPut. - List metricsToPut = new ArrayList<>(metrics.size()); - Set scopesToPut = new HashSet<>(metrics.size()); - - Set> scopesAndMetricsNamesToPut = new HashSet<>(metrics.size()); - Map metatagsToPut = new HashMap<>(); + Set metricsToIndex = new HashSet<>(metrics.size()); + Set scopesToIndex = new HashSet<>(); + Set metatagsToIndex = new HashSet<>(); for(Metric metric : metrics) { - // check metric schema bloom filter + // index the schema records that haven't been seen by the bloom if(metric.getTags().isEmpty()) { - // if metric does not have tags - String key = constructKey(metric, null, null); - boolean found = bloomFilter.mightContain(key); - if(!found) { - metricsToPut.add(metric); + String key = AbstractSchemaRecord.constructKey(metric, null, null); + if (!bloomFilter.mightContain(key)) { + metricsToIndex.add(metric); } } else { - // if metric has tags - boolean newTags = false; - for(Entry tagEntry : metric.getTags().entrySet()) { - String key = constructKey(metric, tagEntry.getKey(), tagEntry.getValue()); - boolean found = bloomFilter.mightContain(key); - if(!found) { - newTags = true; + metric.getTags().forEach((tagk, tagv) -> {String key = AbstractSchemaRecord.constructKey(metric, tagk, tagv); + if (!bloomFilter.mightContain(key)) { + metricsToIndex.add(metric); } - } - - if(newTags) { - metricsToPut.add(metric); - } + }); } + // index the scopes that haven't been seen by the bloom String scopeName = metric.getScope(); - String metricName = metric.getMetric(); - - // Check scope only bloom filter - String key = constructScopeOnlyKey(scopeName); - boolean found = bloomFilterScopeOnly.mightContain(key); - if(!found) { - scopesToPut.add(scopeName); + String key = AbstractSchemaRecord.constructKey(scopeName); + if (!bloomFilter.mightContain(key)) { + 
scopesToIndex.add(scopeName); } - // Check scope and metric only bloom filter - key = constructScopeAndMetricOnlyKey(scopeName, metricName); - found = bloomFilterScopeAndMetricOnly.mightContain(key); - if(!found) { - scopesAndMetricsNamesToPut.add(Pair.of(scopeName, metricName)); + // index the metatags that haven't been seen by the bloom + MetatagsRecord mtags = metric.getMetatagsRecord(); + if(mtags != null) { + key = mtags.getKey(); + if(key != null) { + if (!bloomFilter.mightContain(key)) { + metatagsToIndex.add(mtags); + } + } } - - // Check if metatags are present and populate accordingly - MetatagsRecord mtags = metric.getMetatagsRecord(); - if(mtags != null) { - key = mtags.getKey(); - if(key != null) { - found = bloomFilterMetatags.mightContain(key); - if(!found) { - metatagsToPut.put(key, mtags); - } - } - } } - implementationSpecificPut(metricsToPut, scopesToPut, scopesAndMetricsNamesToPut, metatagsToPut); + implementationSpecificPut( + metricsToIndex, + scopesToIndex, + metatagsToIndex + ); } - /* - * Calls the implementation specific write for indexing the records - * - * @param metrics The metrics metadata that will be written to a separate index. - * @param scopeNames The scope names that will be written to a separate index. - * @param scopesAndMetricNames The scope and metric names that will be written to a separate index. + /** + * @param metricsToIndex Metrics not seen by this instance before + * @param scopesToIndex Scopes not seen by this instance before + * @param metatagsToIndex Metatags not seen by this instance before */ - protected abstract void implementationSpecificPut(List metrics, - Set scopeNames, - Set> scopesAndMetricNames, - Map metatagsToPut); + protected abstract void implementationSpecificPut(Set metricsToIndex, + Set scopesToIndex, + Set metatagsToIndex); @Override public void dispose() { @@ -266,123 +209,81 @@ public void dispose() { @Override public abstract List keywordSearch(KeywordQuery query); - protected String constructKey(Metric metric, String tagk, String tagv) { - return constructKey(metric.getScope(), - metric.getMetric(), - tagk, - tagv, - metric.getNamespace(), - metric.getMetatagsRecord()==null?null:metric.getMetatagsRecord().getMetatagValue(MetricSchemaRecord.RETENTION_DISCOVERY)); - } - - protected String constructKey(String scope, String metric, String tagk, String tagv, String namespace, String retention) { - - StringBuilder sb = new StringBuilder(scope); - - if(!StringUtils.isEmpty(metric)) { - sb.append('\0').append(metric); - } - - if(!StringUtils.isEmpty(namespace)) { - sb.append('\0').append(namespace); - } - - if(!StringUtils.isEmpty(tagk)) { - sb.append('\0').append(tagk); - } - - if(!StringUtils.isEmpty(tagv)) { - sb.append('\0').append(tagv); - } - - //there is use case where users simply want to update the retention without touching rest of a metric - if(!StringUtils.isEmpty(retention)) { - sb.append('\0').append(retention); - } - - // Add randomness for each instance of bloom filter running on different - // schema clients to reduce probability of false positives that metric schemas are not written to ES - sb.append('\0').append(randomBloomAppend); - - return sb.toString(); + protected int getNumSecondsUntilNthHourOfDay(int nthHour, Calendar fromCalendar) { + int hour = fromCalendar.get(Calendar.HOUR_OF_DAY); + int secondsPastHour = fromCalendar.get(Calendar.MINUTE) * 60; + int hoursUntil = hour < nthHour ? 
(nthHour - hour) : (nthHour + 24 - hour); + return hoursUntil * 60 * 60 - secondsPastHour; } - protected String constructScopeOnlyKey(String scope) { - - return constructKey(scope, null, null, null, null, null); - } + protected int getNumSecondsUntilNthHourOfWeek(int nthHour, Calendar fromCalendar) { - protected String constructScopeAndMetricOnlyKey(String scope, String metric) { + // Sunday == 1; Saturday == 7 + int day = fromCalendar.get(Calendar.DAY_OF_WEEK) - 1; + int hour = fromCalendar.get(Calendar.HOUR_OF_DAY); + // The current nth hour of the Sunday - Sat week + int currNthHour = day * 24 + hour; - return constructKey(scope, metric, null, null, null, null); + int hoursUntil = currNthHour < nthHour ? (nthHour - currNthHour) : (nthHour + 7*24 - currNthHour); + int secondsPastHour = fromCalendar.get(Calendar.MINUTE) * 60; + return hoursUntil * 60 * 60 - secondsPastHour; } - protected int getNumHoursUntilTargetHour(int targetHour){ - _logger.info("Initialized bloom filter flushing out, at {} hour of day", targetHour); - Calendar calendar = Calendar.getInstance(); - int hour = calendar.get(Calendar.HOUR_OF_DAY); - return hour < targetHour ? (targetHour - hour) : (targetHour + 24 - hour); - } - - /* - * Have a different flush start hour for schema committers based on hostname, to prevent thundering herd problem. - */ - private int getBloomFilterFlushHourToStartAt() { - int bloomFilterFlushHourToStartAt = 0; - try { - bloomFilterFlushHourToStartAt = Math.abs(InetAddress.getLocalHost().getHostName().hashCode() % 24); - } catch (UnknownHostException e) { - _logger.warn("BloomFilter UnknownHostException", e); + private BloomFilter createOrReadBloomFilter(String filename, int expectedNumberInsertions, double errorRate) { + File bfFile = new File(filename); + if (bloomFileWritingEnabled && bfFile.exists()) { + _logger.info("Bloomfilter state file {} exists, using it to pre-populate bloom", filename); + try (InputStream inputStream = new FileInputStream(bfFile)) { + return BloomFilter.readFrom(inputStream, Funnels.stringFunnel(Charset.defaultCharset())); + } catch (IOException io) { + _logger.error("Bloomfilter state file {} read error, not using prev state: {}", filename, io); + return BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), expectedNumberInsertions, errorRate); + } } - _logger.info("BloomFilter flush hour to start at {}th hour of day", bloomFilterFlushHourToStartAt); - return bloomFilterFlushHourToStartAt; - } - - private void createOrReadBloomFilter() { - File bfFile = new File(this.bfTagsStateFilename); - if (bloomFileWritingEnabled && bfFile.exists() ) { - _logger.info("State file for bloom tags exists, using it to pre-populate bloom"); - try (InputStream inputStream = new FileInputStream(bfFile)) { - this.bloomFilter = BloomFilter.readFrom(inputStream, - Funnels.stringFunnel(Charset.defaultCharset())); - } catch (IOException io) { - _logger.error("tags bloomfilter read error, not using prev state", io); - this.bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterExpectedNumberInsertions , - bloomFilterErrorRate); - } - return; - } - - _logger.info("State file for bloom tags NOT present or bloomFileWritingEnabled is false, starting fresh bloom"); - this.bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterExpectedNumberInsertions , - bloomFilterErrorRate); + _logger.info("Bloomfilter state file {} NOT present or bloomFileWritingEnabled is false, starting fresh bloom", filename); + 
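// Guava sizes the underlying bit array and hash-function count from expectedNumberInsertions and errorRate, so inserting far more elements than expected degrades the actual false-positive rate. +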
return BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), expectedNumberInsertions, errorRate); } - private void writeTagsBloomFilterToFile() { + private void writeBloomsToFile() { if (!bloomFileWritingEnabled) { return; } - File bfTagsFile = new File(this.bfTagsStateFilename); - if (!bfTagsFile.getParentFile().exists()) { - bfTagsFile.getParentFile().mkdir(); - } - try (OutputStream out = new FileOutputStream(bfTagsFile)) { - bloomFilter.writeTo(out); - _logger.info("Succesfully wrote tags bloomfilter to file {}", this.bfTagsStateFilename); - } catch (IOException io) { - _logger.error("Failed to write tags bf to file", io); - } + File modifiedBloomFile = new File(this.modifiedBloomFileName); + if (!modifiedBloomFile.getParentFile().exists()) { + modifiedBloomFile.getParentFile().mkdirs(); + } + try (OutputStream out = new FileOutputStream(modifiedBloomFile)) { + bloomFilter.writeTo(out); + _logger.info("Successfully wrote bloomfilter to file {}", this.modifiedBloomFileName); + } catch (IOException io) { + _logger.error("Failed to write to bloomFilter file", io); + } } - private void createScheduledExecutorService(int targetHourToStartAt){ + private void createScheduledExecutorService() { + String toHash; + try { + toHash = InetAddress.getLocalHost().getHostName() + config.getValue(config.ARGUS_INSTANCE_ID, "noid"); + } catch (UnknownHostException ex) { + toHash = String.valueOf(new Random().nextDouble()); + } + HashFunction hf = Hashing.murmur3_128(); + int hourHash = Math.abs(hf.newHasher().putString(toHash, Charset.defaultCharset()).hash().asInt()); scheduledExecutorService = Executors.newScheduledThreadPool(1); - int initialDelayInSeconds = getNumHoursUntilTargetHour(targetHourToStartAt) * HOUR_IN_SECONDS; - BloomFilterFlushThread bloomFilterFlushThread = new BloomFilterFlushThread(); - scheduledExecutorService.scheduleAtFixedRate(bloomFilterFlushThread, initialDelayInSeconds, DAY_IN_SECONDS, TimeUnit.SECONDS); + + int nthFlushHour = hourHash % modifiedBloomFlushPeriodHours; + _logger.info("Bloom filter will flush to disk at hour {} of every day", nthFlushHour); + int flushDelaySeconds = getNumSecondsUntilNthHourOfDay(nthFlushHour, Calendar.getInstance()); + BloomFilterFlushThread flushingThread = new BloomFilterFlushThread(); + scheduledExecutorService.scheduleAtFixedRate(flushingThread, flushDelaySeconds, modifiedBloomFlushPeriodHours * 60 * 60, TimeUnit.SECONDS); + + BloomFilterClearThread clearingThread = new BloomFilterClearThread(); + int nthClearHour = hourHash % modifiedBloomClearingPeriodHours; + _logger.info("Bloom filter will clear once a week, at {} days + {} hours after Sunday 12AM", nthClearHour / 24, nthClearHour % 24); + int clearDelaySeconds = getNumSecondsUntilNthHourOfWeek(nthClearHour, Calendar.getInstance()); + scheduledExecutorService.scheduleAtFixedRate(clearingThread, clearDelaySeconds, modifiedBloomClearingPeriodHours * 60 * 60, TimeUnit.SECONDS); } private void shutdownScheduledExecutorService(){ @@ -403,46 +304,28 @@ private void shutdownScheduledExecutorService(){ */ public enum Property { SYNC_PUT("service.property.schema.sync.put", "false"), - BLOOM_FILE_WRITING_ENABLED("service.property.schema.bloom.file.writing.enabled", "false"), - BF_STATE_BASE_DIR("service.property.schema.bf.state.base.dir", "bloomstate"), - BLOOMFILTER_EXPECTED_NUMBER_INSERTIONS("service.property.schema.bloomfilter.expected.number.insertions", "40"), - BLOOMFILTER_ERROR_RATE("service.property.schema.bloomfilter.error.rate", "0.00001"), - - /* - * Estimated Filter Size
using bloomFilter 1 million entries - * https://hur.st/bloomfilter/?n=1000000&p=1.0E-5&m=&k= 2.86MiB - * Storing in a Set 100K entries with avg length of 15 chars would be 100K * 15 * 2 B = 30B * 100K = 3 MB - * If # of entries is 1 million, then it would be 30 MB resulting in savings in space. - */ - BLOOMFILTER_SCOPE_ONLY_EXPECTED_NUMBER_INSERTIONS("service.property.schema.bloomfilter.scope.only.expected.number.insertions", "40"), - BLOOMFILTER_SCOPE_ONLY_ERROR_RATE("service.property.schema.bloomfilter.scope.only.error.rate", "0.00001"), + BLOOM_FILE_WRITING_ENABLED("service.property.schema.bloom.file.writing.enabled", "false"), + BF_STATE_BASE_DIR("service.property.schema.bf.state.base.dir", "bloomstate"), /* - * Estimated Filter Size using bloomFilter 500 million entries - * https://hur.st/bloomfilter/?n=10000000&p=1.0E-5&m=&k= 1.39GiB - * Storing in a Set 100M entries with avg length of 30 chars would be 100M * 30 * 2 B = 60B * 100M = 6 GB - * If # of entries is 500 million, then it would be 30 GB resulting in savings in space. - */ - - BLOOMFILTER_SCOPE_AND_METRIC_ONLY_EXPECTED_NUMBER_INSERTIONS("service.property.schema.bloomfilter.scope.and.metric.only.expected.number.insertions", "40"), - BLOOMFILTER_SCOPE_AND_METRIC_ONLY_ERROR_RATE("service.property.schema.bloomfilter.scope.and.metric.only.error.rate", "0.00001"), - - /* - * Estimated Filter Size using bloomFilter 1 million entries - * https://hur.st/bloomfilter/?n=1000000&p=1.0E-5&m=&k= 2.86MiB - * Storing in a Set 100K entries with avg length of 15 chars would be 100K * 15 * 2 B = 30B * 100K = 3 MB - * If # of entries is 1 million, then it would be 30 MB resulting in savings in space. - */ - - BLOOMFILTER_METATAGS_EXPECTED_NUMBER_INSERTIONS("service.property.schema.bloomfilter.metatags.expected.number.insertions", "1000000"), - BLOOMFILTER_METATAGS_ERROR_RATE("service.property.schema.bloomfilter.metatags.error.rate", "0.00001"); + * (Comment from old bloom filter pattern) + * Estimated Filter Size using bloomFilter 1 million entries + * https://hur.st/bloomfilter/?n=1000000&p=1.0E-5&m=&k= 2.86MiB + * Storing in a Set 100K entries with avg length of 15 chars would be 100K * 15 * 2 B = 30B * 100K = 3 MB + * If # of entries is 1 million, then it would be 30 MB resulting in savings in space. 
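+ * For reference, the 2.86 MiB figure follows from the standard Bloom sizing formula: m = -n*ln(p)/ln(2)^2 ≈ 23.96M bits ≈ 2.86 MiB at n = 1,000,000 and p = 1.0E-5, with k ≈ (m/n)*ln(2) ≈ 17 hash functions.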
+ */ + MODIFIED_BLOOM_EXPECTED_NUMBER_INSERTIONS("service.property.schema.bloomfilter.modified.expected.number.insertions", "40"), + MODIFIED_BLOOM_ERROR_RATE("service.property.schema.bloomfilter.modified.error.rate", "0.00001"), + MODIFIED_BLOOM_CLEARING_ENABLED("service.property.schema.bloomfilter.modified.clearing.enabled", "true"), + MODIFIED_BLOOM_CLEARING_PERIOD_HOURS("service.property.schema.bloomfilter.modified.clearing.period.hours", String.valueOf(7 * 24)), + MODIFIED_BLOOM_FLUSH_PERIOD_HOURS("service.property.schema.bloomfilter.modified.flush.period.hours", "24"); private final String _name; private final String _defaultValue; - private Property(String name, String defaultValue) { + Property(String name, String defaultValue) { _name = name; _defaultValue = defaultValue; } @@ -477,7 +360,7 @@ public String getDefaultValue() { private class BloomFilterMonitorThread implements Runnable { @Override public void run() { - _logger.info("Initialized randomBloomAppend for bloom filter key = {}", randomBloomAppend); + _logger.info("Initialized randomBloomAppend for bloom filter key = {}", AbstractSchemaRecord.getBloomAppend()); while (!Thread.currentThread().isInterrupted()) { _sleepForPollPeriod(); if (!Thread.currentThread().isInterrupted()) { @@ -491,15 +374,9 @@ public void run() { } private void _checkBloomFilterUsage() { - _monitorService.modifyCounter(MonitorService.Counter.BLOOMFILTER_APPROXIMATE_ELEMENT_COUNT, bloomFilter.approximateElementCount(), null); - _monitorService.modifyCounter(MonitorService.Counter.BLOOMFILTER_SCOPE_ONLY_APPROXIMATE_ELEMENT_COUNT, bloomFilterScopeOnly.approximateElementCount(), null); - _monitorService.modifyCounter(MonitorService.Counter.BLOOMFILTER_SCOPE_AND_METRIC_ONLY_APPROXIMATE_ELEMENT_COUNT, bloomFilterScopeAndMetricOnly.approximateElementCount(), null); - _monitorService.modifyCounter(MonitorService.Counter.BLOOMFILTER_METATAGS_APPROXIMATE_ELEMENT_COUNT, bloomFilterMetatags.approximateElementCount(), null); - - _logger.info("Metrics Bloom expected error rate = {}", bloomFilter.expectedFpp()); - _logger.info("Scope only Bloom expected error rate = {}", bloomFilterScopeOnly.expectedFpp()); - _logger.info("Scope and metric only Bloom expected error rate = {}", bloomFilterScopeAndMetricOnly.expectedFpp()); - _logger.info("Metic Metatags Bloom expected error rate = {}", bloomFilterMetatags.expectedFpp()); + _monitorService.modifyCounter(MonitorService.Counter.BLOOM_MODIFIED_APPROXIMATE_ELEMENT_COUNT, bloomFilter.approximateElementCount(), bloomFilterMonitorTags); + + _logger.info("Bloom for modified-timestamp expected error rate = {}", bloomFilter.expectedFpp()); } private void _sleepForPollPeriod() { @@ -513,28 +390,35 @@ private void _sleepForPollPeriod() { } } + /** + * Clears bloomfilters to allow for modified-timestamp updating + */ + private class BloomFilterClearThread implements Runnable { + @Override + public void run() { + modifiedBloomClearingEnabled = Boolean.valueOf(config.refreshAndGetValue( + SystemConfiguration.Property.SCHEMA_SERVICE_PROPERTY_FILE, + Property.MODIFIED_BLOOM_CLEARING_ENABLED.getName(), Property.MODIFIED_BLOOM_CLEARING_ENABLED.getDefaultValue())); + _logger.info("Refreshed {} property and got {}.", Property.MODIFIED_BLOOM_CLEARING_ENABLED.getName(), modifiedBloomClearingEnabled); + if (modifiedBloomClearingEnabled) { + _logger.info("Clearing modifiedBloom filter entries"); + bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), modifiedBloomExpectedNumberInsertions, modifiedBloomErrorRate); 
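+ // The clear is a plain reference swap with no synchronization; a concurrent put() may briefly consult the old filter, which only delays re-indexing of those records until the next clearing cycle.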
+ } + } + } + + /** + * Writes bloomFilter to disk to allow process restarts to synchronize themselves + */ private class BloomFilterFlushThread implements Runnable { @Override public void run() { - try{ - _flushBloomFilter(); + try { + _logger.info("Flushing bloom filter entries to disk"); + writeBloomsToFile(); } catch (Exception ex) { _logger.warn("Exception occurred while flushing bloom filter.", ex); } } - - private void _flushBloomFilter() { - _logger.info("Flushing out bloom filter entries"); - // Write the main tags bloom filter to file first before flushing - writeTagsBloomFilterToFile(); - - bloomFilter = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), bloomFilterExpectedNumberInsertions , bloomFilterErrorRate); - bloomFilterScopeOnly = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), bloomFilterScopeOnlyExpectedNumberInsertions , bloomFilterScopeOnlyErrorRate); - bloomFilterScopeAndMetricOnly = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterScopeAndMetricOnlyExpectedNumberInsertions , bloomFilterScopeAndMetricOnlyErrorRate); - bloomFilterMetatags = BloomFilter.create(Funnels.stringFunnel(Charset.defaultCharset()), - bloomFilterMetatagsExpectedNumberInsertions , bloomFilterMetatagsErrorRate); - /* Don't need explicit synchronization to prevent slowness majority of the time*/ - } } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AsyncHbaseSchemaService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AsyncHbaseSchemaService.java index 9b17ae90e..de94569db 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AsyncHbaseSchemaService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/AsyncHbaseSchemaService.java @@ -34,10 +34,10 @@ import com.google.inject.Inject; import com.google.inject.Singleton; import com.salesforce.dva.argus.entity.KeywordQuery; +import com.salesforce.dva.argus.entity.MetatagsRecord; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; -import com.salesforce.dva.argus.entity.MetatagsRecord; import com.salesforce.dva.argus.service.AsyncHBaseClientFactory; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.SchemaService; @@ -47,8 +47,6 @@ import com.stumbleupon.async.Callback; import com.stumbleupon.async.Deferred; import com.stumbleupon.async.TimeoutException; - -import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hbase.util.Bytes; import org.hbase.async.CompareFilter.CompareOp; import org.hbase.async.FilterList; @@ -70,12 +68,10 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; -import java.util.Properties; import java.util.Map.Entry; +import java.util.Properties; import java.util.Set; import java.util.TreeSet; -import java.util.HashMap; -import java.util.Map; /** @@ -84,6 +80,7 @@ * @author Bhinav Sura (bhinav.sura@salesforce.com) */ @Singleton +@Deprecated public class AsyncHbaseSchemaService extends AbstractSchemaService { //~ Static fields/initializers ******************************************************************************************************************* @@ -195,14 +192,11 @@ private String _plusOneNConstructRowKey(MetricSchemaRecord record, String table //~ Methods
************************************************************************************************************************************** @Override - protected void implementationSpecificPut(List metrics, - Set scopeNames, - Set> scopesAndMetricNames, - Map metatagsToPut) { + protected void implementationSpecificPut(Set metricsToIndex, Set scopesToIndex, Set metatagsToIndex) { requireNotDisposed(); - SystemAssert.requireArgument(metrics != null, "Metric list cannot be null."); + SystemAssert.requireArgument(metricsToIndex != null, "Metric list cannot be null."); - for (Metric metric : metrics) { + for (Metric metric : metricsToIndex) { if (metric.getTags().isEmpty()) { _putWithoutTag(metric); } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryService.java index 8061734ca..33d43a1d1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryService.java @@ -6,6 +6,7 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -27,7 +28,9 @@ import com.salesforce.dva.argus.service.CacheService; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.DiscoveryService; +import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.NamedBinding; +import com.salesforce.dva.argus.service.MonitorService.Counter; import com.salesforce.dva.argus.service.SchemaService.RecordType; import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; import com.salesforce.dva.argus.service.tsdb.MetricQuery; @@ -41,6 +44,8 @@ public class CachedDiscoveryService extends DefaultService implements DiscoveryS private static final ObjectMapper MAPPER = new ObjectMapper(); private static final int EXPIRY_TIME_SECS = 3600; private final long UPPER_LIMIT_TIME_GET_QUERIES_IN_MILLIS; + private final long _maxDataPointsPerQuery; + private final boolean _enforceDatapointsLimit; //~ Instance fields ****************************************************************************************************************************** @@ -48,11 +53,12 @@ public class CachedDiscoveryService extends DefaultService implements DiscoveryS private final DiscoveryService _discoveryService; private final CacheService _cacheService; private final ExecutorService _executorService; + private final MonitorService _monitorService; //~ Constructors ********************************************************************************************************************************* @Inject - public CachedDiscoveryService(CacheService cacheService, @NamedBinding DiscoveryService discoveryService, SystemConfiguration config) { + public CachedDiscoveryService(CacheService cacheService, @NamedBinding DiscoveryService discoveryService, SystemConfiguration config, MonitorService monitorService) { super(config); SystemAssert.requireArgument(cacheService != null, "Cache Service cannot be null."); SystemAssert.requireArgument(discoveryService != null, "Discovery Service cannot be null."); @@ -63,6 +69,9 @@ public CachedDiscoveryService(CacheService cacheService, @NamedBinding Discovery _cacheService = cacheService; _discoveryService = discoveryService; _executorService = 
Executors.newCachedThreadPool(); + _maxDataPointsPerQuery = Long.valueOf(config.getValue(SystemConfiguration.Property.MAX_DATAPOINTS_ALLOWED_PER_QUERY)); + _enforceDatapointsLimit = Boolean.valueOf(config.getValue(SystemConfiguration.Property.ENFORCE_DATAPOINTS_LIMIT)); + this._monitorService = monitorService; } //~ Methods ************************************************************************************************************************************** @@ -114,25 +123,27 @@ public List getMatchingQueries(MetricQuery query) { if(DiscoveryService.isWildcardQuery(query)) { String value = _cacheService.get(_getKey(query)); if(value == null) { // Cache Miss - _logger.info(MessageFormat.format("CACHE MISS for Wildcard Query: '{'{0}'}'. Will read from persistent storage.", query)); + _logger.debug(MessageFormat.format("CACHE MISS for Wildcard Query: {0}. Will read from persistent storage.", query)); queries = _discoveryService.getMatchingQueries(query); long timeToGetQueriesMillis = (System.nanoTime() - start) / 1000000; - _logger.info("Time to get matching queries from store in ms: " + timeToGetQueriesMillis); + _logger.debug("Time to get matching queries from store in ms: " + timeToGetQueriesMillis); if(timeToGetQueriesMillis > UPPER_LIMIT_TIME_GET_QUERIES_IN_MILLIS){ _logger.warn("Long time to get matching queries in ms: {} for query {}", timeToGetQueriesMillis, query); } _executorService.submit(new CacheInsertWorker(query, queries)); } else { // Cache Hit - _logger.info(MessageFormat.format("CACHE HIT for Wildcard Query: '{'{0}'}'", query)); - _logger.info("Time to get matching queries from cache in ms: " + (System.nanoTime() - start) / 1000000); + _logger.debug(MessageFormat.format("CACHE HIT for Wildcard Query: {0}", query)); + _logger.debug("Time to get matching queries from cache in ms: " + (System.nanoTime() - start) / 1000000); try { JavaType type = MAPPER.getTypeFactory().constructCollectionType(List.class, MetricQuery.class); List matchedQueries = MAPPER.readValue(value, type); _checkIfExceedsLimits(query, matchedQueries); for(int i=0; i getMatchingQueries(MetricQuery query) { } } } else { - _logger.info(MessageFormat.format("MetricQuery'{'{0}'}' does not have any wildcards", query)); + _logger.debug(MessageFormat.format("MetricQuery {0} does not have any wildcards", query)); queries.add(query); } @@ -188,14 +199,14 @@ public String getDefaultValue() { private void _checkIfExceedsLimits(MetricQuery query, List matchedQueries) { - int noOfTimeseriesAllowed = DiscoveryService.maxTimeseriesAllowed(query); + int noOfTimeseriesAllowed = DiscoveryService.maxTimeseriesAllowed(query, _maxDataPointsPerQuery); int numOfExpandedTimeseries = 1; for(MetricQuery mq : matchedQueries) { numOfExpandedTimeseries += DiscoveryService.numApproxTimeseriesForQuery(mq); } if(numOfExpandedTimeseries > noOfTimeseriesAllowed) { - throw new WildcardExpansionLimitExceededException(EXCEPTION_MESSAGE); + DiscoveryService.throwMaximumDatapointsExceededException(query, _maxDataPointsPerQuery, _enforceDatapointsLimit, _monitorService, _logger); } } @@ -246,7 +257,7 @@ private String _getKey(MetricQuery query) { try { Map sortedTags = Collections.emptyMap(); if(query.getTags() != null && !query.getTags().isEmpty()) { - sortedTags = new TreeMap<>(query.getTags()); + sortedTags = query.getTags(); } String key = MessageFormat.format("{0}:{1}'{'{2}'}'", query.getScope(), query.getMetric(), MAPPER.writeValueAsString(sortedTags)); diff --git 
a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ConsumerOffsetRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ConsumerOffsetRecordList.java new file mode 100644 index 000000000..2ac4d4dcb --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ConsumerOffsetRecordList.java @@ -0,0 +1,236 @@ +package com.salesforce.dva.argus.service.schema; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.salesforce.dva.argus.service.metric.ConsumerOffsetMetric; +import com.salesforce.dva.argus.service.metric.ElasticSearchConsumerOffsetMetricsService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils.HashAlgorithm; +import net.openhft.hashing.LongHashFunction; +import org.apache.commons.codec.digest.DigestUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static com.salesforce.dva.argus.service.metric.ElasticSearchConsumerOffsetMetricsService.METRIC_NAME; +import static com.salesforce.dva.argus.service.metric.ElasticSearchConsumerOffsetMetricsService.SCOPE_NAME; + +public class ConsumerOffsetRecordList implements RecordFinder { + + private Map _idToMetricMap = new HashMap<>(); + private String _scrollID; + private static ObjectMapper mapper = new ObjectMapper(); + private static Logger logger = LoggerFactory.getLogger(ConsumerOffsetRecordList.class); + + public ConsumerOffsetRecordList(List metrics, String scrollID) { + int count = 0; + for(ConsumerOffsetMetric metric : metrics) { + _idToMetricMap.put(String.valueOf(count++), metric); + } + setScrollID(scrollID); + } + + public ConsumerOffsetRecordList(List metrics, HashAlgorithm algorithm) { + for(ConsumerOffsetMetric metric : metrics) { + String id; + metric.setTime(ElasticSearchUtils.convertTimestampToMillis(metric.getTime())); + String metricKey = ConsumerOffsetMetric.getIdentifierFieldsAsString(metric); + if(HashAlgorithm.XXHASH.equals(algorithm)) { + id = String.valueOf(LongHashFunction.xx().hashChars(metricKey)); + } else { + id = DigestUtils.md5Hex(metricKey); + } + _idToMetricMap.put(id, metric); + } + } + + @Override + public List getRecords() { + return new ArrayList<>(_idToMetricMap.values()); + } + + @Override + public Set getIdSet() { + return _idToMetricMap.keySet(); + } + + @Override + public String getScrollID() { + return _scrollID; + } + + @Override + public void setScrollID(String scrollID) { + this._scrollID = scrollID; + } + + @Override + public ConsumerOffsetMetric getRecord(String id) { + return _idToMetricMap.get(id); + } + + public static class IndexSerializer extends JsonSerializer { + + public static final long MILLIS_IN_A_DAY = 86400000L; + public static final long MAX_METRIC_AGE_MS = 30 * MILLIS_IN_A_DAY; + + 
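// Emits one ES bulk-API pair per record: an action/metadata line naming the date-based index and document id, then the document source, each on its own line. +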
@Override + public void serialize(ConsumerOffsetRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + try { + mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + + for (Map.Entry entry : list._idToMetricMap.entrySet()) { + ConsumerOffsetMetric offsetMetric = entry.getValue(); + + if (isMetricTimestampOld(offsetMetric.getTime())) continue; + + jgen.writeRaw("{ \"index\" : " + + "{\"_index\" : \"" + getMetricIndex(offsetMetric.getTime()) + "\"," + + "\"_type\": \"_doc\"," + + "\"_id\" : \"" + entry.getKey() + + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + Map fieldsData = new HashMap<>(); + fieldsData.put(ConsumerOffsetRecordType.METRIC.getName(), offsetMetric.getMetric()); + fieldsData.put(ConsumerOffsetRecordType.TOPIC.getName(), offsetMetric.getTopic()); + fieldsData.put(ConsumerOffsetRecordType.TIMESERIES.getName(), Long.toString(offsetMetric.getTime())); + fieldsData.put(ConsumerOffsetRecordType.VALUE.getName(), Double.toString(offsetMetric.getValue())); + fieldsData.put(ConsumerOffsetRecordType.TAGS.getName(), mapper.writeValueAsString(offsetMetric.getTags())); + jgen.writeRaw(mapper.writeValueAsString(fieldsData)); + jgen.writeRaw(System.lineSeparator()); + } + } catch (Exception ex) { + logger.error("Error while Serializing Consumer Offset Record List: ", ex); + logger.error("RecordList: {}", list.getRecords()); + } + } + + private boolean isMetricTimestampOld(Long timestampMillis) { + return System.currentTimeMillis() - timestampMillis > MAX_METRIC_AGE_MS; + } + + private String getMetricIndex(Long epochTimestamp) { + Date metricDate = new Date(epochTimestamp); + SimpleDateFormat formatter = new SimpleDateFormat(ElasticSearchConsumerOffsetMetricsService.DATE_FORMAT); + String indexNameToAppend = String.format(ElasticSearchConsumerOffsetMetricsService.INDEX_FORMAT, + ElasticSearchConsumerOffsetMetricsService.INDEX_TEMPLATE_PATTERN_START, + formatter.format(metricDate)); + logger.debug("Metric Index Name to Append for Consumer Offset Documents: {}", indexNameToAppend); + return indexNameToAppend; + } + } + + public static class Deserializer extends JsonDeserializer { + + @Override + public ConsumerOffsetRecordList deserialize(JsonParser jp, DeserializationContext context) + throws IOException { + + String scrollID = null; + List result = new ArrayList<>(); + + JsonNode rootNode = jp.getCodec().readTree(jp); + try { + if (rootNode.has("_scroll_id")) { + scrollID = rootNode.get("_scroll_id").asText(); + } + + JsonNode recordsPerTopic = rootNode.get("aggregations").get("max_topic_offset_per_unit_time_greater_than").get("buckets"); + + if (JsonNodeType.ARRAY.equals(recordsPerTopic.getNodeType())) { + Iterator topicIter = recordsPerTopic.elements(); + topicIter.forEachRemaining(topicJson -> { + JsonNode topicNode = topicJson.get("key"); + JsonNode recordsPerUnitTime = topicJson.get("max_offset_per_unit_time_greater_than").get("buckets"); + if (JsonNodeType.ARRAY.equals(recordsPerUnitTime.getNodeType())) { + Iterator timeIter = recordsPerUnitTime.elements(); + timeIter.forEachRemaining(valJson -> { + JsonNode timestampNode = valJson.get("key"); + JsonNode valueNode = valJson.get("max_offset_greater_than").get("value"); + Map tags = new HashMap<>(); + tags.put("service", SCOPE_NAME); + ConsumerOffsetMetric consumerOffsetMetric = new ConsumerOffsetMetric(METRIC_NAME, topicNode.asText(), timestampNode.asLong(), valueNode.asDouble(), tags); + result.add(consumerOffsetMetric); + }); + } + }); + } + return new 
ConsumerOffsetRecordList(result, scrollID); + } catch (Exception ex) { + logger.error("Exception occurred while deserializing consumer offset documents: ", ex); + logger.error("Json to parse: {}", rootNode); + throw ex; + } + } + } + + /** + * Indicates the schema record field to be used for indexing in ES. + * + * @author Sudhanshu Bahety (sudhanshu.bahety@salesforce.com) + */ + public static enum ConsumerOffsetRecordType { + + /** Match against the metric field. */ + METRIC("metric"), + /** Match against the topic field. */ + TOPIC("topic"), + /** Match against the value field. */ + VALUE("value"), + /** Match against the timeseries field. */ + TIMESERIES("ts"), + /** Match against the tags field. */ + TAGS("tags"); + + private String _name; + + ConsumerOffsetRecordType(String name) { + _name = name; + } + + /** + * Returns a given record type corresponding to the given name. + * + * @param name The name to match against, case-insensitively. Cannot be null. + * + * @return The corresponding record type; an IllegalArgumentException is thrown if no matching record type exists. + */ + @JsonCreator + public static ConsumerOffsetRecordType fromName(String name) { + for (ConsumerOffsetRecordType type : ConsumerOffsetRecordType.values()) { + if (type.getName().equalsIgnoreCase(name)) { + return type; + } + } + + throw new IllegalArgumentException("Illegal record type: " + name); + } + + /** + * Returns the record type name. + * + * @return The record type name. + */ + public String getName() { + return _name; + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryService.java index 3f23867f0..a4caf6431 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryService.java @@ -38,6 +38,7 @@ import com.salesforce.dva.argus.entity.SchemaQuery; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.DiscoveryService; +import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.SchemaService; import com.salesforce.dva.argus.service.SchemaService.RecordType; import com.salesforce.dva.argus.service.tsdb.MetricQuery; @@ -66,6 +67,9 @@ public class DefaultDiscoveryService extends DefaultService implements Discovery private final Logger _logger = LoggerFactory.getLogger(DefaultDiscoveryService.class); private final SchemaService _schemaService; + private final long _maxDataPointsPerQuery; + private final boolean _enforceDatapointsLimit; + private final MonitorService _monitorService; //~ Constructors ********************************************************************************************************************************* @@ -76,9 +80,12 @@ public class DefaultDiscoveryService extends DefaultService implements Discovery * @param config Service properties */ @Inject - public DefaultDiscoveryService(SchemaService schemaService, SystemConfiguration config) { + public DefaultDiscoveryService(SchemaService schemaService, SystemConfiguration config, MonitorService monitorService) { super(config); this._schemaService = schemaService; + this._maxDataPointsPerQuery = Long.valueOf(config.getValue(SystemConfiguration.Property.MAX_DATAPOINTS_ALLOWED_PER_QUERY)); + this._enforceDatapointsLimit = Boolean.valueOf(config.getValue(SystemConfiguration.Property.ENFORCE_DATAPOINTS_LIMIT)); +
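// Read the datapoint ceiling and enforcement flag once at construction; every wildcard expansion below is checked against these cached values. +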
this._monitorService = monitorService; } //~ Methods ************************************************************************************************************************************** @@ -139,18 +146,18 @@ public List getMatchingQueries(MetricQuery query) { long start = System.nanoTime(); if (DiscoveryService.isWildcardQuery(query)) { - _logger.info(MessageFormat.format("MetricQuery'{'{0}'}' contains wildcards. Will match against schema records.", query)); - - int limit = 10000; - int noOfTimeseriesAllowed = DiscoveryService.maxTimeseriesAllowed(query); + _logger.debug(MessageFormat.format("MetricQuery {0} contains wildcards. Will match against schema records.", query)); + + // Use limit 0 to indicate an "unlimited" query + int limit = 0; + int noOfTimeseriesAllowed = DiscoveryService.maxTimeseriesAllowed(query, _maxDataPointsPerQuery); if(noOfTimeseriesAllowed == 0) { - throw new WildcardExpansionLimitExceededException(EXCEPTION_MESSAGE); + DiscoveryService.throwMaximumDatapointsExceededException(query, _maxDataPointsPerQuery, _enforceDatapointsLimit, _monitorService, _logger); } Map queries = new HashMap<>(); if (query.getTags() == null || query.getTags().isEmpty()) { - MetricSchemaRecordQuery schemaQuery = new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder().namespace(query.getNamespace()) .scope(query.getScope()) .metric(query.getMetric()) @@ -159,33 +166,23 @@ public List getMatchingQueries(MetricQuery query) { .limit(limit) .page(1) .build(); - - while (true) { - List records = _schemaService.get(schemaQuery); - for (MetricSchemaRecord record : records) { - String identifier = _getIdentifier(record); - - if (!queries.containsKey(identifier)) { - if (queries.size() == noOfTimeseriesAllowed) { - throw new WildcardExpansionLimitExceededException(EXCEPTION_MESSAGE); - } - MetricQuery mq = new MetricQuery(record.getScope(), record.getMetric(), null, 0L, 1L); + List records = _schemaService.get(schemaQuery); + for (MetricSchemaRecord record : records) { + String identifier = _getIdentifier(record); - mq.setNamespace(record.getNamespace()); - _copyRemainingProperties(mq, query); - queries.put(identifier, mq); + if (!queries.containsKey(identifier)) { + if (queries.size() == noOfTimeseriesAllowed) { + DiscoveryService.throwMaximumDatapointsExceededException(query, _maxDataPointsPerQuery, _enforceDatapointsLimit, _monitorService, _logger); } + + MetricQuery mq = new MetricQuery(record.getScope(), record.getMetric(), null, 0L, 1L); + + mq.setNamespace(record.getNamespace()); + _copyRemainingProperties(mq, query); + queries.put(identifier, mq); } - - if (records.size() < limit) { - break; - } - - schemaQuery.setScanFrom(records.get(records.size() - 1)); - schemaQuery.setPage(schemaQuery.getPage()+1); } - expandedQueryList = new ArrayList<>(queries.values()); } else { Map timeseriesCount = new HashMap<>(); @@ -206,57 +203,45 @@ public List getMatchingQueries(MetricQuery query) { || SchemaService.containsWildcard(tag.getKey()) || SchemaService.containsWildcard(tag.getValue()); - while (true) { - List records; - - - if(!containsWildcard) { - records = Arrays.asList(new MetricSchemaRecord(query.getNamespace(), query.getScope(), query.getMetric(), - tag.getKey(), tag.getValue())); - } else { - records = _schemaService.get(schemaQuery); - } - - for (MetricSchemaRecord record : records) { - if (_getTotalTimeseriesCount(timeseriesCount) == noOfTimeseriesAllowed) { - throw new WildcardExpansionLimitExceededException(EXCEPTION_MESSAGE); - } - - String identifier = _getIdentifier(record); + 
List records; + if(!containsWildcard) { + records = Arrays.asList(new MetricSchemaRecord(query.getNamespace(), query.getScope(), query.getMetric(), + tag.getKey(), tag.getValue())); + } else { + records = _schemaService.get(schemaQuery); + } + + for (MetricSchemaRecord record : records) { + if (_getTotalTimeseriesCount(timeseriesCount) == noOfTimeseriesAllowed) { + DiscoveryService.throwMaximumDatapointsExceededException(query, _maxDataPointsPerQuery, _enforceDatapointsLimit, _monitorService, _logger); + } + + String identifier = _getIdentifier(record); - if (queries.containsKey(identifier)) { - MetricQuery mq = queries.get(identifier); + if (queries.containsKey(identifier)) { + MetricQuery mq = queries.get(identifier); - if (mq.getTags().containsKey(record.getTagKey())) { - String oldValue = mq.getTag(record.getTagKey()); - String newValue = oldValue + "|" + record.getTagValue(); + if (mq.getTags().containsKey(record.getTagKey())) { + String oldValue = mq.getTag(record.getTagKey()); + String newValue = oldValue + "|" + record.getTagValue(); - mq.setTag(record.getTagKey(), newValue); - } else { - mq.setTag(record.getTagKey(), record.getTagValue()); - } - timeseriesCount.put(identifier, DiscoveryService.numApproxTimeseriesForQuery(mq)); + mq.setTag(record.getTagKey(), newValue); } else { - Map tags = new HashMap(); + mq.setTag(record.getTagKey(), record.getTagValue()); + } + timeseriesCount.put(identifier, DiscoveryService.numApproxTimeseriesForQuery(mq)); + } else { + Map tags = new HashMap(); - tags.put(record.getTagKey(), record.getTagValue()); + tags.put(record.getTagKey(), record.getTagValue()); - MetricQuery mq = new MetricQuery(record.getScope(), record.getMetric(), tags, 0L, 1L); + MetricQuery mq = new MetricQuery(record.getScope(), record.getMetric(), tags, 0L, 1L); - mq.setNamespace(record.getNamespace()); - _copyRemainingProperties(mq, query); - queries.put(identifier, mq); - timeseriesCount.put(identifier, 1); - } - } - - if (records.size() < limit) { - break; + mq.setNamespace(record.getNamespace()); + _copyRemainingProperties(mq, query); + queries.put(identifier, mq); + timeseriesCount.put(identifier, 1); } - - schemaQuery.setScanFrom(records.get(records.size() - 1)); - schemaQuery.setPage(schemaQuery.getPage()+1); - } } @@ -270,7 +255,7 @@ public List getMatchingQueries(MetricQuery query) { } // end if-else } else { - _logger.info(MessageFormat.format("MetricQuery'{'{0}'}' does not have any wildcards", query)); + _logger.debug(MessageFormat.format("MetricQuery {0} does not have any wildcards", query)); expandedQueryList = Arrays.asList(query); } // end if-else _logger.debug("Time to get matching queries in ms: " + (System.nanoTime() - start) / 1000000); @@ -299,11 +284,11 @@ private String _getIdentifier(MetricSchemaRecord record) { } private void _logMatchedQueries(List queryList) { - _logger.info("Matched Queries:"); + _logger.debug("Matched Queries:"); int i = 1; for (MetricQuery q : queryList) { - _logger.info(MessageFormat.format("MetricQuery{0} = {1}", i++, q)); + _logger.debug(MessageFormat.format("MetricQuery{0} = {1}", i++, q)); } } @@ -313,6 +298,8 @@ private void _copyRemainingProperties(MetricQuery dest, MetricQuery orig) { dest.setAggregator(orig.getAggregator()); dest.setDownsampler(orig.getDownsampler()); dest.setDownsamplingPeriod(orig.getDownsamplingPeriod()); + dest.setShowHistogramBuckets((orig.getShowHistogramBuckets())); + dest.setPercentile((orig.getPercentile())); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaService.java index fe47446ba..99b970fb4 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaService.java @@ -1,73 +1,70 @@ package com.salesforce.dva.argus.service.schema; -import static com.salesforce.dva.argus.entity.MetricSchemaRecord.DEFAULT_RETENTION_DISCOVERY_DAYS; -import static com.salesforce.dva.argus.entity.MetricSchemaRecord.EXPIRATION_TS; -import static com.salesforce.dva.argus.entity.MetricSchemaRecord.RETENTION_DISCOVERY; -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.MalformedURLException; -import java.net.URL; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.SortedSet; -import java.util.function.Supplier; - +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; +import com.google.inject.Inject; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.entity.AbstractSchemaRecord; import com.salesforce.dva.argus.entity.KeywordQuery; +import com.salesforce.dva.argus.entity.MetatagsRecord; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; -import com.salesforce.dva.argus.entity.ScopeAndMetricOnlySchemaRecord; import com.salesforce.dva.argus.entity.ScopeOnlySchemaRecord; -import com.salesforce.dva.argus.entity.MetatagsRecord; -import org.apache.commons.lang3.tuple.Pair; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.MonitorService.Counter; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.service.schema.ElasticSearchSchemaService.PutResponse.Item; +import com.salesforce.dva.argus.service.schema.MetricSchemaRecordList.HashAlgorithm; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemAssert; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpStatus; -import org.apache.http.client.config.RequestConfig.Builder; +import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import 
org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; -import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.RestClientBuilder.HttpClientConfigCallback; import org.elasticsearch.client.RestClientBuilder.RequestConfigCallback; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.MDC; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.google.inject.Inject; -import com.google.inject.Singleton; -import com.salesforce.dva.argus.service.MonitorService; -import com.salesforce.dva.argus.service.MonitorService.Counter; -import com.salesforce.dva.argus.service.SchemaService; -import com.salesforce.dva.argus.service.schema.ElasticSearchSchemaService.PutResponse.Item; -import com.salesforce.dva.argus.service.schema.MetricSchemaRecordList.HashAlgorithm; -import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.system.SystemConfiguration; -import com.salesforce.dva.argus.system.SystemException; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.net.URL; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; +import java.util.Set; +import java.util.SortedSet; + +import static com.salesforce.dva.argus.entity.MetricSchemaRecord.RETENTION_DISCOVERY; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; /** * Implementation of the schema service using ElasticSearch. 
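(The hunks below collapse the separate scope, scope-and-metric, and metatags mappers into per-index ObjectMappers and keep hash-based document ids, MD5 by default with xxHash as an option. As a minimal sketch of that id-generation pattern, using only the DigestUtils and LongHashFunction calls already shown in ConsumerOffsetRecordList above; the IdSketch class and idFor helper are illustrative names, not part of this change:)

import net.openhft.hashing.LongHashFunction;
import org.apache.commons.codec.digest.DigestUtils;

final class IdSketch {
    enum HashAlgorithm { MD5, XXHASH }

    static String idFor(String recordKey, HashAlgorithm algorithm) {
        if (HashAlgorithm.XXHASH == algorithm) {
            // 64-bit xxHash of the key's characters, rendered as a decimal string.
            return String.valueOf(LongHashFunction.xx().hashChars(recordKey));
        }
        // Default path: 32-character hex MD5 digest of the key.
        return DigestUtils.md5Hex(recordKey);
    }
}

(Deterministic ids make the bulk requests idempotent: re-indexing the same record overwrites the same document instead of creating a duplicate.)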
@@ -77,79 +74,42 @@ @Singleton public class ElasticSearchSchemaService extends AbstractSchemaService { - private static String SCOPE_INDEX_NAME; - private static String SCOPE_TYPE_NAME; - - private static String SCOPE_AND_METRIC_INDEX_NAME; - private static String SCOPE_AND_METRIC_TYPE_NAME; - - private static String METATAGS_INDEX_NAME; - private static String METATAGS_TYPE_NAME; + private static Logger _logger = LoggerFactory.getLogger(ElasticSearchSchemaService.class); - private static final String INDEX_NAME = "metadata_index"; - private static final String TYPE_NAME = "metadata_type"; + /** Global ES properties */ private static final String KEEP_SCROLL_CONTEXT_OPEN_FOR = "1m"; - private static final int INDEX_MAX_RESULT_WINDOW = 10000; + static final String SCROLL_ENDPOINT = "/_search/scroll"; private static final int MAX_RETRY_TIMEOUT = 300 * 1000; private static final String FIELD_TYPE_TEXT = "text"; private static final String FIELD_TYPE_DATE ="date"; private static final String FIELD_TYPE_INTEGER = "integer"; - - private static final long ONE_DAY_IN_MILLIS = 24L * 3600L * 1000L; - - private final ObjectMapper _mapper; - private final ObjectMapper _createScopeOnlyMapper; - private final ObjectMapper _updateScopeOnlyMapper; - private final ObjectMapper _createScopeAndMetricOnlyMapper; - private final ObjectMapper _updateScopeAndMetricOnlyMapper; - private final ObjectMapper _createMetatagsMapper; - private final ObjectMapper _updateMetatagsMapper; - - private static Logger _logger = LoggerFactory.getLogger(ElasticSearchSchemaService.class); private RestClient _esRestClient; - private final int _replicationFactor; - private final int _numShards; - private final int _replicationFactorForScopeIndex; - private final int _numShardsForScopeIndex; - private final int _replicationFactorForScopeAndMetricIndex; - private final int _numShardsForScopeAndMetricIndex; - private final int _replicationFactorForMetatagsIndex; - private final int _numShardsForMetatagsIndex; private final int _bulkIndexingSize; private HashAlgorithm _idgenHashAlgo; - private boolean _useScopeMetricNamesIndex; - - @Inject - public ElasticSearchSchemaService(SystemConfiguration config, MonitorService monitorService) { - super(config, monitorService); - - _mapper = createObjectMapper(); - - _createScopeOnlyMapper = _getScopeOnlyObjectMapper(new ScopeOnlySchemaRecordList.CreateSerializer()); - _updateScopeOnlyMapper = _getScopeOnlyObjectMapper(new ScopeOnlySchemaRecordList.UpdateSerializer()); + /** Main index properties */ + private static String TAGS_INDEX_NAME; + private static String TAGS_TYPE_NAME; + private final ObjectMapper indexMetadataMapper; - _createScopeAndMetricOnlyMapper = _getScopeAndMetricOnlyObjectMapper(new ScopeAndMetricOnlySchemaRecordList.CreateSerializer()); - _updateScopeAndMetricOnlyMapper = _getScopeAndMetricOnlyObjectMapper(new ScopeAndMetricOnlySchemaRecordList.UpdateSerializer()); + /** Scope-only index properties */ + private static String SCOPE_INDEX_NAME; + private static String SCOPE_TYPE_NAME; + private final ObjectMapper indexScopeOnlyMapper; - _createMetatagsMapper = _getMetatagsObjectMapper(new MetatagsSchemaRecordList.CreateSerializer()); - _updateMetatagsMapper = _getMetatagsObjectMapper(new MetatagsSchemaRecordList.UpdateSerializer()); + /** Metatags index properties */ + private static String METATAGS_INDEX_NAME; + private static String METATAGS_TYPE_NAME; + private final ObjectMapper indexMetatagsMapper; - SCOPE_INDEX_NAME = 
config.getValue(Property.ELASTICSEARCH_SCOPE_INDEX_NAME.getName(), - Property.ELASTICSEARCH_SCOPE_INDEX_NAME.getDefaultValue()); - SCOPE_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_SCOPE_TYPE_NAME.getName(), - Property.ELASTICSEARCH_SCOPE_TYPE_NAME.getDefaultValue()); + private static ObjectMapper genericObjectMapper = new ObjectMapper(); - SCOPE_AND_METRIC_INDEX_NAME = config.getValue(Property.ELASTICSEARCH_SCOPE_AND_METRIC_INDEX_NAME.getName(), - Property.ELASTICSEARCH_SCOPE_AND_METRIC_INDEX_NAME.getDefaultValue()); - SCOPE_AND_METRIC_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_SCOPE_AND_METRIC_TYPE_NAME.getName(), - Property.ELASTICSEARCH_SCOPE_AND_METRIC_TYPE_NAME.getDefaultValue()); - METATAGS_INDEX_NAME = config.getValue(Property.ELASTICSEARCH_METATAGS_INDEX_NAME.getName(), - Property.ELASTICSEARCH_METATAGS_INDEX_NAME.getDefaultValue()); - METATAGS_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_METATAGS_TYPE_NAME.getName(), - Property.ELASTICSEARCH_METATAGS_TYPE_NAME.getDefaultValue()); + @Inject + public ElasticSearchSchemaService(SystemConfiguration config, MonitorService monitorService, ElasticSearchUtils esUtils) { + super(config, monitorService); + /* Setup Global ES stuff */ String algorithm = config.getValue(Property.ELASTICSEARCH_IDGEN_HASH_ALGO.getName(), Property.ELASTICSEARCH_IDGEN_HASH_ALGO.getDefaultValue()); try { _idgenHashAlgo = HashAlgorithm.fromString(algorithm); @@ -157,44 +117,12 @@ public ElasticSearchSchemaService(SystemConfiguration config, MonitorService mon _logger.warn("{} is not supported by this service. Valid values are: {}.", algorithm, Arrays.asList(HashAlgorithm.values())); _idgenHashAlgo = HashAlgorithm.MD5; } - _logger.info("Using {} for Elasticsearch document id generation.", _idgenHashAlgo); - - _replicationFactor = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS.getName(), Property.ELASTICSEARCH_NUM_REPLICAS.getDefaultValue())); - - _numShards = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT.getName(), Property.ELASTICSEARCH_SHARDS_COUNT.getDefaultValue())); - - _replicationFactorForScopeIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX.getName(), Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX.getDefaultValue())); - - _numShardsForScopeIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX.getName(), Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX.getDefaultValue())); - - _replicationFactorForScopeAndMetricIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_AND_METRIC_INDEX.getName(), Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_AND_METRIC_INDEX.getDefaultValue())); - - _numShardsForScopeAndMetricIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_AND_METRIC_INDEX.getName(), Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_AND_METRIC_INDEX.getDefaultValue())); - - _replicationFactorForMetatagsIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX.getName(), - Property.ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX.getDefaultValue())); - - _numShardsForMetatagsIndex = Integer.parseInt( - config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX.getName(), - Property.ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX.getDefaultValue())); - _bulkIndexingSize = Integer.parseInt( config.getValue(Property.ELASTICSEARCH_INDEXING_BATCH_SIZE.getName(), 
Property.ELASTICSEARCH_INDEXING_BATCH_SIZE.getDefaultValue())); - _useScopeMetricNamesIndex = Boolean.parseBoolean( - config.getValue(Property.ELASTICSEARCH_USE_SCOPE_AND_METRIC_INDEX.getName(), Property.ELASTICSEARCH_USE_SCOPE_AND_METRIC_INDEX.getDefaultValue())); - String[] nodes = config.getValue(Property.ELASTICSEARCH_ENDPOINT.getName(), Property.ELASTICSEARCH_ENDPOINT.getDefaultValue()).split(","); HttpHost[] httpHosts = new HttpHost[nodes.length]; - for(int i=0; i { + try { + int connCount = Integer.parseInt(config.getValue(Property.ELASTICSEARCH_CONNECTION_COUNT.getName(), + Property.ELASTICSEARCH_CONNECTION_COUNT.getDefaultValue())); + PoolingNHttpClientConnectionManager connMgr = + new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); + connMgr.setMaxTotal(connCount); + connMgr.setDefaultMaxPerRoute(connCount / httpHosts.length); + httpClientBuilder.setConnectionManager(connMgr); + return httpClientBuilder; + } catch(Exception e) { + throw new SystemException(e); } }; + RequestConfigCallback requestConfigCallback = requestConfigBuilder -> { + int connTimeout = Integer.parseInt(config.getValue(Property.ELASTICSEARCH_ENDPOINT_CONNECTION_TIMEOUT.getName(), + Property.ELASTICSEARCH_ENDPOINT_CONNECTION_TIMEOUT.getDefaultValue())); + int socketTimeout = Integer.parseInt(config.getValue(Property.ELASTICSEARCH_ENDPOINT_SOCKET_TIMEOUT.getName(), + Property.ELASTICSEARCH_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); + requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); - RequestConfigCallback requestConfigCallback = new RestClientBuilder.RequestConfigCallback() { + _logger.info("_esRestClient set connTimeoutMillis {} socketTimeoutMillis {}", + connTimeout, socketTimeout); - @Override - public Builder customizeRequestConfig(Builder requestConfigBuilder) { - int connTimeout = Integer.parseInt(config.getValue(Property.ELASTICSEARCH_ENDPOINT_CONNECTION_TIMEOUT.getName(), - Property.ELASTICSEARCH_ENDPOINT_CONNECTION_TIMEOUT.getDefaultValue())); - int socketTimeout = Integer.parseInt(config.getValue(Property.ELASTICSEARCH_ENDPOINT_SOCKET_TIMEOUT.getName(), - Property.ELASTICSEARCH_ENDPOINT_SOCKET_TIMEOUT.getDefaultValue())); - requestConfigBuilder.setConnectTimeout(connTimeout).setSocketTimeout(socketTimeout); - - _logger.info("_esRestClient set connTimeoutMillis {} socketTimeoutMillis {}", - connTimeout, socketTimeout); - - return requestConfigBuilder; - } + return requestConfigBuilder; }; - _esRestClient = RestClient.builder(httpHosts) .setHttpClientConfigCallback(clientConfigCallback) .setRequestConfigCallback(requestConfigCallback) .setMaxRetryTimeoutMillis(MAX_RETRY_TIMEOUT) .build(); - _logger.info("_esRestClient set MaxRetryTimeoutsMillis {}", MAX_RETRY_TIMEOUT); - _createIndexIfNotExists(INDEX_NAME, _replicationFactor, _numShards, () -> _createMappingsNode()); + /* Set up main index stuff */ + indexMetadataMapper = _getMetadataObjectMapper(new MetricSchemaRecordList.IndexSerializer()); + TAGS_INDEX_NAME = config.getValue(Property.ELASTICSEARCH_TAGS_INDEX_NAME.getName(), + Property.ELASTICSEARCH_TAGS_INDEX_NAME.getDefaultValue()); + TAGS_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_TAGS_TYPE_NAME.getName(), + Property.ELASTICSEARCH_TAGS_TYPE_NAME.getDefaultValue()); + int replicationFactor = Integer.parseInt( + config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_TAGS_INDEX.getName(), Property.ELASTICSEARCH_NUM_REPLICAS_FOR_TAGS_INDEX.getDefaultValue())); + int numShards = Integer.parseInt( + 
config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_TAGS_INDEX.getName(), Property.ELASTICSEARCH_SHARDS_COUNT_FOR_TAGS_INDEX.getDefaultValue())); + + esUtils.createIndexIfNotExists(_esRestClient, + TAGS_INDEX_NAME, + replicationFactor, + numShards, + () -> _createMappingsNode()); + + + /** Set up scope-only index stuff */ + indexScopeOnlyMapper = _getScopeOnlyObjectMapper(new ScopeOnlySchemaRecordList.IndexSerializer()); + SCOPE_INDEX_NAME = config.getValue(Property.ELASTICSEARCH_SCOPE_INDEX_NAME.getName(), + Property.ELASTICSEARCH_SCOPE_INDEX_NAME.getDefaultValue()); + SCOPE_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_SCOPE_TYPE_NAME.getName(), + Property.ELASTICSEARCH_SCOPE_TYPE_NAME.getDefaultValue()); + int replicationFactorForScopeIndex = Integer.parseInt( + config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX.getName(), Property.ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX.getDefaultValue())); + int numShardsForScopeIndex = Integer.parseInt( + config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX.getName(), Property.ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX.getDefaultValue())); + + esUtils.createIndexIfNotExists(_esRestClient, + SCOPE_INDEX_NAME, + replicationFactorForScopeIndex, + numShardsForScopeIndex, + () -> _createScopeMappingsNode()); - _createIndexIfNotExists(SCOPE_INDEX_NAME, _replicationFactorForScopeIndex, _numShardsForScopeIndex, - () -> _createScopeMappingsNode()); + /** Set up metatags index stuff */ + indexMetatagsMapper = _getMetatagsObjectMapper(new MetatagsSchemaRecordList.IndexSerializer()); + METATAGS_INDEX_NAME = config.getValue(Property.ELASTICSEARCH_METATAGS_INDEX_NAME.getName(), + Property.ELASTICSEARCH_METATAGS_INDEX_NAME.getDefaultValue()); + METATAGS_TYPE_NAME = config.getValue(Property.ELASTICSEARCH_METATAGS_TYPE_NAME.getName(), + Property.ELASTICSEARCH_METATAGS_TYPE_NAME.getDefaultValue()); + int replicationFactorForMetatagsIndex = Integer.parseInt( + config.getValue(Property.ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX.getName(), + Property.ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX.getDefaultValue())); + int numShardsForMetatagsIndex = Integer.parseInt( + config.getValue(Property.ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX.getName(), + Property.ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX.getDefaultValue())); - _createIndexIfNotExists(SCOPE_AND_METRIC_INDEX_NAME, _replicationFactorForScopeAndMetricIndex, - _numShardsForScopeAndMetricIndex, () -> _createScopeAndMetricMappingsNode()); + esUtils.createIndexIfNotExists(_esRestClient, + METATAGS_INDEX_NAME, + replicationFactorForMetatagsIndex, + numShardsForMetatagsIndex, + () -> _createMetatagsMappingsNode()); - _createIndexIfNotExists(METATAGS_INDEX_NAME, _replicationFactorForMetatagsIndex, - _numShardsForMetatagsIndex, () -> _createMetatagsMappingsNode()); } - @Override public void dispose() { super.dispose(); @@ -284,103 +243,74 @@ public Properties getServiceProperties() { } @Override - protected void implementationSpecificPut(List metrics, - Set scopeNames, - Set> scopesAndMetricNames, - Map metatagsToPut) { - SystemAssert.requireArgument(metrics != null, "Metrics list cannot be null."); + protected void implementationSpecificPut(Set metricsToIndex, + Set scopesToIndex, + Set metatagsToIndex) { + SystemAssert.requireArgument(metricsToIndex != null, "Metrics list cannot be null."); - // Put metric tags + // Push to metadata index + int successCount = 0; long start = System.currentTimeMillis(); - List> fracturedList = _fracture(metrics); - - int count = 0; - 
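For context: the constructor above now delegates index bootstrap to ElasticSearchUtils.createIndexIfNotExists, whose body is not part of this diff. Below is only a minimal sketch of what such a helper could look like against the ES 6.x low-level REST client; the class name EsIndexBootstrap, the Supplier<ObjectNode> mappings parameter, and the HEAD/PUT handling are assumptions for illustration, not the actual ElasticSearchUtils code.

import java.io.IOException;
import java.util.function.Supplier;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.http.HttpStatus;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

public final class EsIndexBootstrap {

    /** Creates the index with the given settings and mappings unless it already exists. */
    public static void createIndexIfNotExists(RestClient client, String indexName,
            int replicationFactor, int numShards, Supplier<ObjectNode> mappingsSupplier)
            throws IOException {
        try {
            // HEAD /<index> answers 2xx when the index exists.
            client.performRequest(new Request("HEAD", "/" + indexName));
            return;
        } catch (ResponseException re) {
            // Anything other than 404 is a genuine failure; rethrow it.
            if (re.getResponse().getStatusLine().getStatusCode() != HttpStatus.SC_NOT_FOUND) {
                throw re;
            }
        }
        // 404: build {"settings": {...}, "mappings": {...}} and PUT the index.
        ObjectNode root = new ObjectMapper().createObjectNode();
        ObjectNode settings = root.putObject("settings");
        settings.put("number_of_replicas", replicationFactor);
        settings.put("number_of_shards", numShards);
        root.set("mappings", mappingsSupplier.get());

        Request create = new Request("PUT", "/" + indexName);
        create.setEntity(new StringEntity(root.toString(), ContentType.APPLICATION_JSON));
        client.performRequest(create);
    }
}

Pulling a helper of this shape out of the service is what lets the constructor shed the per-index boilerplate the removed lines used to carry: each index just passes its own name, replica/shard counts, and mappings supplier.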
for(List records : fracturedList) { - if(!records.isEmpty()) { - upsert(records); - count += records.size(); - } - } - - _monitorService.modifyCounter(MonitorService.Counter.SCHEMARECORDS_WRITTEN, count, null); - _monitorService.modifyCounter(MonitorService.Counter.SCHEMARECORDS_WRITE_LATENCY, - (System.currentTimeMillis() - start), - null); - - _logger.info("{} new metrics were indexed in {} ms.", count, (System.currentTimeMillis() - start)); - - // Put scopes - start = System.currentTimeMillis(); - List> fracturedScopesList = _fractureScopes(scopeNames); - - count = 0; - for(List records : fracturedScopesList) { - if(!records.isEmpty()) { - upsertScopes(records); - count += records.size(); - } + long timeTaken; + List> fracturedSchemas = _fracture(metricsToIndex); + for(Set records : fracturedSchemas) { + if(!records.isEmpty()) { + Set failedRecords = doBulkIndex(TAGS_INDEX_NAME, TAGS_TYPE_NAME, new MetricSchemaRecordList(records, _idgenHashAlgo), indexMetadataMapper); + records.removeAll(failedRecords); + _addToModifiedBloom(records); + successCount += records.size(); + } } - _monitorService.modifyCounter(MonitorService.Counter.SCOPENAMES_WRITTEN, count, null); - _monitorService.modifyCounter(MonitorService.Counter.SCOPENAMES_WRITE_LATENCY, - (System.currentTimeMillis() - start), - null); - - _logger.info("{} new scopes were indexed in {} ms.", count, (System.currentTimeMillis() - start)); + timeTaken = System.currentTimeMillis() - start; + _monitorService.modifyCounter(MonitorService.Counter.SCHEMARECORDS_WRITTEN, successCount, null); + _monitorService.modifyCounter(MonitorService.Counter.SCHEMARECORDS_WRITE_LATENCY, timeTaken, null); + _logger.info("{} schema records sent to ES and bloomFilter in {} ms.", successCount, timeTaken); - // Put scopes+metrics + // Push to scope-only index + successCount = 0; start = System.currentTimeMillis(); - List> fracturedScopesAndMetricsList = - _fractureScopeAndMetrics(scopesAndMetricNames); - - count = 0; - for(List records : fracturedScopesAndMetricsList) { - if(!records.isEmpty()) { - upsertScopeAndMetrics(records); - count += records.size(); - } + List> fracturedScopes = _fractureScopes(scopesToIndex); + for(Set records : fracturedScopes) { + if(!records.isEmpty()) { + Set failedRecords = doBulkIndex(SCOPE_INDEX_NAME, SCOPE_TYPE_NAME, new ScopeOnlySchemaRecordList(records, _idgenHashAlgo), indexScopeOnlyMapper); + records.removeAll(failedRecords); + _addToModifiedBloom(records); + successCount += records.size(); + } } - _monitorService.modifyCounter(MonitorService.Counter.SCOPEANDMETRICNAMES_WRITTEN, count, null); - _monitorService.modifyCounter(Counter.SCOPEANDMETRICNAMES_WRITE_LATENCY, - (System.currentTimeMillis() - start), - null); - - _logger.info("{} new scope and metric names were indexed in {} ms.", - count, - (System.currentTimeMillis() - start)); + timeTaken = System.currentTimeMillis() - start; + _monitorService.modifyCounter(MonitorService.Counter.SCOPENAMES_WRITTEN, successCount, null); + _monitorService.modifyCounter(MonitorService.Counter.SCOPENAMES_WRITE_LATENCY, timeTaken, null); + _logger.info("{} scopes sent to ES and bloomFilter in {} ms.", successCount, timeTaken); - // Put Metric MetatagsRecord + // Push to metatags index + successCount = 0; start = System.currentTimeMillis(); - List> fracturedMetatagsList = - _fractureMetatags(metatagsToPut); - - count = 0; - for(List records : fracturedMetatagsList) { - if(!records.isEmpty()) { - upsertMetatags(records); - count += records.size(); - } + List> fracturedMetatags = 
_fractureMetatags(metatagsToIndex); + for(Set records : fracturedMetatags) { + if(!records.isEmpty()) { + Set failedRecords = doBulkIndex(METATAGS_INDEX_NAME, METATAGS_TYPE_NAME, new MetatagsSchemaRecordList(records, _idgenHashAlgo), indexMetatagsMapper); + records.removeAll(failedRecords); + _addToModifiedBloom(records); + successCount += records.size(); + } } - _monitorService.modifyCounter(MonitorService.Counter.METATAGS_WRITTEN, count, null); - _monitorService.modifyCounter(Counter.METATAGS_WRITE_LATENCY, - (System.currentTimeMillis() - start), - null); - - _logger.info("{} new metatags were indexed in {} ms.", - count, - (System.currentTimeMillis() - start)); - + timeTaken = System.currentTimeMillis() - start; + _monitorService.modifyCounter(MonitorService.Counter.METATAGS_WRITTEN, successCount, null); + _monitorService.modifyCounter(Counter.METATAGS_WRITE_LATENCY, timeTaken, null); + _logger.info("{} metatags sent to ES and bloomFilter in {} ms.", successCount, timeTaken); } /* Convert the given list of metrics to a list of metric schema records. At the same time, fracture the records list * if its size is greater than ELASTICSEARCH_INDEXING_BATCH_SIZE. */ - protected List> _fracture(List metrics) { - List> fracturedList = new ArrayList<>(); + protected List> _fracture(Set metrics) { + List> fracturedList = new ArrayList<>(); - List records = new ArrayList<>(_bulkIndexingSize); + Set records = new HashSet<>(_bulkIndexingSize); for(Metric metric : metrics) { if(metric.getTags().isEmpty()) { MetricSchemaRecord msr = new MetricSchemaRecord(metric.getScope(), metric.getMetric()); @@ -388,7 +318,7 @@ protected List> _fracture(List metrics) { records.add(msr); if(records.size() == _bulkIndexingSize) { fracturedList.add(records); - records = new ArrayList<>(_bulkIndexingSize); + records = new HashSet<>(_bulkIndexingSize); } continue; } @@ -412,7 +342,7 @@ protected List> _fracture(List metrics) { retentionInt)); if(records.size() == _bulkIndexingSize) { fracturedList.add(records); - records = new ArrayList<>(_bulkIndexingSize); + records = new HashSet<>(_bulkIndexingSize); } } } @@ -424,42 +354,19 @@ protected List> _fracture(List metrics) { return fracturedList; } - /* Convert the given list of scope and metric names to a list of scope and metric only schema records. - * At the same time, fracture the records list if its size is greater than ELASTICSEARCH_INDEXING_BATCH_SIZE. - */ - protected List> _fractureScopeAndMetrics(Set> scopesAndMetricNames) { - List> fracturedList = new ArrayList<>(); - - List records = new ArrayList<>(_bulkIndexingSize); - for(Pair scopeAndMetric : scopesAndMetricNames) { - records.add(new ScopeAndMetricOnlySchemaRecord(scopeAndMetric.getLeft(), scopeAndMetric.getRight())); - - if(records.size() == _bulkIndexingSize) { - fracturedList.add(records); - records = new ArrayList<>(_bulkIndexingSize); - } - } - - if(!records.isEmpty()) { - fracturedList.add(records); - } - - return fracturedList; - } - /* Convert the given list of scopes to a list of scope only schema records. At the same time, fracture the records list * if its size is greater than ELASTICSEARCH_INDEXING_BATCH_SIZE. 
*/ - protected List> _fractureScopes(Set scopeNames) { - List> fracturedList = new ArrayList<>(); + protected List> _fractureScopes(Set scopeNames) { + List> fracturedList = new ArrayList<>(); - List records = new ArrayList<>(_bulkIndexingSize); + Set records = new HashSet<>(_bulkIndexingSize); for(String scope : scopeNames) { records.add(new ScopeOnlySchemaRecord(scope)); if(records.size() == _bulkIndexingSize) { fracturedList.add(records); - records = new ArrayList<>(_bulkIndexingSize); + records = new HashSet<>(_bulkIndexingSize); } } @@ -470,19 +377,19 @@ protected List> _fractureScopes(Set scopeNam return fracturedList; } - protected List> _fractureMetatags(Map metatagsToPut) { - List> fracturedList = new ArrayList<>(); - - List records = new ArrayList<>(_bulkIndexingSize); - for(Map.Entry entry : metatagsToPut.entrySet()) { - //remove this special metatag to prevent it from going to ES - entry.getValue().removeMetatag(RETENTION_DISCOVERY); - MetatagsRecord mtag = new MetatagsRecord(entry.getValue().getMetatags(), entry.getValue().getKey()); - records.add(mtag); - if(records.size() == _bulkIndexingSize) { - fracturedList.add(records); - records = new ArrayList<>(_bulkIndexingSize); - } + protected List> _fractureMetatags(Set metatagsToPut) { + List> fracturedList = new ArrayList<>(); + + Set records = new HashSet<>(_bulkIndexingSize); + for(MetatagsRecord record: metatagsToPut) { + //remove this special metatag to prevent it from going to ES + record.removeMetatag(RETENTION_DISCOVERY); + MetatagsRecord mtag = new MetatagsRecord(record.getMetatags(), record.getKey()); + records.add(mtag); + if(records.size() == _bulkIndexingSize) { + fracturedList.add(records); + records = new HashSet<>(_bulkIndexingSize); + } } if(!records.isEmpty()) { @@ -494,88 +401,117 @@ protected List> _fractureMetatags(Map get(MetricSchemaRecordQuery query) { - requireNotDisposed(); - SystemAssert.requireArgument(query != null, "MetricSchemaRecordQuery cannot be null."); - long size = (long) query.getLimit() * query.getPage(); - SystemAssert.requireArgument(size > 0 && size <= Integer.MAX_VALUE, - "(limit * page) must be greater than 0 and atmost Integer.MAX_VALUE"); - - - Map tags = new HashMap<>(); - tags.put("type", "REGEXP_WITHOUT_AGGREGATION"); - long start = System.currentTimeMillis(); - boolean scroll = false; - StringBuilder sb = new StringBuilder().append("/") - .append(INDEX_NAME) - .append("/") - .append(TYPE_NAME) - .append("/") - .append("_search"); - - int from = 0, scrollSize; - if(query.getLimit() * query.getPage() > 10000) { - sb.append("?scroll=").append(KEEP_SCROLL_CONTEXT_OPEN_FOR); - scroll = true; - int total = query.getLimit() * query.getPage(); - scrollSize = (int) (total / (total / 10000 + 1)); - } else { - from = query.getLimit() * (query.getPage() - 1); - scrollSize = query.getLimit(); + requireNotDisposed(); + SystemAssert.requireArgument(query != null, "MetricSchemaRecordQuery cannot be null."); + SystemAssert.requireArgument(query.getLimit() >= 0, "Limit must be >= 0"); + SystemAssert.requireArgument(query.getPage() >= 1, "Page must be >= 1"); + try { + Math.multiplyExact(query.getLimit(), query.getPage()); + } catch (ArithmeticException ex) { + SystemAssert.requireArgument(false, "(limit * page) cannot result in int overflow"); } - String requestUrl = sb.toString(); - String queryJson = _constructTermQuery(query, from, scrollSize); + Map tags = new HashMap<>(); + tags.put("type", "REGEXP_WITHOUT_AGGREGATION"); + tags.put("user",
Optional.ofNullable(MDC.get("USER")).orElse("NULLUSER")); + long start = System.currentTimeMillis(); + StringBuilder sb = new StringBuilder().append(String.format("/%s/%s/_search", TAGS_INDEX_NAME, TAGS_TYPE_NAME)); + List finalResult; try { - _logger.debug("get POST requestUrl {} queryJson {}", requestUrl, queryJson); - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(queryJson)); - - MetricSchemaRecordList list = toEntity(extractResponse(response), new TypeReference() {}); - - if(scroll) { - requestUrl = new StringBuilder().append("/").append("_search").append("/").append("scroll").toString(); + /* + If the limit is 0, this is an unbounded query from MetricQueryProcessor + It is unknown whether the matched doc count will be <= ES window limit or greater at this point + First, send a non-scroll request to get the doc count + If the total doc count is > ES window limit, re-send the request with ?scroll and start scrolling + [ Need to re-ask for the entire first 10k since ordering / eliminating seen-documents is not guaranteed without scroll ] + Else return + */ + if (query.getLimit() == 0) { + MetricSchemaRecordList list = _getRecords(sb.toString(), _constructTermQuery(query, 0, ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW)); + if (list.getTotalHits() > ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW) { + sb.append("?scroll=").append(KEEP_SCROLL_CONTEXT_OPEN_FOR); + list = _getRecords(sb.toString(), _constructTermQuery(query, 0, ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW)); + List records = new LinkedList<>(list.getRecords()); + _appendScrollRecordsUntilCountOrEnd(records, list.getScrollID(), query.getLimit() * query.getPage(), ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW); + finalResult = records; + } else { + finalResult = list.getRecords(); + } + _monitorService.modifyCounter(Counter.SCHEMARECORDS_DOCS_PULLED, finalResult.size(), tags); + } + // If the user manually asks for a much later page and/or a high limit past the ES window limit, a scroll is mandatory + else if (query.getLimit() * query.getPage() > ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW) { + sb.append("?scroll=").append(KEEP_SCROLL_CONTEXT_OPEN_FOR); + MetricSchemaRecordList list = _getRecords(sb.toString(), _constructTermQuery(query, 0, ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW)); List records = new LinkedList<>(list.getRecords()); + _appendScrollRecordsUntilCountOrEnd(records, list.getScrollID(), query.getLimit() * query.getPage(), ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW); - while(true) { - String scrollID = list.getScrollID(); - - Map requestBody = new HashMap<>(); - requestBody.put("scroll_id", scrollID); - requestBody.put("scroll", KEEP_SCROLL_CONTEXT_OPEN_FOR); - - String requestJson = new ObjectMapper().writeValueAsString(requestBody); - _logger.debug("get Scroll POST requestUrl {} queryJson {}", requestUrl, queryJson); - response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(requestJson)); + int fromIndex = query.getLimit() * (query.getPage() - 1); + if (records.size() <= fromIndex) { + finalResult = Collections.emptyList(); + } else { + finalResult = records.subList(fromIndex, records.size()); + } + _monitorService.modifyCounter(Counter.SCHEMARECORDS_DOCS_PULLED, records.size(), tags); + } + // Otherwise no need to scroll + else { + int from = query.getLimit() * (query.getPage() - 1); + MetricSchemaRecordList list = _getRecords(sb.toString(), _constructTermQuery(query, from, 
ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW)); + finalResult = list.getRecords(); + _monitorService.modifyCounter(Counter.SCHEMARECORDS_DOCS_PULLED, finalResult.size(), tags); + } + _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_COUNT, 1, tags); + _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); + } catch (UnsupportedEncodingException | JsonProcessingException e) { + throw new SystemException("Search failed.", e); + } catch (IOException e) { + throw new SystemException("IOException when trying to perform ES request.", e); + } + return finalResult; + } - list = toEntity(extractResponse(response), new TypeReference() {}); - records.addAll(list.getRecords()); + MetricSchemaRecordList _getRecords(String requestUrl, String queryJson) throws IOException { + _logger.debug("get POST requestUrl {} queryJson {}", requestUrl, queryJson); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); - if(records.size() >= query.getLimit() * query.getPage() || list.getRecords().size() < scrollSize) { - break; - } - } + String esResponse = extractResponse(response); + logAndMonitorESFailureResponses(esResponse); + return toEntity(esResponse, new TypeReference() {}); + } - int fromIndex = query.getLimit() * (query.getPage() - 1); - if(records.size() <= fromIndex) { - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_COUNT, 1, tags); - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); - return Collections.emptyList(); - } + /** + * Appends documents to records argument by using ES Scroll API + * @param records List to mutate and add scrolled records to + * @param startingScrollId Starting scroll ID + * @param count User-provided total count of docs to add to records (0 if unbounded) + * @param scrollSize ES request "size" parameter + * @throws IOException + */ + void _appendScrollRecordsUntilCountOrEnd(List records, String startingScrollId, int count, int scrollSize) throws IOException { + Map requestBody = new HashMap<>(); + requestBody.put("scroll", KEEP_SCROLL_CONTEXT_OPEN_FOR); + String scrollId = startingScrollId; + + while (true) { + requestBody.put("scroll_id", scrollId); + String requestJson = genericObjectMapper.writeValueAsString(requestBody); + Request request = new Request(HttpMethod.POST.getName(), SCROLL_ENDPOINT); + request.setEntity(new StringEntity(requestJson, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_COUNT, 1, tags); - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); - return records.subList(fromIndex, records.size()); + MetricSchemaRecordList list = toEntity(extractResponse(response), new TypeReference() {}); + records.addAll(list.getRecords()); + scrollId = list.getScrollID(); - } else { - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_COUNT, 1, tags); - _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); - return list.getRecords(); + // If total records retrieved is greater than what the user manually asked for + // Or if we are on the last scroll page + if(count != 0 && records.size() >= count || list.getRecords().size() < scrollSize) { + break; }
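For readers unfamiliar with the scroll contract that _getRecords and _appendScrollRecordsUntilCountOrEnd drive, here is a standalone sketch of the same exchange with the Argus helper types stripped away. It is illustrative only: it assumes an ES 6.x cluster, the low-level RestClient, and Jackson, and a production version would also delete the scroll context when finished rather than letting it expire.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public final class ScrollSketch {

    /** Pages through every hit of queryJson against the given index via the scroll API. */
    static List<JsonNode> scrollAll(RestClient client, String index, String queryJson)
            throws IOException {
        ObjectMapper om = new ObjectMapper();
        List<JsonNode> hits = new ArrayList<>();

        // The initial search opens the scroll context and returns the first page.
        Request first = new Request("POST", "/" + index + "/_search?scroll=1m");
        first.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON));
        JsonNode page = om.readTree(EntityUtils.toString(client.performRequest(first).getEntity()));

        // Each follow-up replays the returned _scroll_id against /_search/scroll
        // until a page comes back empty.
        while (page.at("/hits/hits").size() > 0) {
            page.at("/hits/hits").forEach(hits::add);

            ObjectNode body = om.createObjectNode();
            body.put("scroll", "1m");
            body.put("scroll_id", page.get("_scroll_id").asText());
            Request next = new Request("POST", "/_search/scroll");
            next.setEntity(new StringEntity(body.toString(), ContentType.APPLICATION_JSON));
            page = om.readTree(EntityUtils.toString(client.performRequest(next).getEntity()));
        }
        return hits;
    }
}

The Argus helper differs from this sketch in two deliberate ways: it stops early once count documents have been collected, and it treats a short page (fewer hits than scrollSize) as the end marker instead of waiting for an empty page.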
catch (UnsupportedEncodingException | JsonProcessingException e) { - throw new SystemException("Search failed.", e); - } catch (IOException e) { - throw new SystemException("IOException when trying to perform ES request.", e); - } } } @@ -602,20 +538,14 @@ public List getUnique(MetricSchemaRecordQuery query, RecordT tags.put("type", "REGEXP_WITH_AGGREGATION"); long start = System.currentTimeMillis(); - String indexName = INDEX_NAME; - String typeName = TYPE_NAME; + String indexName = TAGS_INDEX_NAME; + String typeName = TAGS_TYPE_NAME; if (query.isQueryOnlyOnScope() && RecordType.SCOPE.equals(type)) { indexName = SCOPE_INDEX_NAME; typeName = SCOPE_TYPE_NAME; } - else if (_useScopeMetricNamesIndex && query.isQueryOnlyOnScopeAndMetric() && - (RecordType.SCOPE.equals(type) || RecordType.METRIC.equals(type))) - { - indexName = SCOPE_AND_METRIC_INDEX_NAME; - typeName = SCOPE_AND_METRIC_TYPE_NAME; - } String requestUrl = new StringBuilder().append("/") .append(indexName) .append("/") .append(typeName) .append("/") .append("_search") .toString(); @@ -629,7 +559,10 @@ else if (_useScopeMetricNamesIndex && query.isQueryOnlyOnScopeAndMetric() && String queryJson = _constructTermAggregationQuery(query, type); _logger.debug("getUnique POST requestUrl {} queryJson {}", requestUrl, queryJson); - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(queryJson)); + + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); String str = extractResponse(response); List records = SchemaService.constructMetricSchemaRecordsForType(toEntity(str, new TypeReference>() {}), type); @@ -637,10 +570,6 @@ else if (_useScopeMetricNamesIndex && query.isQueryOnlyOnScopeAndMetric() && _monitorService.modifyCounter(Counter.SCOPENAMES_QUERY_COUNT, 1, tags); _monitorService.modifyCounter(Counter.SCOPENAMES_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); - } else if (_useScopeMetricNamesIndex && query.isQueryOnlyOnScopeAndMetric() && - (RecordType.SCOPE.equals(type) || RecordType.METRIC.equals(type))) { - _monitorService.modifyCounter(Counter.SCOPEANDMETRICNAMES_QUERY_COUNT, 1, tags); - _monitorService.modifyCounter(Counter.SCOPEANDMETRICNAMES_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); } else { _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_COUNT, 1, tags); _monitorService.modifyCounter(Counter.SCHEMARECORDS_QUERY_LATENCY, (System.currentTimeMillis() - start), tags); @@ -677,9 +606,9 @@ public List keywordSearch(KeywordQuery kq) { tags.put("type", "FTS_WITH_AGGREGATION"); long start = System.currentTimeMillis(); StringBuilder sb = new StringBuilder().append("/") - .append(INDEX_NAME) + .append(TAGS_INDEX_NAME) .append("/") - .append(TYPE_NAME) + .append(TAGS_TYPE_NAME) .append("/") .append("_search"); try { @@ -687,12 +616,12 @@ public List keywordSearch(KeywordQuery kq) { if(kq.getQuery() != null) { int from = 0, scrollSize = 0; + boolean scroll = false; - if(kq.getLimit() * kq.getPage() > 10000) { + if(kq.getLimit() * kq.getPage() > ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW) { sb.append("?scroll=").append(KEEP_SCROLL_CONTEXT_OPEN_FOR); scroll = true; - int total = kq.getLimit() * kq.getPage(); - scrollSize = (int) (total / (total / 10000 + 1)); + scrollSize = ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW; } else { from = kq.getLimit() * (kq.getPage() - 1); scrollSize = kq.getLimit(); @@ -702,7 +631,9 @@ public List
keywordSearch(KeywordQuery kq) { String queryJson = _constructQueryStringQuery(tokens, from, scrollSize); String requestUrl = sb.toString(); - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(queryJson)); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); String strResponse = extractResponse(response); MetricSchemaRecordList list = toEntity(strResponse, new TypeReference() {}); @@ -715,9 +646,9 @@ public List keywordSearch(KeywordQuery kq) { requestBody.put("scroll_id", list.getScrollID()); requestBody.put("scroll", KEEP_SCROLL_CONTEXT_OPEN_FOR); - response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), - new StringEntity(new ObjectMapper().writeValueAsString(requestBody))); - + request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(genericObjectMapper.writeValueAsString(requestBody), ContentType.APPLICATION_JSON)); + response = _esRestClient.performRequest(request); list = toEntity(extractResponse(response), new TypeReference() {}); records.addAll(list.getRecords()); @@ -775,7 +706,10 @@ public List keywordSearch(KeywordQuery kq) { String queryJson = _constructQueryStringQuery(kq, tokensMap); String requestUrl = sb.toString(); - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(queryJson)); + + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(queryJson, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); String strResponse = extractResponse(response); List records = SchemaService.constructMetricSchemaRecordsForType( @@ -813,14 +747,16 @@ private List _analyzedTokens(String query) { List tokens = new ArrayList<>(); - String requestUrl = new StringBuilder("/").append(INDEX_NAME).append("/_analyze").toString(); + String requestUrl = new StringBuilder("/").append(TAGS_INDEX_NAME).append("/_analyze").toString(); String requestBody = "{\"analyzer\" : \"metadata_analyzer\", \"text\": \"" + query + "\" }"; try { - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(requestBody)); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); String strResponse = extractResponse(response); - JsonNode tokensNode = _mapper.readTree(strResponse).get("tokens"); + JsonNode tokensNode = indexMetadataMapper.readTree(strResponse).get("tokens"); if(tokensNode.isArray()) { for(JsonNode tokenNode : tokensNode) { tokens.add(tokenNode.get("token").asText()); @@ -833,22 +769,24 @@ private List _analyzedTokens(String query) { } } - protected void upsert(List records) { - String requestUrl = new StringBuilder().append("/") - .append(INDEX_NAME) - .append("/") - .append(TYPE_NAME) - .append("/") - .append("_bulk") - .toString(); - - String strResponse = ""; + /** + * The generic bulk index method for any one of our existing indices + * @param indexName + * @param typeName + * @param recordFinder + * @param mapper + * @param + * @return + */ + Set doBulkIndex(String indexName, String 
typeName, RecordFinder recordFinder, ObjectMapper mapper) { + String requestUrl = String.format("/%s/%s/_bulk", indexName, typeName); + String strResponse; - MetricSchemaRecordList msrList = new MetricSchemaRecordList(records, _idgenHashAlgo); try { - String requestBody = _mapper.writeValueAsString(msrList); - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, - Collections.emptyMap(), new StringEntity(requestBody)); + String requestBody = mapper.writeValueAsString(recordFinder); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + Response response = _esRestClient.performRequest(request); strResponse = extractResponse(response); } catch (IOException e) { //TODO: Retry with exponential back-off for handling EsRejectedExecutionException/RemoteTransportException/TimeoutException?? @@ -856,379 +794,58 @@ protected void upsert(List records) { } try { - PutResponse putResponse = new ObjectMapper().readValue(strResponse, PutResponse.class); + Set failedRecords = new HashSet<>(); + PutResponse putResponse = genericObjectMapper.readValue(strResponse, PutResponse.class); //TODO: If response contains HTTP 429 Too Many Requests (EsRejectedExecutionException), then retry with exponential back-off. if(putResponse.errors) { - List recordsToRemove = new ArrayList<>(); - List updateMtsFieldList = new ArrayList<>(); for(Item item : putResponse.items) { - if(item.create != null) { - if(item.create.status == HttpStatus.SC_CONFLICT) { - updateMtsFieldList.add(item.create._id); - }else if(item.create.status != HttpStatus.SC_CREATED) { - _logger.warn("Failed to index metric {}. Reason: {}", msrList.getRecord(item.create._id), - new ObjectMapper().writeValueAsString(item.create.error)); - recordsToRemove.add(msrList.getRecord(item.create._id)); - } - } - } - if(updateMtsFieldList.size()>0) { - _logger.debug("mts filed will be updated for docs with ids {}", updateMtsFieldList); - Response response = updateMtsField(updateMtsFieldList,INDEX_NAME,TYPE_NAME, msrList); - PutResponse updateResponse = new ObjectMapper().readValue(extractResponse(response), PutResponse.class); - for(Item item: updateResponse.items) { - if(item.update != null && item.update.status != HttpStatus.SC_OK) { - _logger.debug("Failed to update mts field for metric {}. Reason: {}",msrList.getRecord(item.update._id), - new ObjectMapper().writeValueAsString(item.update.error)); - recordsToRemove.add(msrList.getRecord(item.update._id)); - } - } + if (item.index !=null && item.index.status != HttpStatus.SC_CREATED) { + _logger.warn("Failed to add record {} to index {}. Reason: {}", recordFinder.getRecord(item.index._id), indexName, genericObjectMapper.writeValueAsString(item.index.error)); + failedRecords.add(recordFinder.getRecord(item.index._id)); + } } - if(recordsToRemove.size() != 0) { - _logger.warn("{} records were not written to ES", recordsToRemove.size()); - records.removeAll(recordsToRemove); + if (failedRecords.size() != 0) { + _logger.warn("{} records were not written to index {}", failedRecords.size(), indexName); } } - //add to bloom filter - _addToBloomFilter(records); - + return failedRecords; } catch(IOException e) { throw new SystemException("Failed to parse reponse of put metrics. 
The response was: " + strResponse, e); } } - protected void upsertScopeAndMetrics(List records) { - String requestUrl = new StringBuilder().append("/") - .append(SCOPE_AND_METRIC_INDEX_NAME) - .append("/") - .append(SCOPE_AND_METRIC_TYPE_NAME) - .append("/") - .append("_bulk") - .toString(); - - try { - - ScopeAndMetricOnlySchemaRecordList createSchemaRecordList = new ScopeAndMetricOnlySchemaRecordList(records, _idgenHashAlgo); - String requestBody = _createScopeAndMetricOnlyMapper.writeValueAsString(createSchemaRecordList); - PutResponse putResponse = _performRequest(requestUrl, requestBody); - - Pair, List> failedResponses = _parseFailedResponses(putResponse); - - List failedIds = failedResponses.getLeft(); - List updateRequiredIds = failedResponses.getRight(); - - if (updateRequiredIds.size() > 0) { - - List updateRequiredRecords = new ArrayList<>(); - - for (String id : updateRequiredIds) { - updateRequiredRecords.add(createSchemaRecordList.getRecord(id)); - } - - ScopeAndMetricOnlySchemaRecordList updateSchemaRecordList = new ScopeAndMetricOnlySchemaRecordList(updateRequiredRecords, _idgenHashAlgo); - requestBody = _updateScopeAndMetricOnlyMapper.writeValueAsString(updateSchemaRecordList); - putResponse = _performRequest(requestUrl, requestBody); - - failedResponses = _parseFailedResponses(putResponse); - - // We collect new failures. - failedIds.addAll(failedResponses.getLeft()); - - // We do not collect update failures if they fail with 409 (version_conflict_engine_exception). - // This usually happens when there is another concurrent update happening to mts field" - } - - if (failedIds.size() > 0) { - _logger.warn("{} records were not written to scope and metric ES", failedIds.size()); - } - - for (String id : failedIds) { - records.remove(createSchemaRecordList.getRecord(id)); - } - - //add to bloom filter - _addToBloomFilterScopeAndMetricOnly(records); - - } catch (IOException e) { - throw new SystemException("Failed to create/update scope and metric ES. ", e); - } - } - - protected void upsertScopes(List records) { - - String requestUrl = new StringBuilder().append("/") - .append(SCOPE_INDEX_NAME) - .append("/") - .append(SCOPE_TYPE_NAME) - .append("/") - .append("_bulk") - .toString(); - - try { - - ScopeOnlySchemaRecordList createSchemaRecordList = new ScopeOnlySchemaRecordList(records, _idgenHashAlgo); - String requestBody = _createScopeOnlyMapper.writeValueAsString(createSchemaRecordList); - PutResponse putResponse = _performRequest(requestUrl, requestBody); - - Pair, List> failedResponses = _parseFailedResponses(putResponse); - - List failedIds = failedResponses.getLeft(); - List updateRequiredIds = failedResponses.getRight(); - - if (updateRequiredIds.size() > 0) { - - List updateRequiredRecords = new ArrayList<>(); - - for (String id : updateRequiredIds) { - updateRequiredRecords.add(createSchemaRecordList.getRecord(id)); - } - - ScopeOnlySchemaRecordList updateSchemaRecordList = new ScopeOnlySchemaRecordList(updateRequiredRecords, _idgenHashAlgo); - requestBody = _updateScopeOnlyMapper.writeValueAsString(updateSchemaRecordList); - putResponse = _performRequest(requestUrl, requestBody); - - failedResponses = _parseFailedResponses(putResponse); - - // We collect new failures. - failedIds.addAll(failedResponses.getLeft()); - - // We do not collect update failures if they fail with 409 (version_conflict_engine_exception). 
- // This usually happens when there is another concurrent update happening to mts field" - } - - if (failedIds.size() > 0) { - _logger.warn("{} records were not written to scope ES", failedIds.size()); - } - - for(String id : failedIds) { - records.remove(createSchemaRecordList.getRecord(id)); - } - - //add to bloom filter - _addToBloomFilterScopeOnly(records); - - } catch (IOException e) { - throw new SystemException("Failed to create/update scope ES. ", e); - } - } - - protected void upsertMetatags(List records) { - String requestUrl = new StringBuilder().append("/") - .append(METATAGS_INDEX_NAME) - .append("/") - .append(METATAGS_TYPE_NAME) - .append("/") - .append("_bulk") - .toString(); - - try { - MetatagsSchemaRecordList createMetatagsSchemaRecordList = - new MetatagsSchemaRecordList(records, - _idgenHashAlgo); - String requestBody = _createMetatagsMapper.writeValueAsString(createMetatagsSchemaRecordList); - PutResponse putResponse = _performRequest(requestUrl, requestBody); - - Pair, List> failedResponses = _parseFailedResponses(putResponse); - - List failedIds = failedResponses.getLeft(); - List updateRequiredIds = failedResponses.getRight(); - - if (updateRequiredIds.size() > 0) { - List updateRequiredRecords = new ArrayList<>(); - for (String id : updateRequiredIds) { - updateRequiredRecords.add(createMetatagsSchemaRecordList.getRecord(id)); - } - MetatagsSchemaRecordList updateMetatagsSchemaRecordList = - new MetatagsSchemaRecordList(updateRequiredRecords, _idgenHashAlgo); - requestBody = _updateMetatagsMapper.writeValueAsString(updateMetatagsSchemaRecordList); - putResponse = _performRequest(requestUrl, requestBody); - - failedResponses = _parseFailedResponses(putResponse); - - // We collect new failures. - failedIds.addAll(failedResponses.getLeft()); - - // We do not collect update failures if they fail with 409 (version_conflict_engine_exception). - // This usually happens when there is another concurrent update happening to mts field" - } - - if (failedIds.size() > 0) { - _logger.warn("{} records were not written to metatags ES", failedIds.size()); - } - - for(String id : failedIds) { - records.remove(createMetatagsSchemaRecordList.getRecord(id)); - } - - //add to bloom filter - _addToBloomFilterMetatags(records); - - } catch (IOException e) { - throw new SystemException("Failed to create/update scope ES. ", e); - } - - } - - private PutResponse _performRequest(String requestUrl, String requestBody) throws IOException { - - String strResponse = ""; - - Response response = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), new StringEntity(requestBody)); - - //TODO: Retry with exponential back-off for handling EsRejectedExecutionException/RemoteTransportException/TimeoutException?? - - strResponse = extractResponse(response); - - PutResponse putResponse = new ObjectMapper().readValue(strResponse, PutResponse.class); - return putResponse; - } - - private Pair, List> _parseFailedResponses(PutResponse putResponse) throws IOException { - - List failedIds = new ArrayList<>(); - List updateRequiredIds = new ArrayList<>(); - - //TODO: If response contains HTTP 429 Too Many Requests (EsRejectedExecutionException), then retry with exponential back-off. 
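The retry suggested by the TODO above is still unimplemented in the new doBulkIndex. One possible shape for it is sketched below as an assumption, not as part of this change; note also that _bulk can report per-item 429s inside an HTTP 200 response, which a transport-level wrapper like this one never sees, so item-level retries would still have to be handled where PutResponse is parsed.

import java.io.IOException;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;

public final class BackoffSketch {

    /** Retries the request with exponential back-off while ES answers 429 Too Many Requests. */
    static Response performWithBackoff(RestClient client, Request request) throws IOException {
        long delayMs = 200;
        final int maxRetries = 4;
        for (int attempt = 0; ; attempt++) {
            try {
                return client.performRequest(request);
            } catch (ResponseException re) {
                int status = re.getResponse().getStatusLine().getStatusCode();
                if (status != 429 || attempt == maxRetries) {
                    throw re; // not retriable, or retry budget exhausted
                }
            }
            try {
                Thread.sleep(delayMs); // 200ms, 400ms, 800ms, 1.6s between attempts
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
                throw new IOException("Interrupted while backing off before an ES retry", ie);
            }
            delayMs *= 2;
        }
    }
}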
- if (putResponse.errors) { - for (Item item : putResponse.items) { - - if (item.create != null && item.create.status != HttpStatus.SC_CREATED) { - - if (item.create.status == HttpStatus.SC_CONFLICT) { - updateRequiredIds.add(item.create._id); - } else { - _logger.debug("Failed to create document. Reason: " + new ObjectMapper().writeValueAsString(item.create.error)); - failedIds.add(item.create._id); - } - } - - if (item.update != null && item.update.status != HttpStatus.SC_OK) { - - if (item.update.status == HttpStatus.SC_CONFLICT) { - updateRequiredIds.add(item.update._id); - } else { - _logger.warn("Failed to update document. Reason: " + new ObjectMapper().writeValueAsString(item.update.error)); - failedIds.add(item.update._id); - } - } - } - } - return Pair.of(failedIds, updateRequiredIds); - } - - protected Response updateMtsField(List docIds, String index, String type, MetricSchemaRecordList msrList) { - Response result= null; - if(docIds != null && docIds.size()>0) { - String requestUrl = new StringBuilder().append("/") - .append(index) - .append("/") - .append(type) - .append("/") - .append("_bulk") - .toString(); - try { - String requestBody = _getRequestBodyForMtsFieldUpdate(docIds, msrList, System.currentTimeMillis()); - result = _esRestClient.performRequest(HttpMethod.POST.getName(), requestUrl, Collections.emptyMap(), - new StringEntity(requestBody)); - } catch (IOException e) { - throw new SystemException(e); - } - } - return result; - } - - static String _getRequestBodyForMtsFieldUpdate(List docIds, MetricSchemaRecordList msrList, long currentTimeMillis) { - StringBuilder result = new StringBuilder(); - for(String docId:docIds) { - MetricSchemaRecord record = msrList.getRecord(docId); - if (record == null) { //this should never happen - _logger.warn("ES create response contains ID {} that was not in original request", docId); - continue; - } - - Integer retention = record.getRetentionDiscovery(); - Long expiration = currentTimeMillis + (retention==null? 
DEFAULT_RETENTION_DISCOVERY_DAYS:retention) * ONE_DAY_IN_MILLIS; - - result.append("{\"update\" : {\"_id\" : \"").append(docId).append("\" } }") - .append(System.lineSeparator()) - .append("{\"doc\" : {\"mts\": ").append(currentTimeMillis) - .append(",\"").append(EXPIRATION_TS).append("\":").append(expiration); - if (retention != null) { - result.append(",\"").append(RETENTION_DISCOVERY).append("\":").append(retention); - } - result.append("}}"); - result.append(System.lineSeparator()); - } - return result.toString(); - } - - protected void _addToBloomFilter(List records){ - _logger.info("Adding {} records into bloom filter.", records.size()); - for (MetricSchemaRecord record : records) { - String key = constructKey(record.getScope(), - record.getMetric(), - record.getTagKey(), - record.getTagValue(), - record.getNamespace(), - record.getRetentionDiscovery()==null?null:record.getRetentionDiscovery().toString()); - bloomFilter.put(key); - } - } - - protected void _addToBloomFilterScopeAndMetricOnly(List records) { - _logger.info("Adding {} records into scope and metric only bloom filter.", records.size()); - for (ScopeAndMetricOnlySchemaRecord record : records) { - String key = constructScopeAndMetricOnlyKey(record.getScope(), record.getMetric()); - bloomFilterScopeAndMetricOnly.put(key); - } - } - - protected void _addToBloomFilterScopeOnly(List records) { - _logger.info("Adding {} records into scope only bloom filter.", records.size()); - for (ScopeOnlySchemaRecord record : records) { - String key = constructScopeOnlyKey(record.getScope()); - bloomFilterScopeOnly.put(key); - } - } - - protected void _addToBloomFilterMetatags(List records) { - _logger.info("Adding {} records into metatags bloom filter.", records.size()); - for (MetatagsRecord record : records) { - String key = record.getKey(); - bloomFilterMetatags.put(key); + protected void _addToModifiedBloom(Set records) { + for (AbstractSchemaRecord record : records) { + bloomFilter.put(record.toBloomFilterKey()); } } private String _constructTermAggregationQuery(MetricSchemaRecordQuery query, RecordType type) { - ObjectMapper mapper = new ObjectMapper(); - ObjectNode queryNode = _constructQueryNode(query, mapper); + ObjectNode queryNode = _constructQueryNode(query, genericObjectMapper); long size = query.getLimit() * query.getPage(); SystemAssert.requireArgument(size > 0 && size <= Integer.MAX_VALUE, "(limit * page) must be greater than 0 and less than Integer.MAX_VALUE"); - ObjectNode aggsNode = _constructAggsNode(type, Math.max(size, 10000), mapper); + ObjectNode aggsNode = _constructAggsNode(type, Math.max(size, ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW), genericObjectMapper); - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("query", queryNode); + ObjectNode rootNode = genericObjectMapper.createObjectNode(); + rootNode.set("query", queryNode); rootNode.put("size", 0); - rootNode.put("aggs", aggsNode); + rootNode.set("aggs", aggsNode); return rootNode.toString(); } private String _constructTermQuery(MetricSchemaRecordQuery query, int from, int size) { - ObjectMapper mapper = new ObjectMapper(); - - ObjectNode queryNode = _constructQueryNode(query, mapper); + ObjectNode queryNode = _constructQueryNode(query, genericObjectMapper); - ObjectNode rootNode = _mapper.createObjectNode(); - rootNode.put("query", queryNode); + ObjectNode rootNode = indexMetadataMapper.createObjectNode(); + rootNode.set("query", queryNode); rootNode.put("from", from); rootNode.put("size", size); - return rootNode.toString(); } @@ -1238,35 
+855,31 @@ private ObjectNode _constructSimpleQueryStringNode(List tokens, RecordTy return null; } - ObjectMapper mapper = new ObjectMapper(); - StringBuilder queryString = new StringBuilder(); for(String token : tokens) { queryString.append('+').append(token).append(' '); } queryString.replace(queryString.length() - 1, queryString.length(), "*"); - ObjectNode node = mapper.createObjectNode(); - ArrayNode fieldsNode = mapper.createArrayNode(); + ObjectNode node = genericObjectMapper.createObjectNode(); + ArrayNode fieldsNode = genericObjectMapper.createArrayNode(); for(RecordType type : types) { fieldsNode.add(type.getName()); } - node.put("fields", fieldsNode); + node.set("fields", fieldsNode); node.put("query", queryString.toString()); - ObjectNode simpleQueryStringNode = mapper.createObjectNode(); - simpleQueryStringNode.put("simple_query_string", node); + ObjectNode simpleQueryStringNode = genericObjectMapper.createObjectNode(); + simpleQueryStringNode.set("simple_query_string", node); return simpleQueryStringNode; } private String _constructQueryStringQuery(List tokens, int from, int size) { - ObjectMapper mapper = new ObjectMapper(); - ObjectNode simpleQueryStringNode = _constructSimpleQueryStringNode(tokens, RecordType.values()); - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("query", simpleQueryStringNode); + ObjectNode rootNode = genericObjectMapper.createObjectNode(); + rootNode.set("query", simpleQueryStringNode); rootNode.put("from", from); rootNode.put("size", size); @@ -1274,28 +887,26 @@ private String _constructQueryStringQuery(List tokens, int from, int siz } private String _constructQueryStringQuery(KeywordQuery kq, Map> tokensMap) { - ObjectMapper mapper = new ObjectMapper(); - - ArrayNode filterNodes = mapper.createArrayNode(); + ArrayNode filterNodes = genericObjectMapper.createArrayNode(); for(Map.Entry> entry : tokensMap.entrySet()) { ObjectNode simpleQueryStringNode = _constructSimpleQueryStringNode(entry.getValue(), entry.getKey()); filterNodes.add(simpleQueryStringNode); } - ObjectNode boolNode = mapper.createObjectNode(); - boolNode.put("filter", filterNodes); + ObjectNode boolNode = genericObjectMapper.createObjectNode(); + boolNode.set("filter", filterNodes); - ObjectNode queryNode = mapper.createObjectNode(); - queryNode.put("bool", boolNode); + ObjectNode queryNode = genericObjectMapper.createObjectNode(); + queryNode.set("bool", boolNode); - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("query", queryNode); + ObjectNode rootNode = genericObjectMapper.createObjectNode(); + rootNode.set("query", queryNode); rootNode.put("size", 0); long size = kq.getLimit() * kq.getPage(); SystemAssert.requireArgument(size > 0 && size <= Integer.MAX_VALUE, "(limit * page) must be greater than 0 and less than Integer.MAX_VALUE"); - rootNode.put("aggs", _constructAggsNode(kq.getType(), Math.max(size, 10000), mapper)); + rootNode.set("aggs", _constructAggsNode(kq.getType(), Math.max(size, ElasticSearchUtils.INDEX_MAX_RESULT_WINDOW), genericObjectMapper)); return rootNode.toString(); @@ -1307,7 +918,7 @@ private ObjectNode _constructQueryNode(MetricSchemaRecordQuery query, ObjectMapp ObjectNode node = mapper.createObjectNode(); ObjectNode regexpNode = mapper.createObjectNode(); regexpNode.put(RecordType.METRIC.getName() + ".raw", SchemaService.convertToRegex(query.getMetric())); - node.put("regexp", regexpNode); + node.set("regexp", regexpNode); filterNodes.add(node); } @@ -1315,7 +926,7 @@ private ObjectNode 
_constructQueryNode(MetricSchemaRecordQuery query, ObjectMapp ObjectNode node = mapper.createObjectNode(); ObjectNode regexpNode = mapper.createObjectNode(); regexpNode.put(RecordType.SCOPE.getName() + ".raw", SchemaService.convertToRegex(query.getScope())); - node.put("regexp", regexpNode); + node.set("regexp", regexpNode); filterNodes.add(node); } @@ -1323,31 +934,41 @@ private ObjectNode _constructQueryNode(MetricSchemaRecordQuery query, ObjectMapp ObjectNode node = mapper.createObjectNode(); ObjectNode regexpNode = mapper.createObjectNode(); regexpNode.put(RecordType.TAGK.getName() + ".raw", SchemaService.convertToRegex(query.getTagKey())); - node.put("regexp", regexpNode); + node.set("regexp", regexpNode); filterNodes.add(node); } + ArrayNode mustNotNodes = mapper.createArrayNode(); if(SchemaService.containsFilter(query.getTagValue())) { + String trueTagValue = query.getTagValue(); + ArrayNode parentNode = filterNodes; + if (query.getTagValue().charAt(0) == MetricQuery.TAG_NOT_EQUALS_INTERNAL_PREFIX.charAt(0)) { + trueTagValue = trueTagValue.substring(1); + parentNode = mustNotNodes; + } ObjectNode node = mapper.createObjectNode(); ObjectNode regexpNode = mapper.createObjectNode(); - regexpNode.put(RecordType.TAGV.getName() + ".raw", SchemaService.convertToRegex(query.getTagValue())); - node.put("regexp", regexpNode); - filterNodes.add(node); + regexpNode.put(RecordType.TAGV.getName() + ".raw", SchemaService.convertToRegex(trueTagValue)); + node.set("regexp", regexpNode); + parentNode.add(node); } if(SchemaService.containsFilter(query.getNamespace())) { ObjectNode node = mapper.createObjectNode(); ObjectNode regexpNode = mapper.createObjectNode(); regexpNode.put(RecordType.NAMESPACE.getName() + ".raw", SchemaService.convertToRegex(query.getNamespace())); - node.put("regexp", regexpNode); + node.set("regexp", regexpNode); filterNodes.add(node); } ObjectNode boolNode = mapper.createObjectNode(); - boolNode.put("filter", filterNodes); + boolNode.set("filter", filterNodes); + if (mustNotNodes.size() > 0) { + boolNode.set("must_not", mustNotNodes); + } ObjectNode queryNode = mapper.createObjectNode(); - queryNode.put("bool", boolNode); + queryNode.set("bool", boolNode); return queryNode; } @@ -1355,15 +976,15 @@ private ObjectNode _constructAggsNode(RecordType type, long limit, ObjectMapper ObjectNode termsNode = mapper.createObjectNode(); termsNode.put("field", type.getName() + ".raw"); - termsNode.put("order", mapper.createObjectNode().put("_term", "asc")); + termsNode.set("order", mapper.createObjectNode().put("_term", "asc")); termsNode.put("size", limit); termsNode.put("execution_hint", "map"); ObjectNode distinctValuesNode = mapper.createObjectNode(); - distinctValuesNode.put("terms", termsNode); + distinctValuesNode.set("terms", termsNode); ObjectNode aggsNode = mapper.createObjectNode(); - aggsNode.put("distinct_values", distinctValuesNode); + aggsNode.set("distinct_values", distinctValuesNode); return aggsNode; } @@ -1371,7 +992,7 @@ private ObjectNode _constructAggsNode(RecordType type, long limit, ObjectMapper /* Helper method to convert JSON String representation to the corresponding Java entity. */ private T toEntity(String content, TypeReference type) { try { - return _mapper.readValue(content, type); + return indexMetadataMapper.readValue(content, type); } catch (IOException ex) { throw new SystemException(ex); } @@ -1383,12 +1004,6 @@ protected void setRestClient(RestClient restClient) this._esRestClient = restClient; } - /* Method to enable ScopeMetricNames Index. 
Used for testing. */ - protected void enableScopeMetricNamesIndex() - { - this._useScopeMetricNamesIndex = true; - } - /** Helper to process the response.

* Throws IllegalArgumentException when the http status code is in the 400 range
* Throws SystemException when the http status code is outside of the 200 and 400 range @@ -1397,7 +1012,6 @@ protected void enableScopeMetricNamesIndex() */ protected String extractResponse(Response response) { requireArgument(response != null, "HttpResponse object cannot be null."); - return doExtractResponse(response.getStatusLine().getStatusCode(), response.getEntity()); } @@ -1434,12 +1048,12 @@ static String doExtractResponse(int statusCode, HttpEntity entity) { } @VisibleForTesting - static ObjectMapper createObjectMapper() { + static ObjectMapper _getMetadataObjectMapper(JsonSerializer<MetricSchemaRecordList> serializer) { ObjectMapper mapper = new ObjectMapper(); mapper.setSerializationInclusion(Include.NON_NULL); SimpleModule module = new SimpleModule(); - module.addSerializer(MetricSchemaRecordList.class, new MetricSchemaRecordList.Serializer()); + module.addSerializer(MetricSchemaRecordList.class, serializer); module.addDeserializer(MetricSchemaRecordList.class, new MetricSchemaRecordList.Deserializer()); module.addDeserializer(List.class, new SchemaRecordList.AggDeserializer()); mapper.registerModule(module); @@ -1447,19 +1061,6 @@ static ObjectMapper createObjectMapper() { return mapper; } - private ObjectMapper _getScopeAndMetricOnlyObjectMapper(JsonSerializer<ScopeAndMetricOnlySchemaRecordList> serializer) { - ObjectMapper mapper = new ObjectMapper(); - - mapper.setSerializationInclusion(Include.NON_NULL); - SimpleModule module = new SimpleModule(); - module.addSerializer(ScopeAndMetricOnlySchemaRecordList.class, serializer); - module.addDeserializer(ScopeAndMetricOnlySchemaRecordList.class, new ScopeAndMetricOnlySchemaRecordList.Deserializer()); - module.addDeserializer(List.class, new SchemaRecordList.AggDeserializer()); - mapper.registerModule(module); - - return mapper; - } - private ObjectMapper _getScopeOnlyObjectMapper(JsonSerializer<ScopeOnlySchemaRecordList> serializer) { ObjectMapper mapper = new ObjectMapper(); @@ -1485,39 +1086,8 @@ private ObjectMapper _getMetatagsObjectMapper(JsonSerializer createMappingsNode) { - try { - Response response = _esRestClient.performRequest(HttpMethod.HEAD.getName(), "/" + indexName); - boolean indexExists = response.getStatusLine().getStatusCode() == HttpStatus.SC_OK ? true : false; - - if(!indexExists) { - _logger.info("Index [" + indexName + "] does not exist. Will create one."); - ObjectMapper mapper = new ObjectMapper(); - - ObjectNode rootNode = mapper.createObjectNode(); - rootNode.put("settings", _createSettingsNode(replicationFactor, numShards)); - rootNode.put("mappings", createMappingsNode.get()); - - String settingsAndMappingsJson = rootNode.toString(); - String requestUrl = new StringBuilder().append("/").append(indexName).toString(); - - response = _esRestClient.performRequest(HttpMethod.PUT.getName(), requestUrl, Collections.emptyMap(), new StringEntity(settingsAndMappingsJson)); - extractResponse(response); - } - } catch (Exception e) { - _logger.error("Failed to check/create {} index. ElasticSearchSchemaService may not function.
{}", - indexName, e); - } + + void logAndMonitorESFailureResponses(String esResponse){ + ObjectMapper mapper = new ObjectMapper(); + JsonNode tree; + try { + tree = mapper.readTree(esResponse); + if(tree.get("failures") != null) { + _logger.warn("ES Response get failures- {}", esResponse); + _monitorService.modifyCounter(MonitorService.Counter.ELASTIC_SEARCH_GET_FAILURES, 1, null); + } + } catch (IOException e) { + _logger.warn("Failed to parse ES json response {}", e); + } } /** @@ -1685,47 +1215,40 @@ public enum Property { ELASTICSEARCH_ENDPOINT_SOCKET_TIMEOUT("service.property.schema.elasticsearch.endpoint.socket.timeout", "10000"), /** Connection count for ES REST client. */ ELASTICSEARCH_CONNECTION_COUNT("service.property.schema.elasticsearch.connection.count", "10"), - /** Replication factor for metadata_index. */ - ELASTICSEARCH_NUM_REPLICAS("service.property.schema.elasticsearch.num.replicas", "1"), - /** Shard count for metadata_index. */ - ELASTICSEARCH_SHARDS_COUNT("service.property.schema.elasticsearch.shards.count", "10"), - /** Replication factor for scopenames */ - ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX("service.property.schema.elasticsearch.num.replicas.for.scope.index", "1"), - /** Shard count for scopenames */ - ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX("service.property.schema.elasticsearch.shards.count.for.scope.index", "6"), - /** Replication factor for metatags */ - ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX("service.property.schema.elasticsearch.num.replicas.for.metatags.index", "1"), - /** Shard count for metatags */ - ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX("service.property.schema.elasticsearch.shards.count.for.metatags.index", "6"), /** The no. of records to batch for bulk indexing requests. * https://www.elastic.co/guide/en/elasticsearch/guide/current/indexing-performance.html#_using_and_sizing_bulk_requests */ ELASTICSEARCH_INDEXING_BATCH_SIZE("service.property.schema.elasticsearch.indexing.batch.size", "10000"), - - ELASTICSEARCH_USE_SCOPE_AND_METRIC_INDEX("service.property.schema.elasticsearch.use.scopeandmetric.index", "false"), - /** The hashing algorithm to use for generating document id. 
*/ ELASTICSEARCH_IDGEN_HASH_ALGO("service.property.schema.elasticsearch.idgen.hash.algo", "MD5"), + /** Name of the main scope:metric:tagk:tagv index */ + ELASTICSEARCH_TAGS_INDEX_NAME("service.property.schema.elasticsearch.index.name", "metadata_index"), + /** Type within the main index */ + ELASTICSEARCH_TAGS_TYPE_NAME("service.property.schema.elasticsearch.type.name", "metadata_type"), + /** Replication factor for main index */ + ELASTICSEARCH_NUM_REPLICAS_FOR_TAGS_INDEX("service.property.schema.elasticsearch.num.replicas", "1"), + /** Shard count for main index */ + ELASTICSEARCH_SHARDS_COUNT_FOR_TAGS_INDEX("service.property.schema.elasticsearch.shards.count", "10"), + + /** Name of scope only index */ ELASTICSEARCH_SCOPE_INDEX_NAME("service.property.schema.elasticsearch.scope.index.name", "scopenames"), /** Type within scope only index */ ELASTICSEARCH_SCOPE_TYPE_NAME("service.property.schema.elasticsearch.scope.type.name", "scope_type"), + /** Replication factor for scopenames */ + ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_INDEX("service.property.schema.elasticsearch.num.replicas.for.scope.index", "1"), + /** Shard count for scopenames */ + ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_INDEX("service.property.schema.elasticsearch.shards.count.for.scope.index", "6"), /** Name of metatags only index */ ELASTICSEARCH_METATAGS_INDEX_NAME("service.property.schema.elasticsearch.metatags.index.name", "metatags"), /** Type within metatags only index */ ELASTICSEARCH_METATAGS_TYPE_NAME("service.property.schema.elasticsearch.metatags.type.name", "metatags_type"), - - /** Replication factor for scope and metric names */ - ELASTICSEARCH_NUM_REPLICAS_FOR_SCOPE_AND_METRIC_INDEX("service.property.schema.elasticsearch.num.replicas.for.scopeandmetric.index", "1"), - /** Shard count for scope and metric names */ - ELASTICSEARCH_SHARDS_COUNT_FOR_SCOPE_AND_METRIC_INDEX("service.property.schema.elasticsearch.shards.count.for.scopeandmetric.index", "6"), - - /** Name of scope and metric only index */ - ELASTICSEARCH_SCOPE_AND_METRIC_INDEX_NAME("service.property.schema.elasticsearch.scopeandmetric.index.name", "scopemetricnames"), - /** Type within scope and metric only index */ - ELASTICSEARCH_SCOPE_AND_METRIC_TYPE_NAME("service.property.schema.elasticsearch.scopeandmetric.type.name", "scopemetric_type"); + /** Replication factor for metatags */ + ELASTICSEARCH_NUM_REPLICAS_FOR_METATAGS_INDEX("service.property.schema.elasticsearch.num.replicas.for.metatags.index", "1"), + /** Shard count for metatags */ + ELASTICSEARCH_SHARDS_COUNT_FOR_METATAGS_INDEX("service.property.schema.elasticsearch.shards.count.for.metatags.index", "6"); private final String _name; private final String _defaultValue; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtils.java new file mode 100644 index 000000000..754206701 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtils.java @@ -0,0 +1,383 @@ +package com.salesforce.dva.argus.service.schema; + +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.function.Supplier; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.core.type.TypeReference; +import org.apache.http.HttpEntity; +import 
org.apache.http.HttpStatus; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.annotations.VisibleForTesting; +import com.google.inject.Singleton; +import com.salesforce.dva.argus.system.SystemException; + + +/** + * ElasticSearch Initializer + * + * @author Kunal Nawale (knawale@salesforce.com) + */ +@Singleton +public class ElasticSearchUtils { + + private static Logger _logger = LoggerFactory.getLogger(ElasticSearchUtils.class); + private static ObjectMapper mapper = new ObjectMapper(); + + public static final int INDEX_MAX_RESULT_WINDOW = 10000; + + public static final String TOKENIZER_PATTERN = "([^\\p{L}\\d]+)|(?<=[\\p{L}&&[^\\p{Lu}]])(?=\\p{Lu})|(?<=\\p{Lu})(?=\\p{Lu}[\\p{L}&&[^\\p{Lu}]])"; + + + public ElasticSearchUtils() { + } + + public static Long convertTimestampToMillis(Long timestamp) { + if (timestamp < 1_00_000_000_000L) return (timestamp * 1000); + return timestamp; + } + + public void createIndexIfNotExists(RestClient esRestClient, + String indexName, + int replicationFactor, + int numShards, + Supplier createMappingsNode) { + try { + Response response = esRestClient.performRequest(HttpMethod.HEAD.getName(), "/" + indexName); + boolean indexExists = response.getStatusLine().getStatusCode() == HttpStatus.SC_OK ? true : false; + + if(!indexExists) { + _logger.info("Index [" + indexName + "] does not exist. Will create one."); + + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.set("settings", _createSettingsNode(replicationFactor, numShards)); + rootNode.set("mappings", createMappingsNode.get()); + + String settingsAndMappingsJson = rootNode.toString(); + String requestUrl = new StringBuilder().append("/").append(indexName).toString(); + + response = esRestClient.performRequest(HttpMethod.PUT.getName(), + requestUrl, + Collections.emptyMap(), + new StringEntity(settingsAndMappingsJson, ContentType.APPLICATION_JSON)); + extractResponse(response); + } + } catch (Exception e) { + _logger.error("Failed to check/create {} index. ElasticSearchSchemaService may not function. {}", + indexName, e); + } + } + + public void createIndexTemplate(RestClient esRestClient, + String templateName, + String templatePattern, + Supplier createIndexTemplateSettingsNode, + Supplier createIndexTemplateMappingsNode) { + try { + ObjectNode rootNode = mapper.createObjectNode(); + rootNode.put("template",templatePattern + "*"); + rootNode.set("settings", createIndexTemplateSettingsNode.get()); + rootNode.set("mappings", createIndexTemplateMappingsNode.get()); + + String settingsAndMappingsJson = rootNode.toString(); + String requestUrl = new StringBuilder().append("/_template/").append(templateName).toString(); + + Request request = new Request(HttpMethod.PUT.getName(), requestUrl); + request.setEntity(new StringEntity(settingsAndMappingsJson, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + extractResponse(response); + } catch (Exception e) { + _logger.error("Failed to check/create {} index template. 
Failure due to {}", templateName, e); + } + } + + private ObjectNode _createSettingsNode(int replicationFactor, int numShards) { + ObjectNode metadataAnalyzer = mapper.createObjectNode(); + metadataAnalyzer.put("tokenizer", "metadata_tokenizer"); + metadataAnalyzer.set("filter", mapper.createArrayNode().add("lowercase")); + + ObjectNode analyzerNode = mapper.createObjectNode(); + analyzerNode.set("metadata_analyzer", metadataAnalyzer); + + ObjectNode tokenizerNode = mapper.createObjectNode(); + tokenizerNode.set("metadata_tokenizer", mapper.createObjectNode().put("type", "pattern").put("pattern", TOKENIZER_PATTERN)); + + ObjectNode analysisNode = mapper.createObjectNode(); + analysisNode.set("analyzer", analyzerNode); + analysisNode.set("tokenizer", tokenizerNode); + + ObjectNode indexNode = mapper.createObjectNode(); + indexNode.put("max_result_window", INDEX_MAX_RESULT_WINDOW); + indexNode.put("number_of_replicas", replicationFactor); + indexNode.put("number_of_shards", numShards); + + ObjectNode settingsNode = mapper.createObjectNode(); + settingsNode.set("analysis", analysisNode); + settingsNode.set("index", indexNode); + + return settingsNode; + } + + /* Helper method to convert JSON String representation to the corresponding Java entity. */ + public static T toEntity(String content, TypeReference type, ObjectMapper mapper) { + try { + return mapper.readValue(content, type); + } catch (IOException ex) { + throw new SystemException(ex); + } + } + + /** Helper to process the response.

+ * Throws IllegalArgumentException when the http status code is in the 400 range
+ * Throws SystemException when the http status code is outside of the 200 and 400 range + * @param response ES response + * @return Stringified response + */ + public static String extractResponse(Response response) { + requireArgument(response != null, "HttpResponse object cannot be null."); + + return doExtractResponse(response.getStatusLine().getStatusCode(), response.getEntity()); + } + + /** + * testable version of {@link ElasticSearchSchemaService#extractResponse(Response)} + * @param statusCode + * @param entity + * @return + */ + @VisibleForTesting + public static String doExtractResponse(int statusCode, HttpEntity entity) { + String message = null; + + if (entity != null) { + try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { + entity.writeTo(baos); + message = baos.toString("UTF-8"); + } + catch (IOException ex) { + throw new SystemException(ex); + } + } + + //if the response is in the 400 range, use IllegalArgumentException, which currently translates to a 400 error + if (statusCode>= HttpStatus.SC_BAD_REQUEST && statusCode < HttpStatus.SC_INTERNAL_SERVER_ERROR) { + throw new IllegalArgumentException("Status code: " + statusCode + " . Error occurred. " + message); + } + //everything else that's not in the 200 range, use SystemException, which translates to a 500 error. + if ((statusCode < HttpStatus.SC_OK) || (statusCode >= HttpStatus.SC_MULTIPLE_CHOICES)) { + throw new SystemException("Status code: " + statusCode + " . Error occurred. " + message); + } else { + return message; + } + } + + public enum HashAlgorithm { + MD5, + XXHASH; + + public static ElasticSearchUtils.HashAlgorithm fromString(String str) throws IllegalArgumentException { + for(ElasticSearchUtils.HashAlgorithm algo : ElasticSearchUtils.HashAlgorithm.values()) { + if(algo.name().equalsIgnoreCase(str)) { + return algo; + } + } + throw new IllegalArgumentException(str + " does not match any of the available algorithms."); + } + } + + /** + * Enumeration of supported HTTP methods. + * + * @author Bhinav Sura (bhinav.sura@salesforce.com) + */ + public enum HttpMethod { + + /** POST operation. */ + POST("POST"), + /** PUT operation. */ + PUT("PUT"), + /** HEAD operation. 
*/ + HEAD("HEAD"); + + private String name; + + HttpMethod(String name) { + this.setName(name); + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + } + + public static PutResponse performESRequest(RestClient esRestClient, String requestUrl, String requestBody) throws IOException { + ObjectMapper genericObjectMapper = new ObjectMapper(); + Request request = new Request(HttpMethod.POST.getName(), requestUrl); + request.setEntity(new StringEntity(requestBody, ContentType.APPLICATION_JSON)); + Response response = esRestClient.performRequest(request); + String strResponse = extractResponse(response); + PutResponse putResponse = genericObjectMapper.readValue(strResponse, PutResponse.class); + return putResponse; + } + + /** + * Used for constructing Elastic Search Response object + */ + public static class PutResponse { + private int took; + private boolean errors; + private List items; + + public PutResponse() {} + + public int getTook() { + return took; + } + + public void setTook(int took) { + this.took = took; + } + + public boolean isErrors() { + return errors; + } + + public void setErrors(boolean errors) { + this.errors = errors; + } + + public List getItems() { + return items; + } + + public void setItems(List items) { + this.items = items; + } + + @JsonIgnoreProperties(ignoreUnknown = true) + public static class Item { + private PutResponse.CreateItem create; + private PutResponse.CreateItem index; + private PutResponse.CreateItem update; + + public Item() {} + + public PutResponse.CreateItem getCreate() { + return create; + } + + public void setCreate(PutResponse.CreateItem create) { + this.create = create; + } + + public PutResponse.CreateItem getIndex() { + return index; + } + + public void setIndex(PutResponse.CreateItem index) { + this.index = index; + } + + public PutResponse.CreateItem getUpdate() { + return update; + } + + public void setUpdate(PutResponse.CreateItem update) { + this.update = update; + } + } + + @JsonIgnoreProperties(ignoreUnknown = true) + public static class CreateItem { + private String _index; + private String _id; + private int status; + private int _version; + private PutResponse.Error error; + + public CreateItem() {} + + public String get_index() { + return _index; + } + + public void set_index(String _index) { + this._index = _index; + } + + public String get_id() { + return _id; + } + + public void set_id(String _id) { + this._id = _id; + } + + public int get_version() { + return _version; + } + + public void set_version(int _version) { + this._version = _version; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + public PutResponse.Error getError() { + return error; + } + + public void setError(PutResponse.Error error) { + this.error = error; + } + } + + @JsonIgnoreProperties(ignoreUnknown = true) + static class Error { + private String type; + private String reason; + + public Error() {} + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetatagsSchemaRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetatagsSchemaRecordList.java index 1689a0a14..1a9c4538c 100644 --- 
a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetatagsSchemaRecordList.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetatagsSchemaRecordList.java @@ -1,19 +1,8 @@ package com.salesforce.dva.argus.service.schema; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.commons.codec.digest.DigestUtils; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -24,8 +13,18 @@ import com.salesforce.dva.argus.entity.MetatagsRecord; import com.salesforce.dva.argus.service.SchemaService.RecordType; import com.salesforce.dva.argus.service.schema.MetricSchemaRecordList.HashAlgorithm; - import net.openhft.hashing.LongHashFunction; +import org.apache.commons.codec.digest.DigestUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Represents a list of MetatagsRecord. @@ -33,12 +32,12 @@ * * @author Kunal Nawale (knawale@salesforce.com) */ -public class MetatagsSchemaRecordList { +public class MetatagsSchemaRecordList implements RecordFinder { private Map _idToSchemaRecordMap = new HashMap<>(); private String _scrollID; - public MetatagsSchemaRecordList(List records, String scrollID) { + public MetatagsSchemaRecordList(Set records, String scrollID) { int count = 0; for(MetatagsRecord record : records) { _idToSchemaRecordMap.put(String.valueOf(count++), record); @@ -46,7 +45,7 @@ public MetatagsSchemaRecordList(List records, String scrollID) { setScrollID(scrollID); } - public MetatagsSchemaRecordList(List records, HashAlgorithm algorithm) { + public MetatagsSchemaRecordList(Set records, HashAlgorithm algorithm) { for(MetatagsRecord record : records) { String id = null; if(HashAlgorithm.MD5.equals(algorithm)) { @@ -62,6 +61,11 @@ public List getRecords() { return new ArrayList<>(_idToSchemaRecordMap.values()); } + @Override + public Set getIdSet() { + return _idToSchemaRecordMap.keySet(); + } + public String getScrollID() { return _scrollID; } @@ -70,10 +74,11 @@ public void setScrollID(String scrollID) { this._scrollID = scrollID; } - MetatagsRecord getRecord(String id) { + public MetatagsRecord getRecord(String id) { return _idToSchemaRecordMap.get(id); } + /* static class CreateSerializer extends JsonSerializer { @Override @@ -100,6 +105,22 @@ public void serialize(MetatagsSchemaRecordList list, JsonGenerator jgen, Seriali SchemaRecordList.addUpdateJson(jgen, entry.getKey()); } } + }*/ + + static class IndexSerializer extends JsonSerializer { + + @Override + public void serialize(MetatagsSchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_NULL); + + for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { + String fieldsData = mapper.writeValueAsString(entry.getValue().getMetatags()); + SchemaRecordList.addIndexJson(jgen, entry.getKey(), 
fieldsData); + } + } } static class Deserializer extends JsonDeserializer { @@ -109,7 +130,7 @@ public MetatagsSchemaRecordList deserialize(JsonParser jp, DeserializationContex throws IOException { String scrollID = null; - List records = Collections.emptyList(); + Set records = Collections.emptySet(); JsonNode rootNode = jp.getCodec().readTree(jp); if(rootNode.has("_scroll_id")) { @@ -118,7 +139,7 @@ public MetatagsSchemaRecordList deserialize(JsonParser jp, DeserializationContex JsonNode hits = rootNode.get("hits").get("hits"); if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { - records = new ArrayList<>(hits.size()); + records = new HashSet<>(hits.size()); Iterator iter = hits.elements(); while(iter.hasNext()) { JsonNode hit = iter.next(); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetricSchemaRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetricSchemaRecordList.java index b5f48edd8..0ff28cd1b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetricSchemaRecordList.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/MetricSchemaRecordList.java @@ -1,15 +1,5 @@ package com.salesforce.dva.argus.service.schema; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.commons.codec.digest.DigestUtils; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; @@ -23,18 +13,28 @@ import com.fasterxml.jackson.databind.node.JsonNodeType; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.service.SchemaService.RecordType; - import net.openhft.hashing.LongHashFunction; +import org.apache.commons.codec.digest.DigestUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; import static com.salesforce.dva.argus.entity.MetricSchemaRecord.DEFAULT_RETENTION_DISCOVERY_DAYS; import static com.salesforce.dva.argus.entity.MetricSchemaRecord.EXPIRATION_TS; -public class MetricSchemaRecordList { +public class MetricSchemaRecordList implements RecordFinder { private final static long ONE_DAY_IN_MILLIS = 24L * 3600L * 1000L; private Map _idToSchemaRecordMap = new HashMap<>(); private String _scrollID; + private long totalHits; - public MetricSchemaRecordList(List records, HashAlgorithm algorithm) { + public MetricSchemaRecordList(Set records, HashAlgorithm algorithm) { for(MetricSchemaRecord record : records) { String id = null; if(HashAlgorithm.MD5.equals(algorithm)) { @@ -46,36 +46,49 @@ public MetricSchemaRecordList(List records, HashAlgorithm al } } - private MetricSchemaRecordList(List records, String scrollID) { + private MetricSchemaRecordList(List records, String scrollID, long totalHits) { int count = 0; for(MetricSchemaRecord record : records) { _idToSchemaRecordMap.put(String.valueOf(count++), record); } setScrollID(scrollID); + this.totalHits = totalHits; } + @Override public List getRecords() { return new ArrayList<>(_idToSchemaRecordMap.values()); } + + @Override + public Set getIdSet() { + return _idToSchemaRecordMap.keySet(); + } + @Override public String getScrollID() { return _scrollID; } + @Override public void 
setScrollID(String scrollID) { this._scrollID = scrollID; } - MetricSchemaRecord getRecord(String id) { + @Override + public MetricSchemaRecord getRecord(String id) { return _idToSchemaRecordMap.get(id); } - - - enum HashAlgorithm { + + public long getTotalHits() { + return totalHits; + } + + public enum HashAlgorithm { MD5, XXHASH; - static HashAlgorithm fromString(String str) throws IllegalArgumentException { + public static HashAlgorithm fromString(String str) throws IllegalArgumentException { for(HashAlgorithm algo : HashAlgorithm.values()) { if(algo.name().equalsIgnoreCase(str)) { return algo; @@ -85,14 +98,14 @@ static HashAlgorithm fromString(String str) throws IllegalArgumentException { throw new IllegalArgumentException(str + " does not match any of the available algorithms."); } } - - - static class Serializer extends JsonSerializer { + + /* these two serializers are no longer used + static class CreateSerializer extends JsonSerializer { @Override public void serialize(MetricSchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) - throws IOException, JsonProcessingException { - + throws IOException{ + ObjectMapper mapper = new ObjectMapper(); mapper.setSerializationInclusion(Include.NON_NULL); final long now = System.currentTimeMillis(); @@ -113,8 +126,67 @@ public void serialize(MetricSchemaRecordList list, JsonGenerator jgen, Serialize } } } - - + + static class UpdateSerializer extends JsonSerializer { + + @Override + public void serialize(MetricSchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_NULL); + final long now = System.currentTimeMillis(); + + for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { + jgen.writeRaw("{\"update\":{\"_id\":\"" + entry.getKey() + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + + StringBuilder updateSB = new StringBuilder(); + updateSB.append("\"mts\":" + now); + Integer retention = entry.getValue().getRetentionDiscovery(); + if (retention != null) { + updateSB.append(",\"").append(RETENTION_DISCOVERY).append("\":").append(retention); + updateSB.append(",\"").append(EXPIRATION_TS).append("\":").append(now + ONE_DAY_IN_MILLIS * retention); + } else { + updateSB.append(",\"").append(EXPIRATION_TS).append("\":").append(now + ONE_DAY_IN_MILLIS * DEFAULT_RETENTION_DISCOVERY_DAYS); + + } + jgen.writeRaw("{\"doc\":{" + updateSB + "}}"); + jgen.writeRaw(System.lineSeparator()); + } + } + } */ + + /** + * serialize to json string used for a bulk INDEX call + */ + static class IndexSerializer extends JsonSerializer { + + @Override + public void serialize(MetricSchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException{ + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_NULL); + final long now = System.currentTimeMillis(); + + for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { + jgen.writeRaw("{ \"index\" : {\"_id\" : \"" + entry.getKey() + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + String fieldsData = mapper.writeValueAsString(entry.getValue()); + + String mtsField = "\"mts\":" + now; + String ctsField = "\"cts\":" + now; //there might be no point for cts now since it will always = mts + Integer retention = entry.getValue().getRetentionDiscovery(); + Long expiration = now + (retention==null? 
DEFAULT_RETENTION_DISCOVERY_DAYS:retention) * ONE_DAY_IN_MILLIS; + String expirationField = "\"" + EXPIRATION_TS + "\":" + expiration; + + jgen.writeRaw(fieldsData.substring(0, fieldsData.length()-1) + "," + mtsField + "," + ctsField + "," + expirationField + "}"); + jgen.writeRaw(System.lineSeparator()); + } + } + } + static class Deserializer extends JsonDeserializer { @Override @@ -128,6 +200,7 @@ public MetricSchemaRecordList deserialize(JsonParser jp, DeserializationContext if(rootNode.has("_scroll_id")) { scrollID = rootNode.get("_scroll_id").asText(); } + long totalHits = rootNode.get("hits").get("total").asLong(); JsonNode hits = rootNode.get("hits").get("hits"); if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { @@ -153,7 +226,7 @@ public MetricSchemaRecordList deserialize(JsonParser jp, DeserializationContext } } - return new MetricSchemaRecordList(records, scrollID); + return new MetricSchemaRecordList(records, scrollID, totalHits); } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/NoOperationSchemaService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/NoOperationSchemaService.java new file mode 100644 index 000000000..a575b62f9 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/NoOperationSchemaService.java @@ -0,0 +1,54 @@ +package com.salesforce.dva.argus.service.schema; + +import com.google.inject.Inject; +import com.salesforce.dva.argus.entity.KeywordQuery; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.MetricSchemaRecord; +import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; +import com.salesforce.dva.argus.service.DefaultService; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.system.SystemConfiguration; + +import java.util.List; + +/** + * Schema service that does absolutely nothing. Meant as an available SchemaService binding for dependents + * of ArgusCore that do not need a SchemaService. 
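+ * <p>A minimal binding sketch, assuming Guice wiring; the module name below is hypothetical:
+ * <pre>{@code
+ * public class NoSchemaModule extends AbstractModule {
+ *     protected void configure() {
+ *         bind(SchemaService.class).to(NoOperationSchemaService.class);
+ *     }
+ * }
+ * }</pre>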
+ */ +public class NoOperationSchemaService extends DefaultService implements SchemaService { + + @Inject + public NoOperationSchemaService(SystemConfiguration config) { + super(config); + } + + @Override + public void put(List metrics) { + throw new UnsupportedOperationException(NoOperationSchemaService.class.getName() + " does not support puts"); + } + + @Override + public void put(Metric metric) { + throw new UnsupportedOperationException(NoOperationSchemaService.class.getName() + " does not support puts"); + } + + @Override + public List get(MetricSchemaRecordQuery query) { + return null; + } + + @Override + public List getUnique(MetricSchemaRecordQuery query, RecordType type) { + return null; + } + + @Override + public List browseUnique(MetricSchemaRecordQuery query, RecordType type, int indexLevel) { + return null; + } + + @Override + public List keywordSearch(KeywordQuery query) { + return null; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/RecordFinder.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/RecordFinder.java new file mode 100644 index 000000000..66dda6583 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/RecordFinder.java @@ -0,0 +1,26 @@ +package com.salesforce.dva.argus.service.schema; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.util.List; +import java.util.Set; + +/** + * The common interface for the existing schema record list classes when they are being used + * to retrieve record by Id + * @param + */ +public interface RecordFinder { + + Set getIdSet(); + + String getScrollID(); + + void setScrollID(String scrollID); + + T getRecord(String id); + + List getRecords(); + + +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/SchemaRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/SchemaRecordList.java index a5c1a8feb..ef490b554 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/SchemaRecordList.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/SchemaRecordList.java @@ -2,7 +2,6 @@ import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -29,12 +28,25 @@ public static void addCreateJson(JsonGenerator jgen, String id, String fieldsDat jgen.writeRaw(System.lineSeparator()); } - public static void addUpdateJson(JsonGenerator jgen, String id) throws IOException { + /*public static void addUpdateJson(JsonGenerator jgen, String id) throws IOException { jgen.writeRaw("{ \"update\" : {\"_id\" : \"" + id + "\"}}"); jgen.writeRaw(System.lineSeparator()); String timeStampField = "\"mts\":" + System.currentTimeMillis(); jgen.writeRaw("{ \"doc\" :{" + timeStampField + "} }"); jgen.writeRaw(System.lineSeparator()); + } */ + + public static void addIndexJson(JsonGenerator jgen, String id, String fieldsData) throws IOException { + + jgen.writeRaw("{ \"index\" : {\"_id\" : \"" + id + "\"}}"); + jgen.writeRaw(System.lineSeparator()); + + long currentTimeMillis = System.currentTimeMillis(); + + String createTimeStampField = "\"cts\":" + currentTimeMillis; + String updateTimeStampField = "\"mts\":" + currentTimeMillis; + jgen.writeRaw(fieldsData.substring(0, 
fieldsData.length()-1) + "," + createTimeStampField + "," + updateTimeStampField + "}"); + jgen.writeRaw(System.lineSeparator()); } static class AggDeserializer extends JsonDeserializer> { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeAndMetricOnlySchemaRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeAndMetricOnlySchemaRecordList.java deleted file mode 100644 index eed5f2d10..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeAndMetricOnlySchemaRecordList.java +++ /dev/null @@ -1,140 +0,0 @@ -package com.salesforce.dva.argus.service.schema; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import com.salesforce.dva.argus.entity.ScopeAndMetricOnlySchemaRecord; -import org.apache.commons.codec.digest.DigestUtils; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.JsonSerializer; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializerProvider; -import com.fasterxml.jackson.databind.node.JsonNodeType; -import com.salesforce.dva.argus.service.SchemaService.RecordType; -import com.salesforce.dva.argus.service.schema.MetricSchemaRecordList.HashAlgorithm; - -import net.openhft.hashing.LongHashFunction; - -/** - * Represents a list of scope and metric names from discovery queries. - * Internally it has a mapping from hash id of scope and metric names to the actual scope and metric names. 
- * - * @author Naveen Reddy Karri (nkarri@salesforce.com) - */ -public class ScopeAndMetricOnlySchemaRecordList { - - private Map _idToSchemaRecordMap = new HashMap<>(); - private String _scrollID; - - public ScopeAndMetricOnlySchemaRecordList(List records, String scrollID) { - int count = 0; - for(ScopeAndMetricOnlySchemaRecord record : records) { - _idToSchemaRecordMap.put(String.valueOf(count++), record); - } - setScrollID(scrollID); - } - - public ScopeAndMetricOnlySchemaRecordList(List records, HashAlgorithm algorithm) { - for(ScopeAndMetricOnlySchemaRecord record : records) { - String id = null; - String scopeAndMetricName = ScopeAndMetricOnlySchemaRecord.getFieldsAsString(record); - if(HashAlgorithm.MD5.equals(algorithm)) { - id = DigestUtils.md5Hex(scopeAndMetricName); - } else { - id = String.valueOf(LongHashFunction.xx().hashChars(scopeAndMetricName)); - } - _idToSchemaRecordMap.put(id, record); - } - } - - public List getRecords() { - return new ArrayList<>(_idToSchemaRecordMap.values()); - } - - public String getScrollID() { - return _scrollID; - } - - public void setScrollID(String scrollID) { - this._scrollID = scrollID; - } - - ScopeAndMetricOnlySchemaRecord getRecord(String id) { - return _idToSchemaRecordMap.get(id); - } - - static class CreateSerializer extends JsonSerializer { - - @Override - public void serialize(ScopeAndMetricOnlySchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) - throws IOException { - - ObjectMapper mapper = new ObjectMapper(); - mapper.setSerializationInclusion(Include.NON_NULL); - - for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { - String fieldsData = mapper.writeValueAsString(entry.getValue()); - SchemaRecordList.addCreateJson(jgen, entry.getKey(), fieldsData); - } - } - } - - static class UpdateSerializer extends JsonSerializer { - - @Override - public void serialize(ScopeAndMetricOnlySchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) - throws IOException { - - ObjectMapper mapper = new ObjectMapper(); - mapper.setSerializationInclusion(Include.NON_NULL); - - for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { - SchemaRecordList.addUpdateJson(jgen, entry.getKey()); - } - } - } - - static class Deserializer extends JsonDeserializer { - - @Override - public ScopeAndMetricOnlySchemaRecordList deserialize(JsonParser jp, DeserializationContext context) - throws IOException { - - String scrollID = null; - List records = Collections.emptyList(); - - JsonNode rootNode = jp.getCodec().readTree(jp); - if(rootNode.has("_scroll_id")) { - scrollID = rootNode.get("_scroll_id").asText(); - } - JsonNode hits = rootNode.get("hits").get("hits"); - - if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { - records = new ArrayList<>(hits.size()); - Iterator iter = hits.elements(); - while(iter.hasNext()) { - JsonNode hit = iter.next(); - JsonNode source = hit.get("_source"); - - JsonNode scopeNode = source.get(RecordType.SCOPE.getName()); - JsonNode metricNode = source.get(RecordType.METRIC.getName()); - - records.add(new ScopeAndMetricOnlySchemaRecord(scopeNode.asText(), metricNode.asText())); - } - } - - return new ScopeAndMetricOnlySchemaRecordList(records, scrollID); - } - } -} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeOnlySchemaRecordList.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeOnlySchemaRecordList.java index 5807c4a74..760eca996 100644 --- 
a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeOnlySchemaRecordList.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/schema/ScopeOnlySchemaRecordList.java @@ -1,19 +1,8 @@ package com.salesforce.dva.argus.service.schema; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import org.apache.commons.codec.digest.DigestUtils; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; @@ -24,8 +13,18 @@ import com.salesforce.dva.argus.entity.ScopeOnlySchemaRecord; import com.salesforce.dva.argus.service.SchemaService.RecordType; import com.salesforce.dva.argus.service.schema.MetricSchemaRecordList.HashAlgorithm; - import net.openhft.hashing.LongHashFunction; +import org.apache.commons.codec.digest.DigestUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; /** * Represents a list of scope names from discovery queries. @@ -33,12 +32,12 @@ * * @author Dilip Devaraj (ddevaraj@salesforce.com) */ -public class ScopeOnlySchemaRecordList { +public class ScopeOnlySchemaRecordList implements RecordFinder { private Map _idToSchemaRecordMap = new HashMap<>(); private String _scrollID; - public ScopeOnlySchemaRecordList(List records, String scrollID) { + public ScopeOnlySchemaRecordList(Set records, String scrollID) { int count = 0; for(ScopeOnlySchemaRecord record : records) { _idToSchemaRecordMap.put(String.valueOf(count++), record); @@ -46,7 +45,7 @@ public ScopeOnlySchemaRecordList(List records, String scr setScrollID(scrollID); } - public ScopeOnlySchemaRecordList(List records, HashAlgorithm algorithm) { + public ScopeOnlySchemaRecordList(Set records, HashAlgorithm algorithm) { for(ScopeOnlySchemaRecord record : records) { String id = null; String scopeOnly = record.getScope(); @@ -62,7 +61,12 @@ public ScopeOnlySchemaRecordList(List records, HashAlgori public List getRecords() { return new ArrayList<>(_idToSchemaRecordMap.values()); } - + + @Override + public Set getIdSet() { + return _idToSchemaRecordMap.keySet(); + } + public String getScrollID() { return _scrollID; } @@ -71,10 +75,11 @@ public void setScrollID(String scrollID) { this._scrollID = scrollID; } - ScopeOnlySchemaRecord getRecord(String id) { + public ScopeOnlySchemaRecord getRecord(String id) { return _idToSchemaRecordMap.get(id); } - + + /* static class CreateSerializer extends JsonSerializer { @Override @@ -105,6 +110,23 @@ public void serialize(ScopeOnlySchemaRecordList list, JsonGenerator jgen, Serial SchemaRecordList.addUpdateJson(jgen, entry.getKey()); } } + } */ + + static class IndexSerializer extends JsonSerializer { + + @Override + public void serialize(ScopeOnlySchemaRecordList list, JsonGenerator jgen, SerializerProvider provider) + throws IOException { + + ObjectMapper mapper = new ObjectMapper(); + mapper.setSerializationInclusion(Include.NON_NULL); + + for(Map.Entry entry : list._idToSchemaRecordMap.entrySet()) { + + String 
fieldsData = mapper.writeValueAsString(entry.getValue()); + SchemaRecordList.addIndexJson(jgen, entry.getKey(), fieldsData); + } + } } static class Deserializer extends JsonDeserializer { @@ -114,7 +136,7 @@ public ScopeOnlySchemaRecordList deserialize(JsonParser jp, DeserializationConte throws IOException { String scrollID = null; - List records = Collections.emptyList(); + Set records = Collections.emptySet(); JsonNode rootNode = jp.getCodec().readTree(jp); if(rootNode.has("_scroll_id")) { @@ -123,7 +145,7 @@ public ScopeOnlySchemaRecordList deserialize(JsonParser jp, DeserializationConte JsonNode hits = rootNode.get("hits").get("hits"); if(JsonNodeType.ARRAY.equals(hits.getNodeType())) { - records = new ArrayList<>(hits.size()); + records = new HashSet<>(hits.size()); Iterator iter = hits.elements(); while(iter.hasNext()) { JsonNode hit = iter.next(); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBService.java index 326ef3450..76a972678 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBService.java @@ -40,24 +40,15 @@ import java.net.URL; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; -import java.util.Set; -import java.util.TreeMap; import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.IntUnaryOperator; @@ -95,6 +86,7 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.inject.Inject; import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.DefaultService; import com.salesforce.dva.argus.service.MonitorService; @@ -182,7 +174,7 @@ public AbstractTSDBService(SystemConfiguration config, MonitorService monitorSer requireArgument((readEndPoint != null) && (!readEndPoint.isEmpty()), "Illegal read endpoint URL."); } - _readBackupEndPoints = Arrays.asList(config.getValue(Property.TSD_ENDPOINT_BACKUP_READ.getName(), Property.TSD_ENDPOINT_BACKUP_READ.getDefaultValue()).split(",")); + _readBackupEndPoints = new ArrayList<>(Arrays.asList(config.getValue(Property.TSD_ENDPOINT_BACKUP_READ.getName(), Property.TSD_ENDPOINT_BACKUP_READ.getDefaultValue()).split(","))); if(_readBackupEndPoints.size() < _readEndPoints.size()){ for(int i=0; i< _readEndPoints.size() - _readBackupEndPoints.size();i++) @@ -202,7 +194,7 @@ public AbstractTSDBService(SystemConfiguration config, MonitorService monitorSer requireArgument(connTimeout >= 1, "Timeout must be greater than 0."); _keyUidCache = CacheBuilder.newBuilder() - .maximumSize(100000) + .maximumSize(1000000) .expireAfterAccess(1, TimeUnit.HOURS) .build(); @@ -210,7 +202,9 @@ public AbstractTSDBService(SystemConfiguration config, MonitorService monitorSer int 
index = 0; for (String readEndpoint : _readEndPoints) { _readPortMap.put(readEndpoint, getClient(connCount / 2, connTimeout, socketTimeout, tsdbConnectionReuseCount ,readEndpoint)); - _readBackupEndPointsMap.put(readEndpoint, _readBackupEndPoints.get(index)); + if (index < _readBackupEndPoints.size()) { + _readBackupEndPointsMap.put(readEndpoint, _readBackupEndPoints.get(index)); + } index ++; } for (String readBackupEndpoint : _readBackupEndPoints) { @@ -320,6 +314,23 @@ public static String constructTSDBMetricName(Metric metric) { } return sb.toString(); } + + + /** + * We construct OpenTSDB metric name as a combination of Argus histogram's metric, scope as follows: + * + * metric(otsdb) = metric(argus)<DELIMITER>scope(argus) + * + * @param histogram The histogram + * @return OpenTSDB metric name constructed from scope, metric. + */ + public static String constructTSDBMetricName(Histogram histogram) { + StringBuilder sb = new StringBuilder(); + + sb.append(histogram.getMetric()).append(DELIMITER).append(histogram.getScope()); + + return sb.toString(); + } /** * Given otsdb metric name, return argus metric. @@ -401,11 +412,32 @@ public void putMetrics(List metrics) { put(fracturedList, endpoint + "/api/put", HttpMethod.POST, CHUNK_SIZE); } catch(Exception ex) { _logger.warn("Failure while trying to push metrics", ex); - _retry(fracturedList, _roundRobinIterator, "/api/put", HttpMethod.POST, CHUNK_SIZE); + retry(fracturedList, _roundRobinIterator, "/api/put", HttpMethod.POST, CHUNK_SIZE); } } + + @Override + public void putHistograms(List histograms) { + requireNotDisposed(); + requireArgument(histograms != null, "Histograms can not be null"); + + String endpoint = _roundRobinIterator.next(); + _logger.debug("Pushing {} histograms to TSDB using endpoint {}.", histograms.size(), endpoint); + + List histogramList = new ArrayList<>(); + histogramList.addAll(histograms); + + try { + put(histogramList, endpoint + "/api/histogram", HttpMethod.POST, CHUNK_SIZE); + } catch(Exception ex) { + _logger.warn("Failure while trying to push histograms", ex); + retry(histogramList, _roundRobinIterator, "/api/histogram", HttpMethod.POST, CHUNK_SIZE); + } + } + + void retry(List objects, Iterator endPointIterator, String urlPath, HttpMethod httpMethod, int chunkSize) { + Exception failure = null; - public void _retry(List objects, Iterator endPointIterator, String urlPath, HttpMethod httpMethod, int chunkSize) { for(int i=0;i void _retry(List objects, Iterator endPointIterator, Strin put(objects, endpoint + urlPath, httpMethod, chunkSize); return; } catch(Exception ex) { - _logger.info("Failed while trying to push data. We will retry for {} more times", RETRY_COUNT-i); + failure = ex; + _logger.info("Failed while trying to push data. We will retry for {} more times", RETRY_COUNT-i-1); } } _logger.error("Retried for {} times and we still failed. Dropping this chunk of data.", RETRY_COUNT); - + if (failure != null) { + throw new SystemException(failure.getMessage(), failure); + } } /** @see TSDBService#putAnnotations(java.util.List) */ @@ -451,17 +486,19 @@ public void putAnnotations(List annotations) { } } - // query TSDB to get uids for annotations. - Map keyUidMap = getUidMapFromTsdb(keyAnnotationMap); + if(!keyAnnotationMap.isEmpty()) { + // query TSDB to get uids for annotations. 
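+ // This lookup only runs when at least one annotation key was absent from
+ // _keyUidCache; the resolved uids are cached below (expireAfterAccess of one
+ // hour), so repeat puts for the same scope/metric/tags skip the round trip.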
+ Map keyUidMap = getUidMapFromTsdb(keyAnnotationMap); - for(Map.Entry keyUidEntry : keyUidMap.entrySet()) { + for (Map.Entry keyUidEntry : keyUidMap.entrySet()) { - // We add new uids to the cache and create AnnotationWrapper objects. - _keyUidCache.put(keyUidEntry.getKey(), keyUidEntry.getValue()); - AnnotationWrapper wrapper = new AnnotationWrapper(keyUidEntry.getValue(), - keyAnnotationMap.get(keyUidEntry.getKey())); + // We add new uids to the cache and create AnnotationWrapper objects. + _keyUidCache.put(keyUidEntry.getKey(), keyUidEntry.getValue()); + AnnotationWrapper wrapper = new AnnotationWrapper(keyUidEntry.getValue(), + keyAnnotationMap.get(keyUidEntry.getKey())); - addToWrapperList(wrapperList, wrapper); + addToWrapperList(wrapperList, wrapper); + } } _logger.debug("putAnnotations CacheStats hitCount {} requestCount {} " + @@ -479,7 +516,7 @@ public void putAnnotations(List annotations) { put(wrappers, endpoint + "/api/annotation/bulk", HttpMethod.POST, CHUNK_SIZE); } catch (Exception ex) { _logger.warn("Exception while trying to push annotations", ex); - _retry(wrappers, _roundRobinIterator, "/api/annotation/bulk", HttpMethod.POST, CHUNK_SIZE); + retry(wrappers, _roundRobinIterator, "/api/annotation/bulk", HttpMethod.POST, CHUNK_SIZE); } } } @@ -528,11 +565,12 @@ private Map getUidMapFromTsdb(Map keyAnnotat queries.add(query); } - long backOff = 1000L; + putMetrics(metrics); + + long backOff = 500L; for (int attempts = 0; attempts < 3; attempts++) { - putMetrics(metrics); try { Thread.sleep(backOff); } catch (InterruptedException ex) { @@ -551,24 +589,25 @@ private Map getUidMapFromTsdb(Map keyAnnotat return keyUidMap; } catch (Exception e) { - backOff += 1000L; + _logger.warn("Exception while trying to get uids for annotations", e); + backOff += 500L; } } throw new SystemException("Failed to create new annotation metric."); } - private ObjectMapper getMapper() { - ObjectMapper mapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - - module.addSerializer(Metric.class, new MetricTransform.Serializer()); - module.addDeserializer(ResultSet.class, new MetricTransform.MetricListDeserializer()); - module.addSerializer(AnnotationWrapper.class, new AnnotationTransform.Serializer()); - module.addDeserializer(AnnotationWrappers.class, new AnnotationTransform.Deserializer()); - module.addSerializer(MetricQuery.class, new MetricQueryTransform.Serializer()); - mapper.registerModule(module); - return mapper; + ObjectMapper getMapper() { + ObjectMapper mapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addSerializer(Metric.class, new MetricTransform.Serializer()); + module.addDeserializer(ResultSet.class, new MetricTransform.MetricListDeserializer()); + module.addSerializer(Histogram.class, new HistogramTransform.Serializer()); + module.addSerializer(AnnotationWrapper.class, new AnnotationTransform.Serializer()); + module.addDeserializer(AnnotationWrappers.class, new AnnotationTransform.Deserializer()); + module.addSerializer(MetricQuery.class, new MetricQueryTransform.Serializer()); + mapper.registerModule(module); + return mapper; } /* gets objects in chunks. @@ -848,7 +887,7 @@ public enum Property { TSD_CONNECTION_COUNT("service.property.tsdb.connection.count", "2"), TSD_RETRY_COUNT("service.property.tsdb.retry.count", "3"), /** The TSDB backup read endpoint. 
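* Comma-separated; each backup endpoint is paired positionally with the read endpoint at the same index,
* e.g. "http://backup1:4466,http://backup2:4466" (hosts illustrative).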
*/ - TSD_ENDPOINT_BACKUP_READ("service.property.tsdb.endpoint.backup.read", "http://localhost:4466,http://localhost:4467"), + TSD_ENDPOINT_BACKUP_READ("service.property.tsdb.endpoint.backup.read", "http://localhost:4466,http://localhost:4467"), TSDB_READ_CONNECTION_REUSE_COUNT("service.property.tsdb.read.connection.reuse.count", "2000"); private final String _name; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AnnotationQuery.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AnnotationQuery.java index 42e7906c7..59cb9d3b2 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AnnotationQuery.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/AnnotationQuery.java @@ -32,10 +32,16 @@ package com.salesforce.dva.argus.service.tsdb; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.TSDBEntity.ReservedField; import com.salesforce.dva.argus.system.SystemException; +import java.io.IOException; import java.io.UnsupportedEncodingException; import java.text.MessageFormat; import java.util.HashMap; @@ -52,6 +58,11 @@ */ public class AnnotationQuery { + // The ~ character is not allowed in user-facing Argus syntax + public static final String TAG_NOT_EQUALS_INTERNAL_PREFIX = "~"; + public static final String TAG_NOT_EQUALS_TSDB_PREFIX = "not_literal_or("; + public static final String TAG_NOT_EQUALS_TSDB_SUFFIX = ")"; + //~ Instance fields ****************************************************************************************************************************** protected String _scope; @@ -81,7 +92,7 @@ public AnnotationQuery(String scope, String metric, Map tags, St /** Creates a new AnnotationQuery object. */ protected AnnotationQuery() { - _tags = new HashMap<>(); + _tags = new TreeMap<>(); } /** @@ -137,9 +148,9 @@ public String getMetric() { } /** - * Returns the tags associated with the query. + * Returns the sorted tags associated with the query. * - * @return The tags associated with the query. Will never return null, but may be empty. + * @return The sorted tags associated with the query. Will never return null, but may be empty. 
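The TAG_NOT_EQUALS_* constants added above define an internal encoding for negated tag filters: a value prefixed with ~ (a character user-facing Argus syntax disallows) marks a not-equals match, which the TSDB serializer rewrites into OpenTSDB's not_literal_or(...) filter. A small sketch of that rewrite, using the same constants:

    // "~prod" (internal not-equals form) becomes "not_literal_or(prod)" for OpenTSDB;
    // any other tag value is passed through unchanged.
    static String toTsdbTagValue(String value) {
        if (value.startsWith(AnnotationQuery.TAG_NOT_EQUALS_INTERNAL_PREFIX)) {
            return AnnotationQuery.TAG_NOT_EQUALS_TSDB_PREFIX
                    + value.substring(1)
                    + AnnotationQuery.TAG_NOT_EQUALS_TSDB_SUFFIX;
        }
        return value;
    }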
*/ public Map getTags() { return _tags; diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBService.java index a01490f24..b67972445 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBService.java @@ -36,6 +36,7 @@ import com.google.inject.Inject; import com.google.inject.Singleton; import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.CacheService; import com.salesforce.dva.argus.service.DefaultService; @@ -201,7 +202,8 @@ private boolean compulsoryCacheMiss(MetricQuery query) { return query.getStartTimestamp() > System.currentTimeMillis() || query.getEndTimestamp() > System.currentTimeMillis() || (System.currentTimeMillis() - query.getStartTimestamp() < LOWER_START_TIME_LIMIT_IN_MILLIS) || (System.currentTimeMillis() - query.getStartTimestamp() > UPPER_START_TIME_LIMIT_IN_MILLIS) || - (System.currentTimeMillis() - query.getEndTimestamp() > END_TIME_LIMIT_IN_MILLIS) || isQueryHavingTagWildcard(query); + (System.currentTimeMillis() - query.getEndTimestamp() > END_TIME_LIMIT_IN_MILLIS) || + isQueryHavingTagWildcard(query) || isQueryHavingHistogram(query); } /** @@ -512,7 +514,7 @@ private List _getCachedMetricValues(List quer _logger.info("Time spent in mapping tags in tsdb metrics to tags in cache: {}", afterTime - beforeTime); } // end if } catch (RuntimeException | IOException ex) { - _logger.error("Error occurred Reason:", ex.toString()); + _logger.error("Error occurred Reason: {}", ex.toString()); uncached.add(new MetricQueryTimestamp(query, originalStartTimestamp, originalEndTimestamp, query.getStartTimestamp(), query.getEndTimestamp())); } // end try-catch @@ -559,6 +561,10 @@ boolean isQueryHavingTagWildcard(MetricQuery query) { } return false; } + + boolean isQueryHavingHistogram(MetricQuery query) { + return (query.getShowHistogramBuckets() == true || query.getPercentile() != null); + } @Override public void dispose() { @@ -572,6 +578,11 @@ public void putMetrics(List metrics) { _defaultTsdbService.putMetrics(metrics); } + @Override + public void putHistograms(List histograms) { + _defaultTsdbService.putHistograms(histograms); + } + @Override public void putAnnotations(List annotations) { _defaultTsdbService.putAnnotations(annotations); @@ -698,7 +709,7 @@ public void run() { try { _insertIntoCache(); } catch (Exception ex) { - _logger.error("Error occurred Reason:", ex.toString()); + _logger.error("Error occurred Reason: {}", ex.toString()); } } @@ -722,7 +733,7 @@ private void _insertIntoCache() { } } } catch (Exception e) { - _logger.error("Error occurred Reason:", e.toString()); + _logger.error("Error occurred Reason: {}", e.toString()); } } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/DefaultTSDBService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/DefaultTSDBService.java index f36564994..8831c910a 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/DefaultTSDBService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/DefaultTSDBService.java @@ -37,10 +37,12 @@ import java.text.MessageFormat; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import 
java.util.Map; import java.util.Map.Entry; import java.util.Properties; +import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; @@ -56,6 +58,7 @@ import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery.Aggregator; import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; @@ -111,7 +114,10 @@ public Map> getMetrics(List queries) { requireNotDisposed(); requireArgument(queries != null, "Metric Queries cannot be null."); _logger.debug("Active Threads in the pool = " + ((ThreadPoolExecutor) _executorService).getActiveCount()); - + int noFailedQueries = 0; + Exception lastFailedException = null; + MetricQuery failedMetricQuery = null; + long start = System.currentTimeMillis(); Map> metricsMap = new HashMap<>(); Map>> futures = new HashMap<>(); @@ -131,12 +137,26 @@ public Map> getMetrics(List queries) { for (Entry>> entry : futures.entrySet()) { try { List m = entry.getValue().get(); + MetricQuery metricQuery = entry.getKey(); + Set tagsInQuery = new HashSet(); + if(metricQuery.getTags()!=null) { + tagsInQuery = metricQuery.getTags().keySet(); + } List metrics = new ArrayList<>(); if (m != null) { for (Metric metric : m) { if (metric != null) { metric.setQuery(entry.getKey()); + if(metric.getQuery().getAggregator() != Aggregator.NONE){ + Set tagKeys = metric.getTags().keySet(); + for(String tagKey : tagKeys) { + // removing tags that the user has not requested + if(!tagsInQuery.contains(tagKey)) { + metric.removeTag(tagKey); + } + } + } metrics.add(metric); } } @@ -144,10 +164,20 @@ public Map> getMetrics(List queries) { instrumentQueryLatency(_monitorService, entry.getKey(), queryStartExecutionTime.get(entry.getKey()), "metrics"); metricsMap.put(entry.getKey(), metrics); - } catch (InterruptedException | ExecutionException e) { + } catch (ExecutionException e){ + lastFailedException = e; + failedMetricQuery = entry.getKey(); + noFailedQueries++; + continue; + } catch (InterruptedException e) { throw new SystemException("Failed to get metrics. The query was: " + entry.getKey() + "\\n", e); } } + + if(noFailedQueries !=0 && noFailedQueries == queries.size()){ + throw new SystemException("Failed to get metrics. 
The query was: " + failedMetricQuery + "\\n", lastFailedException); + } + _logger.debug("Time to get Metrics = " + (System.currentTimeMillis() - start)); return metricsMap; } @@ -200,6 +230,7 @@ public List getAnnotations(List queries) { } } } + _logger.info("TSDB annotation query completed in {} ms", System.currentTimeMillis() - start); instrumentQueryLatency(_monitorService, query, start, "annotations"); } } catch(IOException ex) { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/HistogramTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/HistogramTransform.java new file mode 100644 index 000000000..4e9a34596 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/HistogramTransform.java @@ -0,0 +1,78 @@ +package com.salesforce.dva.argus.service.tsdb; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import javax.xml.bind.DatatypeConverter; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.salesforce.dva.argus.entity.Histogram; +import com.salesforce.dva.argus.entity.HistogramBucket; +import com.salesforce.dva.argus.entity.TSDBEntity.ReservedField; + +/** + * Transforms histograms from Java to JSON. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ +public class HistogramTransform { + + /** + * The histogram serializer. + * + * @author Dilip Devaraj (ddevaraj@salesforce.com) + */ + static class Serializer extends JsonSerializer { + + @Override + public void serialize(Histogram histogram, JsonGenerator jgen, SerializerProvider sp) throws IOException { + if(histogram != null) { + jgen.writeStartObject(); + jgen.writeStringField("metric", DefaultTSDBService.constructTSDBMetricName(histogram)); + jgen.writeNumberField("timestamp", histogram.getTimestamp()); + jgen.writeNumberField("overflow", histogram.getOverflow()); + jgen.writeNumberField("underflow", histogram.getUnderflow()); + serializeTags(histogram, jgen); + serializeBuckets(histogram, jgen); + jgen.writeEndObject(); + } + } + + private void serializeTags(Histogram histogram, JsonGenerator jgen) throws IOException { + jgen.writeObjectFieldStart("tags"); + + Map tags = new HashMap<>(histogram.getTags()); + + tags.put(ReservedField.META.getKey(), toMeta(histogram)); + for (Map.Entry tagEntry : tags.entrySet()) { + jgen.writeStringField(tagEntry.getKey(), tagEntry.getValue()); + } + jgen.writeEndObject(); + } + + private void serializeBuckets(Histogram histogram, JsonGenerator jgen) throws IOException { + jgen.writeObjectFieldStart("buckets"); + + for (Map.Entry bucketEntry : histogram.getBuckets().entrySet()) { + jgen.writeStringField(bucketEntry.getKey().toString(), bucketEntry.getValue().toString()); + } + jgen.writeEndObject(); + } + + private String toMeta(Histogram histogram) throws IOException { + Map meta = new HashMap<>(); + + meta.put(ReservedField.DISPLAY_NAME.getKey(), histogram.getDisplayName()); + meta.put(ReservedField.UNITS.getKey(), histogram.getUnits()); + try { + return DatatypeConverter.printBase64Binary(new ObjectMapper().writeValueAsString(meta).getBytes("UTF-8")).replace("=", "_"); + } catch (Exception ex) { + throw new IOException(ex); + } + } + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQuery.java 
b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQuery.java index 82f8aba15..d41402993 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQuery.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQuery.java @@ -59,6 +59,8 @@ public class MetricQuery extends AnnotationQuery { private Aggregator _downsampler; private Long _downsamplingPeriod; private MetricQueryContext _metricQueryContext; + private String[] _percentile; + private boolean _showHistogramBuckets; //~ Constructors ********************************************************************************************************************************* @@ -120,6 +122,24 @@ public void setNamespace(String namespace) { _namespace = namespace; } + /** + * Sets the query percentile. (Used to only retrieve histogram data) + * + * @param percentile The percentile for histogram data. + */ + public void setPercentile(String[] percentile) { + _percentile = percentile; + } + + /** + * Sets the showHistogramBuckets (Used to only retrieve histogram data) + * + * @param showHistogramBuckets + */ + public void setShowHistogramBuckets(boolean showHistogramBuckets) { + _showHistogramBuckets = showHistogramBuckets; + } + /** * Returns the method used to aggregate query results. * @@ -146,6 +166,25 @@ public void setAggregator(Aggregator aggregator) { public Aggregator getDownsampler() { return _downsampler; } + + /** + * Returns the query percentile. + * + * @return query percentile. + */ + public String[] getPercentile() { + return _percentile; + } + + /** + * Returns if the histogram buckets should be shown or not + * + * @return Should histogram buckets be shown or not + */ + public boolean getShowHistogramBuckets() { + return _showHistogramBuckets; + } + /** * Sets the method used to downsample the query results. @@ -271,24 +310,32 @@ public boolean equals(Object obj) { */ @Override public String toString() { - String pattern = "start={0,number,#}&end={1,number,#}&m={2}{3}&ms=true&show_tsuids=true"; - long start = Math.max(0, getStartTimestamp() - 1); - long end = Math.max(start, getEndTimestamp() + 1); - StringBuilder sb = new StringBuilder(); - - sb.append(getAggregator() == null ? "avg" : getAggregator().getDescription()).append(":"); - if (getDownsampler() != null) { - sb.append(getDownsamplingPeriod()).append("ms").append("-").append(getDownsampler().getDescription()).append(":"); - } - sb.append(getTSDBMetricName()); - - Map tags = new HashMap<>(getTags()); - - try { - return MessageFormat.format(pattern, start, end, sb.toString(), toTagParameterArray(tags)); - } catch (UnsupportedEncodingException ex) { - throw new SystemException(ex); - } + String pattern = "start={0,number,#}&end={1,number,#}&m={2}{3}&ms=true&show_tsuids=true"; + long start = Math.max(0, getStartTimestamp() - 1); + long end = Math.max(start, getEndTimestamp() + 1); + StringBuilder sb = new StringBuilder(); + + sb.append(getAggregator() == null ? 
"avg" : getAggregator().getDescription()).append(":"); + if (getDownsampler() != null) { + sb.append(getDownsamplingPeriod()).append("ms").append("-").append(getDownsampler().getDescription()).append(":"); + } + + if (getPercentile() != null) { + sb.append(getPercentile()).append(":"); + } + + if (getShowHistogramBuckets() != false) { + sb.append("show-histogram-buckets").append(":"); + } + sb.append(getTSDBMetricName()); + + Map tags = new HashMap<>(getTags()); + + try { + return MessageFormat.format(pattern, start, end, sb.toString(), toTagParameterArray(tags)); + } catch (UnsupportedEncodingException ex) { + throw new SystemException(ex); + } } //~ Enums **************************************************************************************************************************************** @@ -305,10 +352,18 @@ public enum Aggregator { SUM("sum"), AVG("avg"), DEV("dev"), + + // these 3 aggregators is IMIN, IMAX and ISUM are also used for providing the default opentsdb interpolated aggregations + IMIN("min"), + IMAX("max"), + ISUM("sum"), + ZIMSUM("zimsum"), COUNT("count"), MIMMIN("mimmin"), MIMMAX("mimmax"), + FIRST("first"), + LAST("last"), NONE("none"); private final String _description; @@ -391,7 +446,7 @@ public static Transform correspondingTransform(Aggregator agg, TransformFactory * * @author Dilip Devaraj(ddevaraj@salesforce.com) */ - class MetricQueryContext { + protected class MetricQueryContext { private String _readEndPoint; /** diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQueryTransform.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQueryTransform.java index 13545e226..405d0476e 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQueryTransform.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/MetricQueryTransform.java @@ -75,9 +75,14 @@ public void serialize(MetricQuery query, JsonGenerator jgen, SerializerProvider jgen.writeStringField("aggregator", agg == null ? 
MetricQuery.Aggregator.AVG.getDescription() : agg.getDescription()); jgen.writeStringField("metric", query.getTSDBMetricName()); if(!query.getTags().isEmpty()) { + // Rewrite tag values that start with the '~' internal not-equals indicator with the TSDB not-equals jgen.writeObjectFieldStart("tags"); for(Map.Entry tag : query.getTags().entrySet()) { - jgen.writeStringField(tag.getKey(), tag.getValue()); + String value = tag.getValue(); + if (value.charAt(0) == '~') { + value = MetricQuery.TAG_NOT_EQUALS_TSDB_PREFIX + value.substring(1) + MetricQuery.TAG_NOT_EQUALS_TSDB_SUFFIX; + } + jgen.writeStringField(tag.getKey(), value); } jgen.writeEndObject(); } @@ -85,6 +90,19 @@ public void serialize(MetricQuery query, JsonGenerator jgen, SerializerProvider if(query.getDownsampler() != null) { jgen.writeStringField("downsample", query.getDownsamplingPeriod() + "ms-" + query.getDownsampler().getDescription()); } + + if(query.getPercentile() != null) { + jgen.writeArrayFieldStart("percentiles"); + for(String percentile : query.getPercentile()) { + jgen.writeNumber(Float.parseFloat(percentile)); + } + jgen.writeEndArray(); + } + + if(query.getShowHistogramBuckets() != false){ + jgen.writeBooleanField("showHistogramBuckets", true); + } + jgen.writeEndObject(); jgen.writeEndArray(); jgen.writeEndObject(); diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBEngine.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBEngine.java deleted file mode 100644 index 8c7e07005..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBEngine.java +++ /dev/null @@ -1,247 +0,0 @@ -package com.salesforce.dva.argus.service.tsdb; - -import java.sql.Connection; -import java.sql.Date; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.sql.Statement; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.service.tsdb.MetricQuery.Aggregator; -import com.salesforce.dva.argus.system.SystemException; - -public class PhoenixTSDBEngine { - - protected Logger _logger = LoggerFactory.getLogger(getClass()); - - public PhoenixTSDBEngine() { - } - - void createOrUpdateView(Connection connection, Metric metric) { - - String viewName = getPhoenixViewName(metric.getScope(), metric.getMetric()); - - String createViewSql = MessageFormat.format("CREATE VIEW IF NOT EXISTS {0} ({1}) AS SELECT * " - + "FROM ARGUS.METRICS APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY=900000", viewName, generateCols(metric.getTags())); - _logger.info("Create View query: " + createViewSql); - - Statement stmt = null; - try { - stmt = connection.createStatement(); - stmt.executeUpdate(createViewSql); - } catch(SQLException sqle) { - throw new SystemException("Database access error occurred or " - + "createStatement() was called on a closed connection.", sqle); - } finally { - if(stmt != null) { - try { - stmt.close(); - } catch (SQLException e) { - throw new SystemException("Failed to close statement. 
This should never happen", e); - } - } - } - } - - void upsertMetrics(Connection connection, Metric metric) { - - String viewName = getPhoenixViewName(metric.getScope(), metric.getMetric()); - - String tagkeys = "", tagvalues = ""; - for(Map.Entry tagEntry : metric.getTags().entrySet()) { - tagkeys += "\"" + tagEntry.getKey() + "\","; - tagvalues += "'" + tagEntry.getValue() + "',"; - } - - if(metric.getDisplayName() != null && !metric.getDisplayName().isEmpty()) { - tagkeys += "DISPLAY_NAME,"; - tagvalues += "'" + metric.getDisplayName() + "',"; - } - - if(metric.getUnits() != null && !metric.getUnits().isEmpty()) { - tagkeys += "UNITS" + ","; - tagvalues += "'" + metric.getUnits() + "',"; - } - - tagkeys = tagkeys.substring(0, tagkeys.length() - 1); - tagvalues = tagvalues.substring(0, tagvalues.length() - 1); - - String upsertMetricSql = MessageFormat.format("upsert into {0} (ts, val, {1}) values(?, ?, {2})", viewName, tagkeys, tagvalues); - PreparedStatement preparedStmt = null; - try { - preparedStmt = connection.prepareStatement(upsertMetricSql); - for(Map.Entry datapointEntry : metric.getDatapoints().entrySet()) { - - Long timestamp = datapointEntry.getKey(); - Double value = datapointEntry.getValue(); - - preparedStmt.setDate(1, new Date(timestamp)); - preparedStmt.setDouble(2, value); - preparedStmt.execute(); - } - - // Commit maybe in batches of 1000 datapoints. - connection.commit(); - } catch (SQLException e) { - throw new SystemException("Failed to insert values into Phoenix.", e); - } finally { - if(preparedStmt != null) { - try { - preparedStmt.close(); - } catch (SQLException e) { - throw new SystemException("Failed to close Prepared Statement. This should never happen.", e); - } - } - } - - } - - List selectMetrics(Connection connection, MetricQuery metricQuery) { - Date startDate = new Date(metricQuery.getStartTimestamp()); - Date endDate = new Date(metricQuery.getEndTimestamp()); - - Map metrics = new HashMap<>(); - - String selectQuery = getPhoenixQuery(metricQuery); - try { - PreparedStatement preparedStmt = connection.prepareStatement(selectQuery); - preparedStmt.setDate(1, endDate); - preparedStmt.setDate(2, startDate); - - ResultSet rs = preparedStmt.executeQuery(); - - ResultSetMetaData metaData = rs.getMetaData(); - int colCount = metaData.getColumnCount(); - - while(rs.next()) { - - Map tags = new HashMap<>(); - - Double value = rs.getDouble(1); - long timestamp = rs.getDate(2).getTime(); - String displayName = rs.getString(3); - String units = rs.getString(4); - - for(int i=5; i<=colCount; i++) { - tags.put(metaData.getColumnName(i), rs.getString(i)); - } - - Map datapoints = new HashMap<>(); - datapoints.put(timestamp, value); - String identifier = tags.toString(); - if(metrics.containsKey(identifier)) { - metrics.get(identifier).addDatapoints(datapoints); - } else { - Metric metric = new Metric(metricQuery.getScope(), metricQuery.getMetric()); - metric.setTags(tags); - metric.setDatapoints(datapoints); - metric.setDisplayName(displayName); - metric.setUnits(units); - metrics.put(identifier, metric); - } - } - } catch(SQLException sqle) { - _logger.warn("Failed to read data from Phoenix.", sqle); - } - - return new ArrayList<>(metrics.values()); - } - - private String generateCols(Map tags) { - StringBuilder colSb = new StringBuilder(); - for(String tagKey : tags.keySet()) { - colSb.append("\"").append(tagKey).append("\" ").append("varchar"). 
append(", "); - } - - colSb.append("CONSTRAINT PK PRIMARY KEY ("); - for(String tagKey : tags.keySet()) { - colSb.append("\"").append(tagKey).append("\","); - } - colSb.deleteCharAt(colSb.length() - 1); - colSb.append(")"); - - return colSb.toString(); - } - - private String getPhoenixQuery(MetricQuery query) { - - String viewName = getPhoenixViewName(query.getScope(), query.getMetric()); - String agg = convertArgusAggregatorToPhoenixAggregator(query.getAggregator()); - - String tagkeys = "", tagWhereClaue = ""; - for(Map.Entry tagEntry : query.getTags().entrySet()) { - tagkeys += ", \"" + tagEntry.getKey() + "\""; - //TODO: Add support for tagKey=* and tagKey=a|b - String tagValue = tagEntry.getValue(); - if (tagValue.equals("*")) { - // no need to filter on tagkey - } else if (tagValue.contains("|")) { - List tagList = Arrays.asList(tagValue.split("\\|")); - String tagValues = tagList.stream() - .map((s) -> "'" + s + "'") - .collect(Collectors.joining(", ")); - tagWhereClaue += " AND \"" + tagEntry.getKey() + "\" IN (" + tagValues + ")"; - } else { - tagWhereClaue += " AND \"" + tagEntry.getKey() + "\" IN ('" + tagValue + "')"; - } - } - - String selectSql = MessageFormat.format("SELECT {0}(val) val, ts epoch_time, display_name, units {1} FROM {2}" - + " WHERE ts <= ? AND ts >= ? {3}" - + " GROUP BY epoch_time, display_name, units {1}", agg, tagkeys, viewName, tagWhereClaue); - - if(query.getDownsampler() != null) { - if(!query.getDownsampler().equals(query.getAggregator())) { - String downsamplingAgg = convertArgusAggregatorToPhoenixAggregator(query.getDownsampler()); - selectSql = MessageFormat.format("SELECT {0}(val) val, trunc(T.epoch_time, ''MILLISECOND'', {1}) epoch_time," - + " display_name, units {2} FROM ({3}) AS T GROUP BY epoch_time, display_name, units {2}", - downsamplingAgg, query.getDownsamplingPeriod(), - tagkeys, selectSql); - } else { - selectSql = MessageFormat.format("SELECT {0}(val) val, trunc(ts, ''MILLISECOND'', {1}) epoch_time, display_name," - + " units {2} FROM {3} WHERE ts < ? AND ts >= ? 
{4} GROUP BY epoch_time, display_name, units {1}", - agg, query.getDownsamplingPeriod(), tagkeys, viewName, tagWhereClaue); - } - } - - return selectSql; - } - - private String getPhoenixViewName(String scope, String metric) { - return "\"" + scope + "\".\"" + metric + "\""; - } - - private String convertArgusAggregatorToPhoenixAggregator(Aggregator aggregator) { - if(aggregator == null) { - return "AVG"; - } - - switch(aggregator) { - case AVG: - return "AVG"; - case SUM: - return "SUM"; - case MIN: - return "MIN"; - case MAX: - return "MAX"; - case DEV: - return "STDDEV_POP"; - default: - return "AVG"; - } - } - -} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBService.java deleted file mode 100644 index 943c11ef1..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/tsdb/PhoenixTSDBService.java +++ /dev/null @@ -1,167 +0,0 @@ -package com.salesforce.dva.argus.service.tsdb; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.SQLException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import org.apache.phoenix.query.QueryServices; -import org.apache.phoenix.schema.TableAlreadyExistsException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.inject.Inject; -import com.salesforce.dva.argus.entity.Annotation; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.service.DefaultService; -import com.salesforce.dva.argus.service.MonitorService; -import com.salesforce.dva.argus.service.TSDBService; -import com.salesforce.dva.argus.system.SystemAssert; -import com.salesforce.dva.argus.system.SystemConfiguration; -import com.salesforce.dva.argus.system.SystemException; - - -/* - * Quotes around columns and view names make them case sensitive. - */ -public class PhoenixTSDBService extends DefaultService implements TSDBService { - - protected Logger _logger = LoggerFactory.getLogger(getClass()); - private final MonitorService _monitorService; - private final String _phoenixJDBCUrl; - - // A separate connection for each thread. 
- private final Connection _connection; - - private final PhoenixTSDBEngine phoenixEngine = new PhoenixTSDBEngine(); - - @Inject - public PhoenixTSDBService(SystemConfiguration config, MonitorService monitorService) { - super(config); - requireArgument(config != null, "System configuration cannot be null."); - requireArgument(monitorService != null, "Monitor service cannot be null."); - - _monitorService = monitorService; - _phoenixJDBCUrl = config.getValue(Property.PHOENIX_JDBC_URL.getName(), Property.PHOENIX_JDBC_URL.getDefaultValue()); - - try { - Properties props = new Properties(); - props.setProperty(QueryServices.CLIENT_CACHE_ENCODING, "PROTOBUF"); - _connection = DriverManager.getConnection(_phoenixJDBCUrl, props); - } catch (SQLException e) { - throw new SystemException("Failed to create connection to phoenix using jdbc url: " + _phoenixJDBCUrl, e); - } - - try { - _connection.createStatement().execute("CREATE SEQUENCE IF NOT EXISTS METRIC_ID_SEQ"); - } catch (SQLException e) { - throw new SystemException("Failed to create sequence : " + _phoenixJDBCUrl, e); - } - - try { - _connection.createStatement().execute("CREATE TABLE ARGUS.METRICS (id INTEGER NOT NULL, ts DATE NOT NULL, val DOUBLE, display_name varchar, units varchar CONSTRAINT PK PRIMARY KEY(id,ts)) APPEND_ONLY_SCHEMA = true, UPDATE_CACHE_FREQUENCY = 900000, AUTO_PARTITION_SEQ=METRIC_ID_SEQ"); - // TODO change the create table ddl to IF NOT EXISTS PHOENIX-3660 is fixed - } catch (TableAlreadyExistsException e) { - System.out.println(); - } catch (SQLException e) { - throw new SystemException("Failed to create base table: " + _phoenixJDBCUrl, e); - } - } - - @Override - public void dispose() { - super.dispose(); - try { - _connection.close(); - } catch (SQLException e) { - throw new SystemException("Failed to close jdbc connection to phoenix. This should never happen.", e); - } - } - - @Override - public Properties getServiceProperties() { - Properties serviceProps = new Properties(); - - for (Property property : Property.values()) { - serviceProps.put(property.getName(), property.getDefaultValue()); - } - return serviceProps; - } - - @Override - public void putMetrics(List metrics) { - SystemAssert.requireArgument(metrics != null && !metrics.isEmpty(), "Cannot put null or empty metrics list."); - - for(Metric metric : metrics) { - phoenixEngine.createOrUpdateView(_connection, metric); - phoenixEngine.upsertMetrics(_connection, metric); - } - } - - @Override - public Map> getMetrics(List queries) { - SystemAssert.requireArgument(queries != null, "Metric queries list cannot be null."); - - Map> result = new HashMap<>(); - for(MetricQuery query : queries) { - result.put(query, phoenixEngine.selectMetrics(_connection, query)); - } - return result; - } - - @Override - public void putAnnotations(List annotations) { - throw new UnsupportedOperationException("PhoenixTSDBService does not support annotations"); - } - - @Override - public List getAnnotations(List queries) { - throw new UnsupportedOperationException("PhoenixTSDBService does not support annotations"); - } - - //~ Enums **************************************************************************************************************************************** - - /** - * Enumerates the implementation specific configuration properties. - * - * @author Tom Valine (tvaline@salesforce.com) - */ - public enum Property { - - /** The TSDB read endpoint. 
*/ - PHOENIX_JDBC_URL("service.property.tsdb.phoenix.jdbc.url", "jdbc:phoenix:localhost:2181:/hbase"), - PHOENIX_CONNECTIONS("service.property.tsdb.phoenix.connections", "10"); - - private final String _name; - private final String _defaultValue; - - private Property(String name, String defaultValue) { - _name = name; - _defaultValue = defaultValue; - } - - /** - * Returns the property name. - * - * @return The property name. - */ - public String getName() { - return _name; - } - - /** - * Returns the default value for the property. - * - * @return The default value. - */ - public String getDefaultValue() { - return _defaultValue; - } - } - -} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/users/DefaultUserService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/users/DefaultUserService.java index 51b1c8eb4..f1c78add1 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/users/DefaultUserService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/users/DefaultUserService.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service.users; import com.google.inject.Inject; @@ -63,9 +63,7 @@ public class DefaultUserService extends DefaultJPAService implements UserService //~ Instance fields ****************************************************************************************************************************** private Logger _logger = LoggerFactory.getLogger(getClass()); - @Inject - Provider emf; - + //~ Constructors ********************************************************************************************************************************* @@ -112,7 +110,7 @@ public void deleteUser(PrincipalUser user) { requireNotDisposed(); requireArgument(user != null && user.getId() != null && user.getId().compareTo(ZERO) > 0, "User cannot be null and must have a valid ID."); _logger.debug("Deleting user {}.", user); - + EntityManager em = emf.get(); deleteEntity(em, user); em.flush(); @@ -123,14 +121,14 @@ public void deleteUser(PrincipalUser user) { public PrincipalUser updateUser(PrincipalUser user) { requireNotDisposed(); requireArgument(user != null, "User cannot be null."); - + EntityManager em = emf.get(); PrincipalUser result = mergeEntity(em, user); _logger.debug("Updated user to : {}", result); _auditService.createAudit("Updated user : {0}", result, result); em.flush(); - + return result; } @@ -155,7 +153,7 @@ public synchronized PrincipalUser findAdminUser() { } return result; } - + @Override @Transactional public synchronized PrincipalUser findDefaultUser() { @@ -184,6 +182,6 @@ public long getUniqueUserCount() { requireNotDisposed(); return PrincipalUser.findUniqueUserCount(emf.get()); } - + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/DefaultWardenService.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/DefaultWardenService.java index f78c52fe6..b6152b152 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/DefaultWardenService.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/DefaultWardenService.java @@ -415,8 +415,11 @@ private void _updateWardenAlertsForUser(PrincipalUser user, PolicyCounter counte if (wardenAlert == null) { wardenAlert = _constructWardenAlertForUser(user, counter); } - wardenAlert.setEnabled(true); - _alertService.updateAlert(wardenAlert); + + if(!wardenAlert.isEnabled()) { + wardenAlert.setEnabled(true); + _alertService.updateAlert(wardenAlert); + } } private void _enableWarden(boolean enabled) { @@ -526,7 +529,7 @@ private void _disableWardenAlertsThatAreNotUpdated() { try { _logger.info("Disabling warden alert:{}", alert.getName()); - List metrics = _metricService.getMetrics(alert.getExpression()); + List metrics = _metricService.getMetrics(alert.getExpression()).getMetricsList(); Metric metric = metrics.isEmpty() ? null : metrics.get(0); if (metric == null || metric.getDatapoints().isEmpty()) { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenApiNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenApiNotifier.java index 6feedb7c5..548b760ab 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenApiNotifier.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenApiNotifier.java @@ -74,10 +74,11 @@ public String getName() { } @Override - protected void sendAdditionalNotification(NotificationContext context) { + protected boolean sendAdditionalNotification(NotificationContext context) { _wardenService.suspendUser(super.getWardenUser(context.getAlert().getName()), SubSystem.API); super.addAnnotationSuspendedUser(context, SubSystem.API); super.sendWardenEmailToUser(context, SubSystem.API); + return true; } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenNotifier.java index b8e530138..bfbca1330 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenNotifier.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenNotifier.java @@ -47,6 +47,7 @@ import com.salesforce.dva.argus.service.WardenService.SubSystem; import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; import com.salesforce.dva.argus.service.alert.notifier.DefaultNotifier; +import com.salesforce.dva.argus.service.mail.EmailContext; import com.salesforce.dva.argus.system.SystemConfiguration; import java.math.BigInteger; import java.text.MessageFormat; @@ -135,7 +136,7 @@ public String getName() { } @Override - protected abstract void sendAdditionalNotification(NotificationContext context); + protected abstract boolean sendAdditionalNotification(NotificationContext context); /** * Add annotation for user suspension to the triggers.warden metric.. @@ -197,11 +198,21 @@ protected void sendWardenEmailToUser(NotificationContext context, SubSystem subS } else { message.append(MessageFormat.format("
Reinstatement Time: {0}", DATE_FORMATTER.get().format(new Date(record.getSuspendedUntil())))); } - _mailService.sendMessage(to, subject, message.toString(), "text/html; charset=utf-8", MailService.Priority.HIGH); + + EmailContext.Builder emailContextBuilder = new EmailContext.Builder() + .withRecipients(to) + .withSubject(subject) + .withEmailBody(message.toString()) + .withContentType("text/html; charset=utf-8") + .withEmailPriority(MailService.Priority.HIGH); + _mailService.sendMessage(emailContextBuilder.build()); to.clear(); to.add("argus-admin@salesforce.com"); message.append("

Click here to view alert definition.
"); - _mailService.sendMessage(to, subject, message.toString(), "text/html; charset=utf-8", MailService.Priority.HIGH); + emailContextBuilder = emailContextBuilder + .withRecipients(to) + .withEmailBody(message.toString()); + _mailService.sendMessage(emailContextBuilder.build()); } private String getAlertUrl(BigInteger id) { @@ -232,11 +243,11 @@ protected PrincipalUser getWardenUser(String wardenAlertName) { * @param notificationContext The notification context. */ @Override - public void clearNotification(NotificationContext notificationContext) { } + public boolean clearNotification(NotificationContext notificationContext) { return true; } /** No additional action needs to be taken for clearing warden notifications as they are not stateful. This implementation is empty. */ @Override - protected void clearAdditionalNotification(NotificationContext context) { } + protected boolean clearAdditionalNotification(NotificationContext context) { return true; } @Override public Properties getNotifierProperties() { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenPostingNotifier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenPostingNotifier.java index 3b7b4f299..f51738a2c 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenPostingNotifier.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/service/warden/WardenPostingNotifier.java @@ -74,10 +74,11 @@ public String getName() { } @Override - protected void sendAdditionalNotification(NotificationContext context) { + protected boolean sendAdditionalNotification(NotificationContext context) { _wardenService.suspendUser(super.getWardenUser(context.getAlert().getName()), SubSystem.POSTING); super.addAnnotationSuspendedUser(context, SubSystem.POSTING); super.sendWardenEmailToUser(context, SubSystem.POSTING); + return true; } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemAssert.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemAssert.java index 8ba1036eb..444a626d3 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemAssert.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemAssert.java @@ -30,11 +30,12 @@ */ package com.salesforce.dva.argus.system; +import java.util.function.Predicate; /** * Provides functionality to assert that certain conditions are met. * - * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com) + * @author Tom Valine (tvaline@salesforce.com), Bhinav Sura (bhinav.sura@salesforce.com), Ian Keck (ikeck@salesforce.com) */ public class SystemAssert { @@ -54,7 +55,9 @@ private SystemAssert() { * @param message The exception message. */ public static void requireArgument(boolean condition, String message) { - require(condition, message, IllegalArgumentException.class); + if (!condition) { + raiseException(message, IllegalArgumentException.class); + } } /** @@ -64,19 +67,100 @@ public static void requireArgument(boolean condition, String message) { * @param message The exception message. 
*/ public static void requireState(boolean condition, String message) { - require(condition, message, IllegalStateException.class); + if (!condition) { + raiseException(message, IllegalStateException.class); + } } - private static void require(boolean condition, String message, Class<? extends RuntimeException> type) { - if (!condition) { - RuntimeException result; - try { - result = type.getConstructor(String.class).newInstance(message); - } catch (Exception ex) { - throw new SystemException(ex); + /** + * Throws an exception of the specified type and message. + * + * @param message Message to return. + * @param type Type of RuntimeException to raise. + */ + private static void raiseException(String message, Class<? extends RuntimeException> type) throws RuntimeException + { + RuntimeException result; + try { + result = type.getConstructor(String.class).newInstance(message); + } catch (Exception ex) { + throw new SystemException(ex); + } + throw result; + } + + + // NOTE - these functions add a mechanism that allows you to capture error messages from arbitrary exceptions thrown by the test code. + + /** + * Throws an IllegalArgumentException if the predicate fails. + * + * @param arg Object to test. + * @param t The predicate to evaluate. + * @param message The exception message. + */ + public static <P> void requireArgumentP(P arg, Predicate<P> t, String message, boolean captureMsg) { + requirePredicate(arg, t, message, IllegalArgumentException.class, captureMsg); + } + + public static <P> void requireArgumentP(P arg, Predicate<P> t, String message) { + requirePredicate(arg, t, message, IllegalArgumentException.class, true); + } + + /** + * Throws an IllegalStateException if the predicate fails. + * + * @param arg Object to test. + * @param t The condition to evaluate. + * @param message The exception message. + */ + public static <P> void requireStateP(P arg, Predicate<P> t, String message, boolean captureMsg) { + requirePredicate(arg, t, message, IllegalStateException.class, captureMsg); + } + + public static <P> void requireStateP(P arg, Predicate<P> t, String message) { + requirePredicate(arg, t, message, IllegalStateException.class, true); + } + + + private static <P> void requirePredicate(P arg, Predicate<P>
t, String message, Class type, boolean captureMessage) { + + boolean ok = true; + String msg = message; + try + { + ok = t.test(arg); + } + catch (RuntimeException ex) + { + ok = false; + if (captureMessage) + { + Throwable e = ex.getCause(); + msg = ex.getMessage(); + if (e != null && (msg == null || msg.isEmpty())) + { + msg = e.getMessage(); + } + if (msg == null || msg.isEmpty()) + { + msg = "unknown error"; + } } - throw result; + } + catch (Exception e) + { + ok = false; + if (captureMessage) + { + msg = e.getMessage(); + } + } + + if (!ok) + { + raiseException(msg, type); } } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemConfiguration.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemConfiguration.java index 65b7e4151..19121083b 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemConfiguration.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemConfiguration.java @@ -32,6 +32,7 @@ package com.salesforce.dva.argus.system; import com.google.inject.Singleton; + import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; @@ -45,12 +46,12 @@ import java.nio.charset.Charset; import java.text.MessageFormat; import java.util.Properties; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; import java.util.Set; import java.util.TreeSet; import java.util.regex.Pattern; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + /** * Immutable system configuration information. * @@ -112,9 +113,9 @@ public static void generateConfiguration(InputStream input, OutputStream output, do { String defaultValue = config.getValue(property); - out.write(MessageFormat.format("Enter value for ''{0}'' ", name)); + out.write(MessageFormat.format("Enter value for {0} ", name)); if (defaultValue != null) { - out.write(MessageFormat.format("'(default = '{0}')': ", config.getValue(property))); + out.write(MessageFormat.format("(default = {0}): ", config.getValue(property))); } out.flush(); @@ -181,6 +182,11 @@ public String getValue(String key, String defaultValue) { return getProperty(key, defaultValue); } + public String refreshAndGetValue(Property propertyFile, String key, String defaultValue) { + SystemInitializer.readFile(this, getValue(propertyFile)); + return getValue(key, defaultValue); + } + /** * Returns the list of configured properties and their values. 
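The new refreshAndGetValue(...) above re-reads a service property file from disk before resolving a key, so operators can change a setting without restarting the process. A hedged usage sketch (the key and default value are hypothetical; the property constant is added just below):

    // Re-read the data-lag service property file, then resolve the key with a fallback.
    String threshold = systemConfiguration.refreshAndGetValue(
            SystemConfiguration.Property.DATA_LAG_SERVICE_PROPERTY_FILE,
            "datalag.threshold.millis",  // hypothetical key
            "300000");                   // default if the key is absent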
* @@ -222,15 +228,20 @@ public enum Property { EMAIL_ENABLED("system.property.mail.enabled", "false"), GOC_ENABLED("system.property.goc.enabled", "false"), GUS_ENABLED("system.property.gus.enabled", "false"), - REFOCUS_ENABLED("system.property.refocus.enabled", "false"), - + CALLBACK_ENABLED("system.property.callback.enabled", "false"), + PAGERDUTY_ENABLED("system.property.pagerduty.enabled", "false"), + IMAGES_IN_NOTIFICATIONS_ENABLED("system.property.images.in.notifications.enabled", "true"), + EMAIL_EXCEPTIONS("system.property.mail.exceptions", "false"), - DATA_LAG_MONITOR_ENABLED("system.property.monitor.data.lag", "false"), - DATA_LAG_QUERY_EXPRESSION("system.property.data.lag.expression", "sampleExpression"), - DATA_LAG_THRESHOLD("system.property.data.lag.threshold.millis", "300000"), - DATA_LAG_NOTIFICATION_EMAIL_ADDRESS("system.property.data.lag.notification.emailId", "someone@mycompany.com"), - DATA_LAG_WHITE_LISTED_SCOPES("system.property.data.lag.whitelisted.scopes", ""), + ARGUS_GUS_GROUP_ID("notifier.property.argus.gus.group_id", "groupId"), + + REFOCUS_ENABLED("system.property.refocus.enabled", "false"), + REFOCUS_CLIENT_THREADS("system.property.refocus.client.threads", "1"), + REFOCUS_CLIENT_CONNECT_TIMEOUT("system.property.refocus.client.connect.timeout", "10000"), + + DATA_LAG_SERVICE_IMPL_CLASS("service.binding.datalagservice", "com.salesforce.dva.argus.service.monitor.DataLagMonitorGoldenMetric"), + DATA_LAG_SERVICE_PROPERTY_FILE("service.config.datalagservice","placeholder_datalagservice.properties"), CLIENT_THREADS("system.property.client.threads", "2"), CLIENT_CONNECT_TIMEOUT("system.property.client.connect.timeout", "10000"), @@ -239,47 +250,79 @@ public enum Property { DC_LIST("system.property.dc.list", "DC1,DC2,DC3,DC4,DC5"), CACHE_SERVICE_IMPL_CLASS("service.binding.cache", "com.salesforce.dva.argus.service.cache.NoOperationCacheService"), - CACHE_SERVICE_PROPERTY_FILE("service.config.cache","argus.properties"), + CACHE_SERVICE_PROPERTY_FILE("service.config.cache","placeholder_redis.properties"), MQ_SERVICE_IMPL_CLASS("service.binding.mq", "com.salesforce.dva.argus.service.mq.kafka.KafkaMessageService"), - MQ_SERVICE_PROPERTY_FILE("service.config.mq","argus.properties"), + MQ_SERVICE_PRODUCER_IMPL_CLASS("service.binding.mq.producer", "com.salesforce.dva.argus.service.mq.kafka.DefaultProducer"), + MQ_SERVICE_CONSUMER_IMPL_CLASS("service.binding.mq.consumer", "com.salesforce.dva.argus.service.mq.kafka.DefaultConsumer"), + MQ_SERVICE_PROPERTY_FILE("service.config.mq","placeholder_kafka.properties"), ALERT_SERVICE_IMPL_CLASS("service.binding.alert", "com.salesforce.dva.argus.service.alert.DefaultAlertService"), - ALERT_SERVICE_PROPERTY_FILE("service.config.alert","argus.properties"), - NOTIFIER_PROPERTY_FILE("service.config.notifier","notifier.properties"), + ALERT_SERVICE_PROPERTY_FILE("service.config.alert","placeholder_alert.properties"), + NOTIFIER_PROPERTY_FILE("service.config.notifier","placeholder_notifier.properties"), SCHEDULING_SERVICE_IMPL_CLASS("service.binding.scheduling", "com.salesforce.dva.argus.service.schedule.DefaultSchedulingService"), - SCHEDULING_SERVICE_PROPERTY_FILE("service.config.scheduling","argus.properties"), + SCHEDULING_SERVICE_PROPERTY_FILE("service.config.scheduling","placeholder_scheduling.properties"), MAIL_SERVICE_IMPL_CLASS("service.binding.mail", "com.salesforce.dva.argus.service.mail.DefaultMailService"), - MAIL_SERVICE_PROPERTY_FILE("service.config.mail","argus.properties"), + 
MAIL_SERVICE_PROPERTY_FILE("service.config.mail","placeholder_mail.properties"), CALLBACK_SERVICE_IMPL_CLASS("service.binding.callback", "com.salesforce.dva.argus.service.callback.DefaultCallbackService"), - CALLBACK_SERVICE_PROPPERTY_FILE("service.config.callback", "argus.properties"), + CALLBACK_SERVICE_PROPPERTY_FILE("service.config.callback", "placeholder_callback.properties"), AUTH_SERVICE_IMPL_CLASS("service.binding.auth", "com.salesforce.dva.argus.service.auth.LDAPAuthService"), - AUTH_SERVICE_PROPERTY_FILE("service.config.auth","argus.properties"), + AUTH_SERVICE_PROPERTY_FILE("service.config.auth","placeholder_ldap.properties"), - SCHEMA_SERVICE_IMPL_CLASS("service.binding.schema", "com.salesforce.dva.argus.service.schema.AsyncHbaseSchemaService"), - SCHEMA_SERVICE_PROPERTY_FILE("service.config.schema","argus.properties"), + SCHEMA_SERVICE_IMPL_CLASS("service.binding.schema", "com.salesforce.dva.argus.service.schema.ElasticSearchSchemaService"), + SCHEMA_SERVICE_PROPERTY_FILE("service.config.schema","placeholder_schema.properties"), + + QUERYSTORE_SERVICE_IMPL_CLASS("service.binding.querystore", "com.salesforce.dva.argus.service.querystore.NoOperationQueryStoreService"), + QUERYSTORE_SERVICE_PROPERTY_FILE("service.config.querystore","placeholder_querystore.properties"), + + IMAGE_SERVICE_IMPL_CLASS("service.binding.imageservice", "com.salesforce.dva.argus.service.image.DefaultImageService"), + IMAGE_SERVICE_PROPERTY_FILE("service.config.imageservice","placeholder_imageservice.properties"), HISTORY_SERVICE_IMPL_CLASS("service.binding.history", "com.salesforce.dva.argus.service.history.HBaseHistoryService"), - HISTORY_SERVICE_PROPERTY_FILE("service.config.history","argus.properties"), + HISTORY_SERVICE_PROPERTY_FILE("service.config.history","placeholder_history.properties"), AUDIT_SERVICE_IMPL_CLASS("service.binding.audit", "com.salesforce.dva.argus.service.audit.DefaultAuditService"), - AUDIT_SERVICE_PROPERTY_FILE("service.config.audit","argus.properties"), + AUDIT_SERVICE_PROPERTY_FILE("service.config.audit","placeholder_audit.properties"), - ASYNCHBASE_PROPERTY_FILE("service.config.asynchbase", "argus.properties"), + ASYNCHBASE_PROPERTY_FILE("service.config.asynchbase", "placeholder_asynchbase.properties"), TSDB_SERVICE_IMPL_CLASS("service.binding.tsdb", "com.salesforce.dva.argus.service.tsdb.DefaultTSDBService"), - TSDB_SERVICE_PROPERTY_FILE("service.config.tsdb","argus.properties"), + TSDB_SERVICE_PROPERTY_FILE("service.config.tsdb","placeholder_cachedtsdb.properties"), + + ANNOTATION_STORAGE_SERVICE_IMPL_CLASS("service.binding.annotation.storage", "com.salesforce.dva.argus.service.tsdb.DefaultTSDBService"), + ANNOTATION_STORAGE_SERVICE_PROPERTY_FILE("service.config.annotation.storage","placeholder_annotation.storage.properties"), + + + IMAGE_STORAGE_SERVICE_IMPL_CLASS("service.binding.image.storage", "com.salesforce.dva.argus.service.image.ElasticSearchImageService"), + + AKC_CONSUMER_OFFSET_STORAGE_SERVICE_IMPL_CLASS("service.binding.akc.consumer.offset.storage", "com.salesforce.dva.argus.service.metric.NoOperationMetricsStorageService"), + AKC_CONSUMER_OFFSET_STORAGE_SERVICE_PROPERTY_FILE("service.config.akc.consumer.offset.storage","placeholder_akc.consumer.offset.storage.properties"), WARDEN_SERVICE_IMPL_CLASS("service.binding.warden", "com.salesforce.dva.argus.service.warden.DefaultWardenService"), - WARDEN_SERVICE_PROPERTY_FILE("service.config.warden", "argus.properties"), + WARDEN_SERVICE_PROPERTY_FILE("service.config.warden", "placeholder_warden.properties"), + + 
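Each pluggable service now follows the same two-property convention: an *_IMPL_CLASS entry naming the implementation to bind and a *_PROPERTY_FILE entry pointing at a per-service placeholder_*.properties file. A minimal sketch of loading one such file (class and method names are illustrative; the actual merge into the system configuration happens in SystemInitializer.readFile):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    final class ServicePropertiesExample {
        // Load one service's property file, e.g. placeholder_warden.properties.
        static Properties load(String propertyFilePath) throws IOException {
            Properties props = new Properties();
            try (InputStream in = new FileInputStream(propertyFilePath)) {
                props.load(in);
            }
            return props;
        }
    }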
METADATA_SERVICE_IMPL_CLASS("service.binding.metadata", "com.salesforce.dva.argus.service.metric.metadata.IDBMetadataService"), + IDB_CLIENT_IMPL_CLASS("service.binding.idbclient", "com.salesforce.dva.argus.service.metric.metadata.CachedIDBClient"), + IDB_CLIENT_PROPERTY_FILE("service.config.idbclient", "placeholder_idbclient.properties"), + + OAUTH_SERVICE_PROPERTY_FILE("service.config.oauth","placeholder_oauth.properties"), + + DISCOVERY_SERVICE_IMPL_CLASS("service.binding.discovery", "com.salesforce.dva.argus.service.schema.CachedDiscoveryService"), + + // the default value of 5 million datapoints roughly correspond to 500mb of memory + MAX_DATAPOINTS_ALLOWED_PER_QUERY("system.property.max.datapoints.query", "5000000"), + + ENFORCE_DATAPOINTS_LIMIT("system.property.enforce.datapoints.limit", "false"), + + PKI_MONITORED_DIRECTORY("pki.monitored.directory", "/etc/pki_service/sfdc/argus-client"), + PKI_CA_DIRECTORY("pki.ca.directory", "/etc/pki_service/ca"); - OAUTH_SERVICE_PROPERTY_FILE("service.config.oauth","argus.properties"); - private final String _name; + private final String _name; private final String _defaultValue; private Property(String name, String defaultValue) { diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemInitializer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemInitializer.java index 9414a3c63..b9fa61c04 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemInitializer.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemInitializer.java @@ -36,15 +36,50 @@ import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.joran.JoranConfigurator; import ch.qos.logback.core.joran.spi.JoranException; - import com.google.inject.AbstractModule; import com.google.inject.matcher.Matchers; import com.google.inject.persist.jpa.JpaPersistModule; import com.salesforce.dva.argus.inject.SLF4JTypeListener; -import com.salesforce.dva.argus.service.*; +import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.AnnotationStorageService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.AuthService; +import com.salesforce.dva.argus.service.BatchService; +import com.salesforce.dva.argus.service.CacheService; +import com.salesforce.dva.argus.service.CallbackService; +import com.salesforce.dva.argus.service.ChartService; +import com.salesforce.dva.argus.service.CollectionService; +import com.salesforce.dva.argus.service.DashboardService; +import com.salesforce.dva.argus.service.DiscoveryService; +import com.salesforce.dva.argus.service.DistributedSchedulingLockService; +import com.salesforce.dva.argus.service.GlobalInterlockService; +import com.salesforce.dva.argus.service.HistoryService; +import com.salesforce.dva.argus.service.ImageService; +import com.salesforce.dva.argus.service.ImageStorageService; +import com.salesforce.dva.argus.service.MQService; +import com.salesforce.dva.argus.service.MailService; +import com.salesforce.dva.argus.service.ManagementService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MetricStorageService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.NamedBinding; +import com.salesforce.dva.argus.service.NamespaceService; +import com.salesforce.dva.argus.service.OAuthAuthorizationCodeService; +import 
com.salesforce.dva.argus.service.QueryStoreService; +import com.salesforce.dva.argus.service.RefocusService; +import com.salesforce.dva.argus.service.SchedulingService; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.service.ServiceManagementService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.service.WardenService; +import com.salesforce.dva.argus.service.alert.notifier.RefocusForwarder; import com.salesforce.dva.argus.service.annotation.DefaultAnnotationService; +import com.salesforce.dva.argus.service.annotation.ElasticSearchAnnotationService; import com.salesforce.dva.argus.service.batch.DefaultBatchService; import com.salesforce.dva.argus.service.collect.DefaultCollectionService; +import com.salesforce.dva.argus.service.image.ElasticSearchImageService; import com.salesforce.dva.argus.service.jpa.DefaultChartService; import com.salesforce.dva.argus.service.jpa.DefaultDashboardService; import com.salesforce.dva.argus.service.jpa.DefaultDistributedSchedulingLockService; @@ -53,17 +88,14 @@ import com.salesforce.dva.argus.service.jpa.DefaultServiceManagementService; import com.salesforce.dva.argus.service.management.DefaultManagementService; import com.salesforce.dva.argus.service.metric.AsyncMetricService; -import com.salesforce.dva.argus.service.monitor.CounterMetricJMXExporter; +import com.salesforce.dva.argus.service.monitor.DataLagService; import com.salesforce.dva.argus.service.monitor.DefaultMonitorService; -import com.salesforce.dva.argus.service.monitor.GaugeExporter; import com.salesforce.dva.argus.service.oauth.DefaultOAuthAuthorizationCodeService; -import com.salesforce.dva.argus.service.schema.CachedDiscoveryService; import com.salesforce.dva.argus.service.schema.DefaultDiscoveryService; import com.salesforce.dva.argus.service.tsdb.CachedTSDBService; import com.salesforce.dva.argus.service.users.CachedUserService; import com.salesforce.dva.argus.service.users.DefaultUserService; import com.salesforce.dva.argus.system.SystemConfiguration.Property; - import org.slf4j.LoggerFactory; import java.io.FileInputStream; @@ -135,7 +167,7 @@ private static Properties readConfigInfo() { return props; } - private static void readFile(Properties props, String filePath) { + static void readFile(Properties props, String filePath) { if ((filePath != null) && !filePath.isEmpty()) { InputStream is = null; Properties result = new Properties(); @@ -228,7 +260,6 @@ private void configureLogging() { } private void configureServices() { - bindConcreteClass(CounterMetricJMXExporter.class, GaugeExporter.class); bindConcreteClass(Property.CACHE_SERVICE_IMPL_CLASS, CacheService.class); bindConcreteClass(Property.MQ_SERVICE_IMPL_CLASS, MQService.class); bindConcreteClass(Property.ALERT_SERVICE_IMPL_CLASS, AlertService.class); @@ -236,15 +267,25 @@ private void configureServices() { bindConcreteClass(Property.MAIL_SERVICE_IMPL_CLASS, MailService.class); bindConcreteClass(Property.AUTH_SERVICE_IMPL_CLASS, AuthService.class); bindConcreteClass(Property.SCHEMA_SERVICE_IMPL_CLASS, SchemaService.class); + bindConcreteClass(Property.QUERYSTORE_SERVICE_IMPL_CLASS, QueryStoreService.class); + bindConcreteClass(Property.IMAGE_SERVICE_IMPL_CLASS, ImageService.class); + bindConcreteClass(Property.IMAGE_STORAGE_SERVICE_IMPL_CLASS, ImageStorageService.class); bindConcreteClass(Property.HISTORY_SERVICE_IMPL_CLASS, HistoryService.class); 
bindConcreteClass(Property.AUDIT_SERVICE_IMPL_CLASS, AuditService.class); bindConcreteClass(Property.CALLBACK_SERVICE_IMPL_CLASS, CallbackService.class); bindConcreteClass(Property.WARDEN_SERVICE_IMPL_CLASS, WardenService.class); + bindConcreteClass(Property.DISCOVERY_SERVICE_IMPL_CLASS, DiscoveryService.class); + bindConcreteClass(Property.ANNOTATION_STORAGE_SERVICE_IMPL_CLASS, AnnotationStorageService.class); + bindConcreteClass(Property.AKC_CONSUMER_OFFSET_STORAGE_SERVICE_IMPL_CLASS, MetricStorageService.class); + bindConcreteClass(Property.DATA_LAG_SERVICE_IMPL_CLASS, DataLagService.class); // Named annotation binding bindConcreteClassWithNamedAnnotation(getConcreteClassToBind(Property.TSDB_SERVICE_IMPL_CLASS, TSDBService.class), TSDBService.class); bindConcreteClassWithNamedAnnotation(DefaultDiscoveryService.class, DiscoveryService.class); bindConcreteClassWithNamedAnnotation(DefaultUserService.class, UserService.class); + bindConcreteClassWithNamedAnnotation(ElasticSearchAnnotationService.class, AnnotationStorageService.class); + bindConcreteClassWithNamedAnnotation(ElasticSearchImageService.class, ImageStorageService.class); + // static binding bindConcreteClass(CachedTSDBService.class, TSDBService.class); @@ -260,9 +301,9 @@ private void configureServices() { bindConcreteClass(DefaultManagementService.class, ManagementService.class); bindConcreteClass(DefaultServiceManagementService.class, ServiceManagementService.class); bindConcreteClass(DefaultNamespaceService.class, NamespaceService.class); - bindConcreteClass(CachedDiscoveryService.class, DiscoveryService.class); bindConcreteClass(DefaultDistributedSchedulingLockService.class, DistributedSchedulingLockService.class); bindConcreteClass(DefaultChartService.class, ChartService.class); + bindConcreteClass(RefocusForwarder.class, RefocusService.class); } private void bindConcreteClass(Property property, Class type) { @@ -297,12 +338,18 @@ private Properties getServiceSpecificProperties() { readFile(properties, _systemConfiguration.getValue(Property.MAIL_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.AUTH_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.SCHEMA_SERVICE_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.QUERYSTORE_SERVICE_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.IMAGE_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.HISTORY_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.TSDB_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.NOTIFIER_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.ASYNCHBASE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.WARDEN_SERVICE_PROPERTY_FILE)); readFile(properties, _systemConfiguration.getValue(Property.OAUTH_SERVICE_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.ANNOTATION_STORAGE_SERVICE_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.AKC_CONSUMER_OFFSET_STORAGE_SERVICE_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.IDB_CLIENT_PROPERTY_FILE)); + readFile(properties, _systemConfiguration.getValue(Property.DATA_LAG_SERVICE_PROPERTY_FILE)); return properties; } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemMain.java 
b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemMain.java index f9f3cf3a4..101f7f5a5 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemMain.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/system/SystemMain.java @@ -38,13 +38,14 @@ import com.salesforce.dva.argus.service.NotifierFactory; import com.salesforce.dva.argus.service.Service; import com.salesforce.dva.argus.service.ServiceFactory; -import static com.salesforce.dva.argus.system.SystemAssert.requireState; import java.lang.reflect.Method; import java.text.SimpleDateFormat; import java.util.Date; -import java.util.Properties; import java.util.Map.Entry; +import java.util.Properties; + +import static com.salesforce.dva.argus.system.SystemAssert.requireState; /** * Loads the system configuration from disk and provides start up and shut down methods. Calling the shut down method before the start up method has @@ -137,6 +138,9 @@ protected void doStop() { _dispose(_serviceFactory.getTSDBService()); _dispose(_serviceFactory.getCacheService()); _dispose(_serviceFactory.getHistoryService()); + _dispose(_serviceFactory.getAnnotationStorageService()); + _dispose(_serviceFactory.getConsumerOffsetMetricStorageService()); + // TODO - dispose of RefocusService. QUESTION - where is alertservice disposed? _persistService.stop(); _log.info("{} stopped.", getName()); } catch (Exception ex) { @@ -218,7 +222,7 @@ private void _mergeServiceConfiguration() { _mergeProperties(service.getServiceProperties()); } catch (Throwable e) { _log.error(e.getMessage(), e); - requireState(false, "Failed to load service properties for service factory method " + method.getName()); + requireState(false, "Failed to load service properties for service factory method; see previous logger error: " + method.getName()); } } } @@ -245,7 +249,8 @@ private void _mergeNotifierConfiguration() { method.setAccessible(accessible); _mergeProperties(notifier.getNotifierProperties()); } catch (Exception e) { - requireState(false, "Failed to load notifier properties for notifier factory method " + method.getName()); + _log.error(e.getMessage(), e); + requireState(false, "Failed to load notifier properties for notifier factory method; See previous logger error: " + method.getName()); } } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/AlertUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/AlertUtils.java index d5cd33167..81fc795f7 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/AlertUtils.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/AlertUtils.java @@ -10,6 +10,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; +import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; + /** * Utility functions for the alert evaluation flow * @@ -21,7 +23,7 @@ public class AlertUtils { private static final Logger _logger = LoggerFactory.getLogger(AlertUtils.class); - public static boolean isScopePresentInWhiteList(String expression, List scopeRegexPatterns) { + public static boolean isPatternPresentInWhiteList(String expression, List scopeRegexPatterns) { for(Pattern regexPattern : scopeRegexPatterns) { if(regexPattern.matcher(expression.toLowerCase()).find()) { return true; @@ -35,7 +37,7 @@ public static Long getMaximumIntervalLength(String queryExpression) { return times[1] - times[0]; } - public static Long[] getStartAndEndTimes(String originalExpression, Long relativeTo) { + public static Long[] getStartAndEndTimes(String 
originalExpression, Long relativeTo) {
        String expression = "@" + originalExpression.replaceAll("[\\s\\t\\r\\n\\f]*", "");
        String regexMatcherWithStartAndEnd = "(?i)\\-[0-9]+(d|m|h|s):\\-[0-9]+(d|m|h|s)";
        String regexMatcherWithFILL = "(?i)FILL\\(#\\-[0-9]+(d|h|m|s),#\\-[0-9]+(d|h|m|s)";
@@ -46,6 +48,7 @@ public static Long[] getStartAndEndTimes(String originalExpression, Long relativ
        Matcher m = Pattern.compile(regexMatcherWithStartAndEnd).matcher(expression);
        while (m.find()) {
            String[] times = m.group().split(":");
+           requireArgument(times.length == 2, MessageFormat.format("Regex matching returns single timestamp for string: {0} with regex: {1} and matched group: {2}", expression, regexMatcherWithStartAndEnd, m.group()));
            Long currentLength = MetricReader.getTime(relativeTo, times[1]) - MetricReader.getTime(relativeTo, times[0]);
            if(currentLength > longestLength) {
                longestLength = currentLength;
@@ -58,6 +61,7 @@
        m = Pattern.compile(regexMatcherWithFILL).matcher(expression);
        while (m.find()) {
            String[] times = m.group().substring(6, m.group().length() - 1).split("#,#");
+           requireArgument(times.length == 2, MessageFormat.format("Regex matching returns single timestamp for string: {0} with regex: {1} and matched group: {2}", expression, regexMatcherWithFILL, m.group()));
            Long currentLength = MetricReader.getTime(relativeTo, times[1]) - MetricReader.getTime(relativeTo, times[0]);
            if(currentLength > longestLength) {
                longestLength = currentLength;
@@ -95,17 +99,18 @@ public static String getExpressionWithAbsoluteStartAndEndTimeStamps(DefaultAlert
        Matcher m = Pattern.compile(regexMatcherWithStartAndEnd).matcher(expression);
        while (m.find()) {
-           for (String timeStr: m.group().split(":")) {
-               Long absoluteTime = MetricReader.getTime(relativeTo, timeStr);
-               expression = expression.replaceFirst(timeStr, "" + absoluteTime);
-           }
+           String[] times = m.group().split(":");
+           requireArgument(times.length == 2, MessageFormat.format("Regex matching returns single timestamp for string: {0} with regex: {1} and matched group: {2}", expression, regexMatcherWithStartAndEnd, m.group()));
+           Long absoluteTimeStart = MetricReader.getTime(relativeTo, times[0]);
+           Long absoluteTimeEnd = MetricReader.getTime(relativeTo, times[1]);
+           expression = expression.replaceAll(m.group(), "" + absoluteTimeStart + ":" + absoluteTimeEnd);
        }

        m = Pattern.compile(regexMatcherWithConstants).matcher(expression);
        while (m.find()) {
            String timeStr = m.group();
            Long absoluteTime = MetricReader.getTime(relativeTo, timeStr.substring(1));
-           expression = expression.replaceFirst(timeStr, ("" + timeStr.charAt(0)) + absoluteTime);
+           expression = expression.replaceAll(timeStr, ("" + timeStr.charAt(0)) + absoluteTime);
        }

        m = Pattern.compile(regexMatcherWithoutEnd).matcher(expression);
@@ -121,5 +126,4 @@ public static String getExpressionWithAbsoluteStartAndEndTimeStamps(DefaultAlert
        return absoluteExpression;
    }
-
}
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/CommonUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/CommonUtils.java
new file mode 100644
index 000000000..d51f6ea28
--- /dev/null
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/CommonUtils.java
@@ -0,0 +1,67 @@
+package com.salesforce.dva.argus.util;
+
+import org.apache.commons.beanutils.BeanUtils;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class CommonUtils {
+    /**
+     * Copies properties.
+     *
+     * @param dest The object to which the properties will be copied.
+     * @param source The object whose properties are copied.
+     *
+     * @throws Exception Throws an exception if BeanUtils encounters a problem.
+     */
+    public static void copyProperties(Object dest, Object source) throws Exception {
+        try {
+            BeanUtils.copyProperties(dest, source);
+        } catch (Exception e) {
+            throw new Exception(e.getCause() != null ? e.getCause().getMessage() : e.getMessage(), e);
+        }
+    }
+
+    /**
+     * Returns whether two lists are equivalent, comparing them as unordered collections of element hash codes.
+     */
+    public static boolean listsAreEquivelent(List list1, List list2) {
+        if (list1 == null) {
+            return list2 == null;
+        }
+        if (list2 == null) {
+            return false;
+        }
+        if (list1.size() != list2.size()) return false;
+
+        Map tempMap = new HashMap<>();
+        for (Object object : list1) {
+            Integer currentCount = tempMap.get(object.hashCode());
+            if (currentCount == null) {
+                tempMap.put(object.hashCode(), 1);
+            } else {
+                tempMap.put(object.hashCode(), currentCount + 1);
+            }
+        }
+        for (Object object : list2) {
+            Integer currentCount = tempMap.get(object.hashCode());
+            if (currentCount == null) {
+                return false;
+            } else {
+                tempMap.put(object.hashCode(), currentCount - 1);
+            }
+        }
+        for (Integer count : tempMap.values()) {
+            if (count != 0) {
+                return false;
+            }
+        }
+        return true;
+    }
+}
diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/Cron.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/Cron.java
index 6c02053f8..2f306cdf4 100644
--- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/Cron.java
+++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/Cron.java
@@ -32,8 +32,12 @@
 package com.salesforce.dva.argus.util;

 import java.util.Date;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.function.Function;
+import java.util.stream.Collectors;

-import org.apache.commons.lang.exception.ExceptionUtils;
 import org.quartz.CronScheduleBuilder;
 import org.quartz.TriggerBuilder;

@@ -44,125 +48,100 @@
  */
 public class Cron {

-    //~ Static fields/initializers *******************************************************************************************************************
-
-    private static final String ANNUALLY = "@ANNUALLY";
-    private static final String YEARLY = "@YEARLY";
-    private static final String MONTHLY = "@MONTHLY";
-    private static final String WEEKLY = "@WEEKLY";
-    private static final String DAILY = "@DAILY";
-    private static final String MIDNIGHT = "@MIDNIGHT";
-    private static final String HOURLY = "@HOURLY";
-
    //~ Constructors *********************************************************************************************************************************

    private Cron() { }

    //~ Methods **************************************************************************************************************************************

-    /**
-     * Determines if the given CRON entry is runnable at this current moment in time. This mimics the original implementation of the CRON table.
-     *

-     * This implementation supports only the following types of entries:
-     *
-     *   1. Standard entries having the form: <minutes> <hours> <days> <months> <days of week>
-     *        *           : All
-     *        *\/n        : Only mod n
-     *        n           : Numeric
-     *        n-n         : Range
-     *        n,n,...,n   : List
-     *        n,n-n,...,n : List having ranges
-     *
-     *   2. Special entries:
-     *        @annually : equivalent to "0 0 1 1 *"
-     *        @yearly   : equivalent to "0 0 1 1 *"
-     *        @monthly  : equivalent to "0 0 1 * *"
-     *        @weekly   : equivalent to "0 0 * * 0"
-     *        @daily    : equivalent to "0 0 * * *"
-     *        @midnight : equivalent to "0 0 * * *"
-     *        @hourly   : equivalent to "0 * * * *"
- * - * @param entry The CRON entry to evaluate. - * @param atTime The time at which to evaluate the entry. - * - * @return true if the the current time is a valid runnable time with respect to the supplied entry. - */ - public static boolean shouldRun(String entry, Date atTime) { - entry = entry.trim().toUpperCase(); - if (ANNUALLY.equals(entry) || (YEARLY.equals(entry))) { - entry = "0 0 1 1 *"; - } else if (MONTHLY.equals(entry)) { - entry = "0 0 1 * *"; - } else if (WEEKLY.equals(entry)) { - entry = "0 0 * * 0"; - } else if (DAILY.equals(entry) || (MIDNIGHT.equals(entry))) { - entry = "0 0 * * *"; - } else if (HOURLY.equals(entry)) { - entry = "0 * * * *"; - } - return new CronTabEntry(entry).isRunnable(atTime); - } - - /** - * Indicates if a CRON entry should run at the current moment in time. - * - * @param entry The CRON entry to evaluate. - * - * @return true if the the current time is a valid runnable time with respect to the supplied entry. - */ - public static boolean shouldRun(String entry) { - return Cron.shouldRun(entry, new Date()); - } - - /** - * Determines if an entry is valid CRON syntax. - * - * @param entry The CRON entry. - * - * @return True if the entry is valid CRON syntax. - */ - public static boolean isValid(String entry) { - boolean result = true; - - try { - shouldRun(entry); - } catch (Exception ex) { - result = false; - } - return result; - } - public static boolean isCronEntryValid(String cronEntry) { - String quartzCronEntry = convertToQuartzCronEntry(cronEntry); try { - // throws runtime exception if the cronEntry is invalid + String quartzCronEntry = convertToQuartzCronEntry(cronEntry); TriggerBuilder.newTrigger().withSchedule(CronScheduleBuilder.cronSchedule(quartzCronEntry)).build(); - }catch(Exception e) { + } catch(Exception e) { return false; } return true; } + // Quartz Cron fields: + // Seconds = 0 + // Minutes = 1 + // Hours = 2 + private static final int DayOfMonth = 3; + // Month = 4 + private static final int DayOfWeek = 5; + // Year = 6 + + /* @name: convertToQuartzCronEntry + @description: Argus accepts and stores 5 field Quartz Cron entries because it is not designed to schedule + alert evaluation more often than once per minute. This function normalizes 5 + field cron entries to 6 field cron entries by prepending the seconds field, and is a bit more + permissive in accepting * where ? should be used in valid Quartz cron entries. + + @param: cronEntry: A Quartz Cron Entry with seconds absent (normally 5 fields). + Day of week may also be absent (4 fields). Year is optional. + Does NOT support special values (@yearly, @monthly, @daily, etc.) + + @returns: A valid Quartz Cron entry. + */ + // TODO - rename to convertArgusCronToQuartzCron(), normalizeArgusQuartzCron(), etc. + public static String convertToQuartzCronEntry(String cronEntry) { - // adding seconds field - cronEntry = "0 " + cronEntry.trim(); - - // if day of the week is not specified, substitute it with ?, so as to prevent conflict with month field - if(cronEntry.charAt(cronEntry.length() - 1) == '*') { - return cronEntry.substring(0, cronEntry.length() - 1) + "?"; - }else { - return cronEntry; + + String tmpCron = "0 " + cronEntry.trim(); + + List parts = new ArrayList(Arrays.asList(tmpCron.split("\\s+"))); + if (parts.size() < 5 || parts.size() > 7 ) + { + throw new RuntimeException("Invalid input cron expression: " + cronEntry + ", too many or too few fields"); } + + // if day of week is not specified, add '?' so as to prevent conflict with the month field. 
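+        // Hypothetical worked examples of the full normalization, traced from this method
+        // (illustrative inputs only, not taken from the original change):
+        //   "*/5 * * * *"  ->  "0 */5 * ? * *"   (seconds prepended; DOM and DOW both '*', so DOM becomes '?')
+        //   "0 12 1 * *"   ->  "0 0 12 1 * ?"    (explicit DOM, so the '*' DOW becomes '?')
+        //   "0 12 * * 1"   ->  "0 0 12 ? * 1"    (explicit DOW, so the '*' DOM becomes '?')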
+ if (parts.size() == 5) { + parts.add("?"); + } + + // Quartz doesn't support specification of both DOM and DOW, but for some reason it thinks that + // * is an explicit specification in these context, whereas ? is handled as any. + // Translation table: dom,dow -> dom,dow + // ------------------------------------------------- + // DOM * ? X + // DOW * {?,*} {*,?} {X,?} + // ? {*,?} {?,*} {X,?} + // X {?,X} {?,X} {X,X} + + + String dom = parts.get(DayOfMonth); + String dow = parts.get(DayOfWeek); + + // NOTE - no adjustments to dom, dow because alerts can be modified by calls to the UI and the WS. + + if ( dow.equals("*") && dom.equals("*")) + { + dom = "?"; + } + else if ( dow.equals("*") && !dom.equals("?") && !dom.equals("*")) + { + dow = "?"; + } + else if (dom.equals("*") && !dow.equals("?") && !dow.equals("*")) + { + dom = "?"; + } + else if (dom.equals("?") && dow.equals("?")) + { + dow = "*"; + } + + parts.set(DayOfMonth, dom); + parts.set(DayOfWeek, dow); + + + String quartzCron = String.join(" ", parts); + return quartzCron; } + } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/ImageUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/ImageUtils.java new file mode 100644 index 000000000..4aa3a29bf --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/ImageUtils.java @@ -0,0 +1,291 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.util; + +import com.salesforce.dva.argus.entity.ImagePoints; +import com.salesforce.dva.argus.entity.ImageProperties; +import com.salesforce.dva.argus.entity.Metric; +import org.apache.commons.codec.digest.DigestUtils; +import org.jfree.chart.ChartFactory; +import org.jfree.chart.ChartUtils; +import org.jfree.chart.JFreeChart; +import org.jfree.chart.annotations.XYTextAnnotation; +import org.jfree.chart.axis.NumberAxis; +import org.jfree.chart.axis.NumberTickUnit; +import org.jfree.chart.plot.IntervalMarker; +import org.jfree.chart.plot.ValueMarker; +import org.jfree.chart.plot.XYPlot; +import org.jfree.chart.renderer.xy.XYItemRenderer; +import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; +import org.jfree.chart.ui.Layer; +import org.jfree.chart.ui.RectangleAnchor; +import org.jfree.chart.ui.TextAnchor; +import org.jfree.data.Range; +import org.jfree.data.time.Second; +import org.jfree.data.time.TimeSeries; +import org.jfree.data.time.TimeSeriesCollection; + +import javax.imageio.ImageIO; +import java.awt.BasicStroke; +import java.awt.Color; +import java.awt.Font; +import java.awt.Graphics; +import java.awt.image.BufferedImage; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Date; +import java.util.List; +import java.util.Map; + +/** + * Utility methods for Image Service + * + * @author Chandravyas Annakula (cannakula@salesforce.com) + */ +public class ImageUtils { + + private static final int MAX_LEGENDS_TO_DISPLAY=5; + private static final double TICK_UNIT_INCREMENT_VALUE = 0.5; + private static final Range TICK_UNIT_DEFAULT_RANGE = new Range(-1,1); + private static final double DOUBLE_COMPARISON_MAX_DELTA = 0.000000000000001; + private static final Font DEFAULT_FONT = new Font("Arial", Font.ITALIC, 12); + private static final Font DEFAULT_NODATA_FONT = new Font("Arial", Font.ITALIC, 20); + private static final BasicStroke DEFAULT_BASIC_STROKE = new BasicStroke(1.5f); + private static final Color DEFAULT_BACKGROUND_COLOR = Color.white; + private static final Color DEFAULT_FIRST_TIMESERIES_COLOR = Color.BLUE; + + + public static byte[] getMetricsImage(List metrics, ImageProperties imageProperties) throws IOException{ + + if (imageProperties == null) + { + imageProperties = new ImageProperties(); + } + if(metrics != null && metrics.size()>0) { + boolean legend = metrics.size() > MAX_LEGENDS_TO_DISPLAY ? 
false:true; + List timeseries = convertToTimeSeries(metrics); + TimeSeriesCollection dataset=new TimeSeriesCollection(); + for(TimeSeries series:timeseries) { + dataset.addSeries(series); + } + + JFreeChart timechart = ChartFactory.createTimeSeriesChart(imageProperties.getChartName(), + imageProperties.getxAxisName(), imageProperties.getyAxisName(), dataset,legend, true, true); + timechart.getPlot().setBackgroundPaint(DEFAULT_BACKGROUND_COLOR); + + XYPlot plot = (XYPlot) timechart.getPlot(); + + // Overridding the range axis with new metric formatter + NumberAxis rangeAxis = (NumberAxis) plot.getRangeAxis(); + MetricNumberFormat metricNumberFormat = new MetricNumberFormat(); + rangeAxis.setNumberFormatOverride(metricNumberFormat); + + // Handling the scenario when all the datapoints in the timeseries are same for a single Timeseries + if (timeseries.size()==1) + { + TimeSeries singleTimeseries = timeseries.get(0); + if (compareAlmostEqual(singleTimeseries.getMinY(),singleTimeseries.getMaxY(),DOUBLE_COMPARISON_MAX_DELTA)) + { + try { + double absValue=Math.abs(singleTimeseries.getMinY()); + rangeAxis.setTickUnit(new NumberTickUnit(absValue)); + rangeAxis.setRange(-2 * absValue, 2 * absValue); + } + catch (IllegalArgumentException exception) + { + // Exception occurs when all the datapoints in the graph are equal to zero. + // So when exception occurs I am setting the range axis to have default values + rangeAxis.setTickUnit(new NumberTickUnit(TICK_UNIT_INCREMENT_VALUE)); + rangeAxis.setRange(TICK_UNIT_DEFAULT_RANGE); + } + } + } + + if (imageProperties.getLabelPoints()!=null) { + for (ImagePoints point : imageProperties.getLabelPoints()) { + XYTextAnnotation textAnnotaion = new XYTextAnnotation(point.getLabel(), point.getFirstPoint(), point.getSecondPoint()); + textAnnotaion.setBackgroundPaint(DEFAULT_BACKGROUND_COLOR); + if (point.getColor()!=null) { + textAnnotaion.setPaint(point.getColor().getColor()); + } + textAnnotaion.setFont(DEFAULT_FONT); + plot.addAnnotation(textAnnotaion); + } + } + + if (imageProperties.getShadeXAxisArea()!=null) { + for (ImagePoints point : imageProperties.getShadeXAxisArea()) { + if (compareAlmostEqual(point.getFirstPoint(),point.getSecondPoint(),DOUBLE_COMPARISON_MAX_DELTA)) + { + plot.addRangeMarker(getLineMarker(point)); + } + else { + plot.addRangeMarker(getIntervalMarker(point), Layer.BACKGROUND); + } + } + } + + if (imageProperties.getShadeYAxisArea()!=null) { + for (ImagePoints point : imageProperties.getShadeYAxisArea()) { + if (compareAlmostEqual(point.getFirstPoint(),point.getSecondPoint(),DOUBLE_COMPARISON_MAX_DELTA)) + { + plot.addDomainMarker(getLineMarker(point)); + } + else { + plot.addDomainMarker(getIntervalMarker(point), Layer.BACKGROUND); + } + } + } + XYItemRenderer r = plot.getRenderer(); + // Setting the default color of the first time series to be BLUE + r.setSeriesPaint( 0, DEFAULT_FIRST_TIMESERIES_COLOR ); + if (r instanceof XYLineAndShapeRenderer) { + XYLineAndShapeRenderer renderer = (XYLineAndShapeRenderer) r; + renderer.setDefaultShapesVisible(true); + renderer.setDefaultShapesFilled(true); + renderer.setDrawSeriesLineAsPath(true); + } + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + ChartUtils.writeChartAsJPEG(outputStream, timechart, imageProperties.getImageWidth(), imageProperties.getImageHeight()); + return outputStream.toByteArray(); + }else { + // This is for missing data notification + return getImageWithText("Data does not exist for a given metric 
expression",imageProperties.getImageWidth(),imageProperties.getImageHeight()); + } + } + + private static ValueMarker getLineMarker(ImagePoints point) + { + Color color; + if (point.getColor()==null) + { + color = ImageProperties.DEFAULT_COLOR.getColor(); + } + else { + color = point.getColor().getColor(); + } + ValueMarker marker = new ValueMarker(point.getFirstPoint(), color,DEFAULT_BASIC_STROKE); + marker.setLabel(point.getLabel()); + marker.setLabelFont(DEFAULT_FONT); + marker.setLabelBackgroundColor(DEFAULT_BACKGROUND_COLOR); + marker.setLabelAnchor(RectangleAnchor.CENTER); + marker.setLabelTextAnchor(TextAnchor.CENTER); + return marker; + } + + private static IntervalMarker getIntervalMarker(ImagePoints point) + { + Color color; + if (point.getColor()==null) + { + color = ImageProperties.DEFAULT_COLOR.getColor(); + } + else { + color = point.getColor().getColor(); + } + IntervalMarker marker = new IntervalMarker(point.getFirstPoint(), point.getSecondPoint(), color); + marker.setLabel(point.getLabel()); + marker.setLabelFont(DEFAULT_FONT); + marker.setLabelBackgroundColor(DEFAULT_BACKGROUND_COLOR); + marker.setLabelAnchor(RectangleAnchor.CENTER); + marker.setLabelTextAnchor(TextAnchor.CENTER); + return marker; + } + + private static byte[] getImageWithText(String data,int width, int height) throws IOException{ + BufferedImage bufferedImage = new BufferedImage(width, height,BufferedImage.TYPE_INT_RGB); + Graphics graphics = bufferedImage.getGraphics(); + graphics.setColor(DEFAULT_BACKGROUND_COLOR); + graphics.fillRect(0, 0, width, height); + graphics.setColor(Color.BLACK); + graphics.setFont(DEFAULT_NODATA_FONT); + graphics.drawString(data, width/4, height/4); + ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); + ImageIO.write(bufferedImage, "jpg", outputStream); + return outputStream.toByteArray(); + } + + private static List convertToTimeSeries(List metrics){ + List result = new ArrayList<>(); + for(Metric metric:metrics) { + TimeSeries timeSeries = new TimeSeries(getMetricDisplayName(metric)); + for(Map.Entry entry : metric.getDatapoints().entrySet()){ + timeSeries.add(new Second(new Date(entry.getKey())), entry.getValue()); + } + result.add(timeSeries); + } + return result; + } + + private static String getMetricDisplayName(Metric metric) { + StringBuilder result = new StringBuilder(); + result.append(metric.getScope()).append(':'); + result.append(metric.getMetric()); + if(metric.getTags().size()>0) { + result.append(metric.getTags().toString()); + } + return result.toString(); + } + + public static String convertBytesToMd5Hash(byte[] inputBytes) + { + String md5Hex = null; + if (inputBytes!=null) { + md5Hex = DigestUtils.md5Hex(inputBytes).toUpperCase(); + } + return md5Hex; + } + + public static String encodeBytesToBase64(byte[] inputBytes) + { + String encodedString = null; + if (inputBytes!=null) { + encodedString = Base64.getEncoder().encodeToString(inputBytes); + } + return encodedString; + } + + public static byte[] decodeBase64ToBytes(String encodedString) + { + byte[] decodedBytes = null; + if (encodedString!=null) { + decodedBytes = Base64.getDecoder().decode(encodedString); + } + return decodedBytes; + } + + public static boolean compareAlmostEqual(double x, double y, double delta) { + return x == y || Math.abs(x - y) < delta; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MetricNumberFormat.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MetricNumberFormat.java new file mode 100644 index 000000000..22ab94151 --- 
/dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MetricNumberFormat.java @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.util; + +import java.math.RoundingMode; +import java.text.DecimalFormat; +import java.text.FieldPosition; +import java.text.NumberFormat; +import java.text.ParsePosition; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; + +/** + * A Number formatter that formats Metric Numbers. 
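+ * (A hypothetical usage note, traced from formatNumber below and not part of the original
+ * javadoc: 1536000 formats as "1.536M", 0.00042 formats as "420µ", and 999 stays "999".)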
+ */ +public class MetricNumberFormat extends NumberFormat { + private static final long serialVersionUID = 1; + + private static NavigableMap suffixes = new TreeMap<>(); + private static DecimalFormat decimalFormat = new DecimalFormat("0.###"); + static { + decimalFormat.setRoundingMode(RoundingMode.DOWN); + suffixes.put(0.000000000000000000000001d, "y"); + suffixes.put(0.000000000000000000001d, "z"); + suffixes.put(0.000000000000000001d, "a"); + suffixes.put(0.000000000000001d, "f"); + suffixes.put(0.000000000001d, "p"); + suffixes.put(0.000000001d, "n"); + suffixes.put(0.000001d, "µ"); + suffixes.put(0.001d, "m"); + suffixes.put(1000d, "k"); + suffixes.put(1000000d, "M"); + suffixes.put(1000000000d, "G"); + suffixes.put(1000000000000d, "T"); + suffixes.put(1000000000000000d, "P"); + suffixes.put(1000000000000000000d, "E"); + suffixes.put(1000000000000000000000d, "Z"); + suffixes.put(1000000000000000000000000d, "Y"); + } + + /** + * Format the double number to metrics terminology + * @param value double value + * @return String representing the metric formatted in Metrics terminology + */ + public String formatNumber(double value) { + if (value == Double.MIN_VALUE) return formatNumber(Double.MIN_VALUE + 1); + if (value < 0) return "-" + formatNumber(-value); + if ((value < 1000 && value>=1) || value==0) return decimalFormat.format(value); + Map.Entry e = suffixes.floorEntry(value); + Double divideBy = e.getKey(); + String suffix = e.getValue(); + + double number = value / divideBy; + if (number%1==0) { + return Math.round(number) + suffix; + } + else { + return decimalFormat.format((number)) + suffix; + } + + } + + /** + * Formats a number into the specified string buffer. + * + * @param number the number to format. + * @param toAppendTo the string buffer. + * @param pos the field position (ignored here). + * + * @return The string buffer. + */ + @Override + public StringBuffer format(double number, StringBuffer toAppendTo, + FieldPosition pos) { + return toAppendTo.append(formatNumber(number)); + } + + /** + * Formats a number into the specified string buffer. + * + * @param number the number to format. + * @param toAppendTo the string buffer. + * @param pos the field position (ignored here). + * + * @return The string buffer. + */ + @Override + public StringBuffer format(long number, StringBuffer toAppendTo, + FieldPosition pos) { + return toAppendTo.append(formatNumber(number)); + } + + /** + * This method returns null for all inputs. This class cannot + * be used for parsing. + * + * @param source the source string. + * @param parsePosition the parse position. + * + * @return null. 
+ */ + @Override + public Number parse(String source, ParsePosition parsePosition) { + return null; + } + +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MonitoringUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MonitoringUtils.java new file mode 100644 index 000000000..aa89d53f3 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/MonitoringUtils.java @@ -0,0 +1,39 @@ +package com.salesforce.dva.argus.util; + +import java.util.HashMap; +import java.util.Map; + +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.metric.MetricQueryResult; + +public class MonitoringUtils { + + public static final String ALERT_DATA_READ_NUM_TIME_SERIES = "perf.alert.read.num.time.series"; + + public static final String ALERT_DATA_READ_NUM_DISCOVERY_RESULTS = "perf.alert.read.num.discovery.results"; + + public static final String ALERT_DATA_READ_NUM_DISCOVERY_QUERIES = "perf.alert.read.num.discovery.queries"; + + public static final String ALERT_QUERY_COUNT = "perf.alert.read.count"; + + public static final String TAGS_USER_KEY = "user"; + + public static final String TAGS_TIME_WINDOW_KEY = "timeWindow"; + + public static final String TAGS_EXPANDED_TIME_SERIES_RANGE_KEY = "expandedTimeSeriesRange"; + + public static final String TAGS_START_TIME_WINDOW_KEY = "startTimeWindow"; + + public static void updateAlertMetricQueryPerfCounters(MonitorService monitorService, MetricQueryResult queryResult, String alertOwner) { + Map tags = new HashMap(); + tags.put(TAGS_USER_KEY, alertOwner); + tags.put(TAGS_TIME_WINDOW_KEY, queryResult.getQueryTimeWindow()); + tags.put(TAGS_EXPANDED_TIME_SERIES_RANGE_KEY, queryResult.getExpandedTimeSeriesRange()); + tags.put(TAGS_START_TIME_WINDOW_KEY, queryResult.getQueryStartTimeWindow()); + + monitorService.modifyCustomCounter(ALERT_DATA_READ_NUM_TIME_SERIES, queryResult.getNumTSDBResults(), tags); + monitorService.modifyCustomCounter(ALERT_DATA_READ_NUM_DISCOVERY_RESULTS, queryResult.getNumDiscoveryResults(), tags); + monitorService.modifyCustomCounter(ALERT_DATA_READ_NUM_DISCOVERY_QUERIES, queryResult.getNumDiscoveryQueries(), tags); + monitorService.modifyCustomCounter(ALERT_QUERY_COUNT, 1, tags); + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryContext.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryContext.java index 8c5c3d689..af0728136 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryContext.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryContext.java @@ -8,9 +8,9 @@ /* * This class encapsulates the parsed query expression tree. * - * Each node in the tree corresponds to a transform function specified in the expression. - * - * Moreover each node has references to its constants, child transforms, parent transform and the associated tsdb query expressions + * Each node in the tree corresponds to a transform function or TSDB query expression, but not both. 
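+ * (A hypothetical illustration: for an expression such as SUM(-1h:myScope:metricA:avg, -1h:myScope:metricB:avg),
+ * the root QueryContext would hold the SUM transform, with two child contexts each holding one TSDBQueryExpression.)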
+ * + * For example if a particular QueryContext is a transform node, then the fields corresponding to expression will be null and vice-versa * */ public class QueryContext { @@ -22,10 +22,10 @@ public class QueryContext { private QueryContext parentContext = null; private List childContexts = new ArrayList(); - - private List childExpressions = new ArrayList(); - public Function getTransform() { + private TSDBQueryExpression expression = null; + + public Function getTransform() { return transform; } @@ -57,14 +57,6 @@ public void setChildContexts(List childContexts) { this.childContexts = childContexts; } - public List getChildExpressions() { - return childExpressions; - } - - public void setChildExpressions(List childExpressions) { - this.childExpressions = childExpressions; - } - public List getChildQueryContexts() { return childContexts; } @@ -72,4 +64,12 @@ public List getChildQueryContexts() { public void setChildQueryContexts(List childQueryContexts) { this.childContexts = childQueryContexts; } + + public TSDBQueryExpression getExpression() { + return expression; + } + + public void setExpression(TSDBQueryExpression expression) { + this.expression = expression; + } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryUtils.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryUtils.java index 053125e4f..e3d302f50 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryUtils.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/QueryUtils.java @@ -1,58 +1,90 @@ package com.salesforce.dva.argus.util; -import java.util.LinkedList; -import java.util.Queue; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.metric.MetricReader; import com.salesforce.dva.argus.service.metric.ParseException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; +import java.util.Set; + public class QueryUtils { - private static MetricReader metricReader = new MetricReader(null, null,null); - - public static Long[] getStartAndEndTimesWithMaxInterval(String expression, Long relativeTo) { - return getStartAndEndTimesWithMaxInterval(getQueryContext(expression, relativeTo)); - } - - public static Long[] getStartAndEndTimesWithMaxInterval(QueryContext context) { - Long[] queryStartAndEndTimes = new Long[]{0L, 0L}; - Queue bfsQueue = new LinkedList(); - if(context!=null) { - bfsQueue.add(context); - } - while(!bfsQueue.isEmpty()) { - QueryContext currContext = bfsQueue.poll(); - if(currContext.getChildExpressions()!=null) { - for(TSDBQueryExpression expression : currContext.getChildExpressions()) { - if(queryStartAndEndTimes[0]==0L || queryStartAndEndTimes[0]>expression.getStartTimestamp()) { - queryStartAndEndTimes[0] = expression.getStartTimestamp(); - } - - if(queryStartAndEndTimes[1]==0L || queryStartAndEndTimes[1] bfsQueue = new LinkedList(); + if(context!=null) { + bfsQueue.add(context); + } + while(!bfsQueue.isEmpty()) { + QueryContext currContext = bfsQueue.poll(); + if(currContext.getExpression()!=null) { + if(queryStartAndEndTimes[0]==0L || queryStartAndEndTimes[0]>currContext.getExpression().getStartTimestamp()) { + queryStartAndEndTimes[0] = currContext.getExpression().getStartTimestamp(); + } + + if(queryStartAndEndTimes[1]==0L || queryStartAndEndTimes[1] getScopesFromExpression(String expression) { + return getScopesFromExpression(expression, System.currentTimeMillis()); + } + + public static List 
getScopesFromExpression(String expression, long relativeTo) { + return getScopesFromExpression(getQueryContext(expression, relativeTo)); + } + + private static List getScopesFromExpression(QueryContext queryContext) { + Set scopes = new HashSet<>(); + Queue bfsQueue = new LinkedList(); + if(queryContext != null) { + bfsQueue.add(queryContext); + } + while(!bfsQueue.isEmpty()) { + QueryContext currContext = bfsQueue.poll(); + if(currContext.getExpression()!=null) { + TSDBQueryExpression expression = currContext.getExpression(); + String currentScope = expression.getScope(); + scopes.add(currentScope); //TODO: If the dc gets transferred to tags, we need to update this. + } + + if(currContext.getChildContexts()!=null) { + bfsQueue.addAll(currContext.getChildContexts()); + } + } + return new ArrayList<>(scopes); + } +} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContext.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContext.java new file mode 100644 index 000000000..f9d26f237 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContext.java @@ -0,0 +1,21 @@ +package com.salesforce.dva.argus.util; + +/** + * This class encapsulates some of the parameters that are sent through the web service request + */ +public class RequestContext { + + private String userName = "NULLUSER"; + + public RequestContext(String userName) { + this.userName = userName; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContextHolder.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContextHolder.java new file mode 100644 index 000000000..a2f44d246 --- /dev/null +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/RequestContextHolder.java @@ -0,0 +1,17 @@ +package com.salesforce.dva.argus.util; + +/** + * This thread local variable holds the context on user who is making the current request + */ +public class RequestContextHolder { + + private static ThreadLocal currentRequestContext = new ThreadLocal(); + + public static RequestContext getRequestContext() { + return currentRequestContext.get(); + } + + public static void setRequestContext(RequestContext context) { + currentRequestContext.set(context); + } +} diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TSDBQueryExpression.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TSDBQueryExpression.java index 960ccd8a9..3cce9e3e2 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TSDBQueryExpression.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TSDBQueryExpression.java @@ -8,92 +8,112 @@ public class TSDBQueryExpression { private Long startTimestamp = null; - - private Long endTimestamp = null; - - private String namespace = null; - - private String scope = null; - - private String metric = null; - - private Map tags = new HashMap(); - - private Aggregator aggregator = null; - - private Aggregator downsampler = null; - - private Long downsamplingPeriod = null; - + + private Long endTimestamp = null; + + private String namespace = null; + + private String scope = null; + + private String metric = null; + + private Map tags = new HashMap(); + + private Aggregator aggregator = null; + + private Aggregator downsampler = null; + + private Long downsamplingPeriod = null; + + private String[] percentile; + + private 
boolean showHistogramBuckets; + public Long getStartTimestamp() { - return startTimestamp; - } + return startTimestamp; + } + + public void setStartTimestamp(Long startTimestamp) { + this.startTimestamp = startTimestamp; + } + + public Long getEndTimestamp() { + return endTimestamp; + } + + public void setEndTimestamp(Long endTimestamp) { + this.endTimestamp = endTimestamp; + } + + public String getNamespace() { + return namespace; + } - public void setStartTimestamp(Long startTimestamp) { - this.startTimestamp = startTimestamp; - } + public void setNamespace(String namespace) { + this.namespace = namespace; + } - public Long getEndTimestamp() { - return endTimestamp; - } + public String getScope() { + return scope; + } - public void setEndTimestamp(Long endTimestamp) { - this.endTimestamp = endTimestamp; - } + public void setScope(String scope) { + this.scope = scope; + } - public String getNamespace() { - return namespace; - } + public String getMetric() { + return metric; + } - public void setNamespace(String namespace) { - this.namespace = namespace; - } + public void setMetric(String metric) { + this.metric = metric; + } - public String getScope() { - return scope; - } + public Map getTags() { + return tags; + } - public void setScope(String scope) { - this.scope = scope; - } + public void setTags(Map tags) { + this.tags = tags; + } - public String getMetric() { - return metric; - } + public Aggregator getAggregator() { + return aggregator; + } - public void setMetric(String metric) { - this.metric = metric; - } + public void setAggregator(Aggregator aggregator) { + this.aggregator = aggregator; + } - public Map getTags() { - return tags; - } + public Aggregator getDownsampler() { + return downsampler; + } - public void setTags(Map tags) { - this.tags = tags; - } + public void setDownsampler(Aggregator downsampler) { + this.downsampler = downsampler; + } - public Aggregator getAggregator() { - return aggregator; - } + public Long getDownsamplingPeriod() { + return downsamplingPeriod; + } - public void setAggregator(Aggregator aggregator) { - this.aggregator = aggregator; - } + public void setDownsamplingPeriod(Long downsamplingPeriod) { + this.downsamplingPeriod = downsamplingPeriod; + } - public Aggregator getDownsampler() { - return downsampler; - } + public String[] getPercentile() { + return percentile; + } - public void setDownsampler(Aggregator downsampler) { - this.downsampler = downsampler; - } + public void setPercentile(String[] percentile) { + this.percentile = percentile; + } - public Long getDownsamplingPeriod() { - return downsamplingPeriod; - } + public boolean isShowHistogramBuckets() { + return showHistogramBuckets; + } - public void setDownsamplingPeriod(Long downsamplingPeriod) { - this.downsamplingPeriod = downsamplingPeriod; - } + public void setShowHistogramBuckets(boolean showHistogramBuckets) { + this.showHistogramBuckets = showHistogramBuckets; + } } diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TemplateReplacer.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TemplateReplacer.java index 85a7062ea..634b0620e 100644 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TemplateReplacer.java +++ b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/TemplateReplacer.java @@ -1,5 +1,6 @@ package com.salesforce.dva.argus.util; +import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.alert.DefaultAlertService; import 
freemarker.core.InvalidReferenceException; @@ -11,6 +12,7 @@ import java.io.*; import java.text.MessageFormat; +import java.text.SimpleDateFormat; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -63,7 +65,7 @@ private static String makeExceptionForCertainTag(String templateString) { public static String applyTemplateChanges(DefaultAlertService.NotificationContext context, String templateString) { init(); - String originalString = templateString, generatedString = templateString; + String originalString = templateString, generatedString = templateString, errorMessage; // Prepare Data. Map root = new HashMap(); root.put("alert", context.getAlert()); @@ -74,7 +76,9 @@ public static String applyTemplateChanges(DefaultAlertService.NotificationContex root.put("metric", triggeredMetric.getMetric()); Map lowerCaseTagMap = getLowerCaseTagMap(triggeredMetric.getTags()); root.put("tag", lowerCaseTagMap); - root.put("triggerTimestamp", new Date(context.getTriggerFiredTime())); + String defaultFormat = "MMM d, yyyy h:mm:ss a"; + SimpleDateFormat simpleDateFormat = new SimpleDateFormat(defaultFormat); + root.put("triggerTimestamp", simpleDateFormat.format(new Date(context.getTriggerFiredTime()))); root.put("triggerValue", context.getTriggerEventValue()); int numOfIterations = 0; @@ -87,9 +91,17 @@ public static String applyTemplateChanges(DefaultAlertService.NotificationContex StringWriter stringWriter = new StringWriter(); configuredTemplate.process(root, stringWriter); generatedString = stringWriter.toString(); + } catch (InvalidReferenceException ire) { + errorMessage = MessageFormat.format("Error occurred during applying template change to the following variable: {0}\nDetailed Message: {1}",ire.getBlamedExpressionString(), ire.getMessage()); + _logger.error(errorMessage); + generatedString = originalString; + context.getHistory().appendMessageNUpdateHistory(errorMessage, History.JobStatus.ERROR, 0); + break; } catch (Exception e) { - _logger.error(MessageFormat.format("Exception occurred while applying template change on {0}, with error message {1}.", templateString, e.getMessage())); - generatedString = MessageFormat.format("ERROR occurred during applying template change to the following variable: {0}\n\n Detailed Message: {1}",((InvalidReferenceException) e).getBlamedExpressionString(), e.getMessage()); + generatedString = originalString; + errorMessage = MessageFormat.format("Error occurred during applying template change - {0}", e); + _logger.error(errorMessage); + context.getHistory().appendMessageNUpdateHistory(errorMessage, History.JobStatus.ERROR, 0); break; } } while(!generatedString.equals(templateString) && ++numOfIterations < MAX_ITERATIONS); // If we unwrap alert.name, it may also be templatize, we should replace that as well. diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/Barrier.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/Barrier.java deleted file mode 100644 index 7d87f522d..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/Barrier.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. 
Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.util.zookeeper; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.List; - -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.WatchedEvent; -import org.apache.zookeeper.Watcher; -import org.apache.zookeeper.ZooDefs.Ids; -import org.apache.zookeeper.ZooKeeper; -import org.apache.zookeeper.data.Stat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Barrier class defined for a root path, waits on a specified number of nodes to join the barrier. - * This is needed to achieve synchronization before the nodes can do a job. - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - */ -public class Barrier implements Watcher { - //~ Static fields/initializers ******************************************************************************************************************* - private static Logger _logger = LoggerFactory.getLogger(Barrier.class); - private static ZooKeeper zooKeeper = null; - private static Integer mutex; - - //~ Instance fields ****************************************************************************************************************************** - private String rootPath; - private int size; - // Node name inside barrier - private String name; - - /** - * Barrier constructor - * - * @param url The zookeeper cluster to connect to. Cannot be null. 
- * @param rootPath The barrier root path under which client ephemeral nodes are created - * @param size The maximum number of nodes to join barrier before all of them can proceed - */ - public Barrier(String url, String rootPath, int size) { - requireArgument(url != null, "Zookeeper cluster url cannot be null."); - requireArgument(rootPath != null, "Root path cannot be null."); - - this.rootPath = rootPath; - this.size = size; - - if(zooKeeper == null){ - try { - _logger.debug("Starting ZooKeeper:"); - zooKeeper = new ZooKeeper(url, 3000, this); - mutex = new Integer(-1); - _logger.debug("Finished starting ZooKeeper: " + zooKeeper); - } catch (IOException e) { - _logger.error(e.toString()); - zooKeeper = null; - } - } - - if (zooKeeper != null) { - try { - Stat s = zooKeeper.exists(rootPath, false); - if (s == null) { - zooKeeper.create(rootPath, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); - } - } catch (KeeperException e) { - _logger.error("Keeper exception when instantiating barrier: {} " , e.toString()); - } catch (InterruptedException e) { - _logger.error("Interrupted exception"); - } - } - - try { - name = new String(InetAddress.getLocalHost().getCanonicalHostName().toString() + System.currentTimeMillis()); - } catch (UnknownHostException e) { - _logger.error(e.toString()); - } - } - - public static void setZookeeper(ZooKeeper zooKeeper) { - _logger.info("Resetting zookeeper client context"); - Barrier.zooKeeper = zooKeeper; - } - - public static ZooKeeper getZooKeeper() { - return zooKeeper; - } - - /** - * Wait until required number of nodes join barrier - * - * @return true when required number of nodes have entered barrier, else wait - * @throws KeeperException If a keeper exception occurred - * @throws InterruptedException If interrupted - */ - public boolean enter() throws KeeperException, InterruptedException{ - zooKeeper.create(rootPath + "/" + name, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL); - while (true) { - synchronized (mutex) { - List list = zooKeeper.getChildren(rootPath, true); - - if (list.size() < size) { - mutex.wait(); - } else { - return true; - } - } - } - } - - /** - * Wait until all nodes leave barrier - * - * @return true when required all nodes have left barrier, else wait. - * @throws KeeperException If a keeper exception occurred - * @throws InterruptedException If interrupted - */ - public boolean leave() throws KeeperException, InterruptedException{ - zooKeeper.delete(rootPath + "/" + name, 0); - while (true) { - synchronized (mutex) { - List list = zooKeeper.getChildren(rootPath, true); - if (list.size() > 0) { - mutex.wait(); - } else { - return true; - } - } - } - } - - synchronized public void process(WatchedEvent event) { - synchronized (mutex) { - _logger.debug("Process:{}", event.getType()); - mutex.notify(); - } - } -} \ No newline at end of file diff --git a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/ClientNode.java b/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/ClientNode.java deleted file mode 100644 index 9f5c9767f..000000000 --- a/ArgusCore/src/main/java/com/salesforce/dva/argus/util/zookeeper/ClientNode.java +++ /dev/null @@ -1,227 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. 
Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ - -package com.salesforce.dva.argus.util.zookeeper; - -import static com.salesforce.dva.argus.system.SystemAssert.requireArgument; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import org.apache.zookeeper.CreateMode; -import org.apache.zookeeper.KeeperException; -import org.apache.zookeeper.WatchedEvent; -import org.apache.zookeeper.Watcher; -import org.apache.zookeeper.Watcher.Event.EventType; -import org.apache.zookeeper.ZooDefs.Ids; -import org.apache.zookeeper.ZooKeeper; -import org.apache.zookeeper.data.Stat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Create an election root node, under which client nodes are created. - * Each client node participates in election to determine its index position, and total number of peers. - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - */ -public class ClientNode implements Runnable{ - - //~ Static fields/initializers ******************************************************************************************************************* - private static final String CLIENT_NODE_PREFIX = "/c_"; - - //~ Instance fields ****************************************************************************************************************************** - private Logger _logger = LoggerFactory.getLogger(ClientNode.class); - private ZooKeeper zooKeeper; - private final String electionRootPath; - private String clientNodePath; - - //~ Constructors ********************************************************************************************************************************* - - /** - * Creates a new ClientNode object. - * - * @param zookeeperURL The zookeeper cluster to connect to. Cannot be null. 
- * @param electionRootPath The root path under which client ephemeral nodes are created - * @throws IOException If an I/O exception occurred - */ - public ClientNode(final String zookeeperURL, String electionRootPath) throws IOException { - requireArgument(zookeeperURL != null, "Zookeeper cluster url cannot be null."); - requireArgument(electionRootPath != null, "Election root path cannot be null."); - - if(zooKeeper == null){ - try { - _logger.debug("Starting ZooKeeper:"); - zooKeeper = new ZooKeeper(zookeeperURL, 3000, new ClientNodeWatcher()); - _logger.debug("Finished starting ZooKeeper: " + zooKeeper); - } catch (IOException e) { - _logger.error(e.toString()); - zooKeeper = null; - } - } - - this.electionRootPath = electionRootPath; - } - - /** - * ClientsResult holds the positional index after election, and the number of peers - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - * - */ - public final class ClientsResult { - private final int nodePosition; - private final int numberOfPeers; - - public ClientsResult(final int nodePosition, final int numberOfPeers) { - this.nodePosition = nodePosition; - this.numberOfPeers = numberOfPeers; - } - - public int getNodePosition() { - return nodePosition; - } - - public int getNumberOfPeers() { - return numberOfPeers; - } - } - - /** - * Gets node index and its peer count - * - * @return ClientsResult object with info - */ - public ClientsResult calculateAllNodesResult() { - final List childNodePaths = getChildren(electionRootPath, false); - _logger.info("Total peers = {} ", childNodePaths.size()); - Collections.sort(childNodePaths); - - int index = childNodePaths.indexOf(clientNodePath.substring(clientNodePath.lastIndexOf('/') + 1)); - return new ClientsResult(index, childNodePaths.size()); - } - - /** - * Create a zookeeper node - * - * @param path The path of znode to create - * @param watch Whether to watch this node or not - * @param ephimeral Create ephemeral or permanent node - * @return The created node path - */ - public String createNode(final String path, final boolean watch, final boolean ephimeral) { - String createdNodePath = null; - try { - - final Stat nodeStat = zooKeeper.exists(path, watch); - - if(nodeStat == null) { - createdNodePath = zooKeeper.create(path, new byte[0], Ids.OPEN_ACL_UNSAFE, (ephimeral ? 
CreateMode.EPHEMERAL_SEQUENTIAL : CreateMode.PERSISTENT)); - } else { - createdNodePath = path; - } - - } catch (KeeperException | InterruptedException e) { - throw new IllegalStateException(e); - } - - return createdNodePath; - } - - /** - * Gets list of children for a znode - * - * @param path The path of the znode - * @param watch Whether to watch this node or not - * @return List of children nodes of znode - */ - public List getChildren(final String path, final boolean watch) { - List childNodes = null; - - try { - childNodes = zooKeeper.getChildren(path, watch); - } catch (KeeperException | InterruptedException e) { - throw new IllegalStateException(e); - } - - return childNodes; - } - - @Override - public void run() { - final String rootNodePath = createNode(electionRootPath, false, false); - if(rootNodePath == null) { - throw new IllegalStateException("Unable to create/access election root node with path: " + electionRootPath); - } - - clientNodePath = createNode(rootNodePath + CLIENT_NODE_PREFIX, false, true); - if(clientNodePath == null) { - throw new IllegalStateException("Unable to create/access client node with path: " + electionRootPath); - } - - _logger.info("Client node created with path: {}", clientNodePath); - } - - /** - * Subscribes to events from zookeeper server. - * On session expiry, close the current zookeeper client object. - * If number of peers change recalculate client results - * - * @author Dilip Devaraj (ddevaraj@salesforce.com) - * - */ - public class ClientNodeWatcher implements Watcher{ - - @Override - public void process(WatchedEvent event) { - _logger.debug("Client, event received:{}", event); - - final EventType eventType = event.getType(); - - if (event.getState() == Watcher.Event.KeeperState.Expired) { - - try { - zooKeeper.close(); - - } catch (InterruptedException e) { - _logger.error("Exception in closing expired zookeeper client: {}",e); - } - - } - - if(EventType.NodeChildrenChanged.equals(eventType)) { - _logger.debug("Node children changed"); - calculateAllNodesResult(); - } - } - } -} diff --git a/ArgusCore/src/main/javacc/AnnotationReader.jj b/ArgusCore/src/main/javacc/AnnotationReader.jj index e3f2490e7..0db1ae10a 100644 --- a/ArgusCore/src/main/javacc/AnnotationReader.jj +++ b/ArgusCore/src/main/javacc/AnnotationReader.jj @@ -29,24 +29,24 @@ package com.salesforce.dva.argus.service.annotation; import com.salesforce.dva.argus.entity.Annotation; import java.io.*; import java.util.*; -import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.AnnotationStorageService; import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; import com.salesforce.dva.argus.system.SystemException; import static com.salesforce.dva.argus.system.SystemAssert.*; public class AnnotationReader { - private TSDBService tsdbService; + private AnnotationStorageService annotationStorageService; - AnnotationReader(TSDBService tsdbService) { + AnnotationReader(AnnotationStorageService annotationStorageService) { this((Reader)null); - this.tsdbService = tsdbService; + this.annotationStorageService = annotationStorageService; } public static boolean isValid(Collection expressions) { boolean result = true; if(expressions != null) { - AnnotationReader reader = new AnnotationReader((TSDBService)null); + AnnotationReader reader = new AnnotationReader((AnnotationStorageService)null); for(String expression : expressions) { try { if(!reader.isValidExpression(expression)) { @@ -147,7 +147,7 @@ private List start(String expression, boolean 
syntaxOnly, Class clazz) : } { { - requireState(syntaxOnly || tsdbService != null, "TSDB service can only be null when only syntax validation is being performed."); + requireState(syntaxOnly || annotationStorageService != null, "Annotation storage service can only be null when only syntax validation is being performed."); ReInit(new StringReader(expression)); } result = expression(syntaxOnly, clazz) @@ -184,7 +184,7 @@ private List expression(boolean syntaxOnly, Class clazz) : AnnotationQuery query = new AnnotationQuery(scope, metric, tags, type, startTimestamp, endTimestamp); List queries = new ArrayList(); queries.add(query); - List annotations = tsdbService.getAnnotations(queries); + List annotations = annotationStorageService.getAnnotations(queries); if(user == null) { return (List) annotations; } else { @@ -255,4 +255,4 @@ private Map getTags() : } return tagsMap; } -} +} \ No newline at end of file diff --git a/ArgusCore/src/main/javacc/MetricReader.jj b/ArgusCore/src/main/javacc/MetricReader.jj index 0c1f3a0cc..d35b429f8 100644 --- a/ArgusCore/src/main/javacc/MetricReader.jj +++ b/ArgusCore/src/main/javacc/MetricReader.jj @@ -43,6 +43,7 @@ import com.salesforce.dva.argus.util.QueryContext; import com.salesforce.dva.argus.util.QueryContextHolder; import com.salesforce.dva.argus.util.TSDBQueryExpression; import static com.salesforce.dva.argus.system.SystemAssert.*; +import com.salesforce.dva.argus.service.metric.ParseException; public class MetricReader { @@ -81,6 +82,70 @@ public class MetricReader { return result; } + + public static boolean validateExpression(String expression) + { + boolean result = true; + + if(expression != null) + { + try + { + MetricReader reader = new MetricReader((TSDBService)null, (DiscoveryService)null, (TransformFactory)null); + if(!reader.isValidExpression(expression)) + { + result = false; + } + } + catch (ParseException e) + { + result = false; + String newMessage = simpleParseErrorMessage(e, expression); + throw new RuntimeException(newMessage, e); + } + catch (TokenMgrError e) + { + result = false; + String newMessage = simpleLexicalErrorMessage(e, expression); + throw new RuntimeException(newMessage, e); + } + catch (Exception e) + { + result = false; + } + } + return result; + } + + protected static String simpleParseErrorMessage(ParseException e, String expression) + { + String msg = null; + if (e.currentToken != null) + { + msg = String.format("Syntax error at line: %d column: %d in alert expression: %s", + e.currentToken.next.beginLine, e.currentToken.next.beginColumn, expression); + } + else + { + // TODO? - parse existing msg and regex match for 'at line\s+(\d+),\s*column\s+(\d+)' + // would need to be revisited as we upgrade javacc + msg = String.format("Syntax error in alert expression: %s", expression); + } + return msg; + } + + protected static String simpleLexicalErrorMessage(TokenMgrError e, String expression) + { + // Extract the first sentence from the message. + // This portion of the message includes the line and column of the lexical error. + String tokenErrMsg = e.getMessage(); + int endOfSentence = tokenErrMsg.indexOf('.'); + String locationMsg = (endOfSentence < 0)? 
tokenErrMsg : tokenErrMsg.substring(0, endOfSentence+1); + + String msg = String.format("%s Alert expression: %s", locationMsg, expression ); + return msg; + } + public static boolean isValid(String expression) { return isValid(Arrays.asList(new String[] {expression})); } @@ -100,7 +165,7 @@ public class MetricReader { public String getQueryTimeWindow() { return queryTimeWindow; } - + public enum TimeUnit { SECOND("s", 1000), @@ -169,6 +234,7 @@ TOKEN : { < #TSDB_ALLOWED_TEXT : ( | | | | > } TOKEN : { < #TAGV : > } TOKEN : { < #TAG : "=" > } +TOKEN : { < #TAG_NOT : "!=" > } TOKEN : { < #DIGIT : ["0"-"9"] > } TOKEN : { < #LETTER : ["a"-"z","A"-"Z"] > } TOKEN : { < #HYPHEN : "-" > } @@ -244,6 +310,10 @@ TOKEN : { < ANOMALY_RPCA : "ANOMALY_RPCA" > } TOKEN : { < GROUPBY : "GROUPBY" > } TOKEN : { < GROUPBYTAG : "GROUPBYTAG" > } TOKEN : { < INTERPOLATE : "INTERPOLATE" > } +TOKEN : { < RATE : "RATE" > } +TOKEN : { < SLICE : "SLICE" > } +TOKEN : { < METADATA_INCLUDE : "METADATA_INCLUDE" > } +TOKEN : { < METADATA_EXCLUDE : "METADATA_EXCLUDE" > } TOKEN : { < COLON : ":" > } TOKEN : { < COMMA : "," > } @@ -251,7 +321,7 @@ TOKEN : { < LEFT_CURLY : "{" > } TOKEN : { < RIGHT_CURLY : "}" > } TOKEN : { < LEFT_PARENTHESIS : "(" > } TOKEN : { < RIGHT_PARENTHESIS : ")" > } -TOKEN : { < AGGREGATOR : "avg" | "sum" | "min" | "max" | "dev" | "zimsum" | "mimmin" | "mimmax" | "count" | "none" > } +TOKEN : { < AGGREGATOR : "avg" | "sum" | "min" | "max" | "dev" | "zimsum" | "mimmin" | "mimmax" | "count" | "none" | "imin" | "imax" | "isum" | "first" | "last"> } TOKEN : { < TIME : ()+ | ()+ > } TOKEN : { < DOWNSAMPLER : ()+ > } @@ -259,7 +329,7 @@ TOKEN : { < DOWNSAMPLER : ()+ > } TOKEN : { < NAMESPACE : > } TOKEN : { < SCOPE : > } TOKEN : { < METRIC : > } -TOKEN : { < TAGS : ()* > } +TOKEN : { < TAGS : ( | )(( | ))* > } TOKEN : { < CONSTANT : ()+ > } boolean isValidExpression(String expression) : @@ -560,8 +630,8 @@ private String getFunctionNameAndUpdateContext(QueryContextHolder contextHolder) { updateQueryContextWithFunction(t.image, contextHolder); return t.image; - } - | + } + | t = { updateQueryContextWithFunction(t.image, contextHolder); @@ -710,7 +780,31 @@ private String getFunctionNameAndUpdateContext(QueryContextHolder contextHolder) { updateQueryContextWithFunction(t.image, contextHolder); return t.image; - } + } + | + t = + { + updateQueryContextWithFunction(t.image, contextHolder); + return t.image; + } + | + t = + { + updateQueryContextWithFunction(t.image, contextHolder); + return t.image; + } + | + t = + { + updateQueryContextWithFunction(t.image, contextHolder); + return t.image; + } + | + t = + { + updateQueryContextWithFunction(t.image, contextHolder); + return t.image; + } } private void updateQueryContextWithFunction(String functionName, QueryContextHolder contextHolder) : @@ -775,6 +869,9 @@ private List expression(long relativeTo, boolean syntaxOnly, Class clazz, Aggregator downsampler = null; Long downsamplingPeriod = null; String downsampleTokenStr = null; + String[] percentile = null; + String optionalText = null; + boolean showHistogramBuckets = false; TSDBQueryExpression expression = new TSDBQueryExpression(); } { @@ -784,30 +881,61 @@ private List expression(long relativeTo, boolean syntaxOnly, Class clazz, ( tags = getTags() )? aggregator = getAggregator() (LOOKAHEAD(2) downsampleTokenStr = getDownsampleToken())? - (namespace = getString())? + (optionalText = getNamespace())? { - downsampler = downsampleTokenStr != null ? 
getDownsampler(downsampleTokenStr) : null; + downsampler = downsampleTokenStr != null ? getDownsampler(downsampleTokenStr) : null; downsamplingPeriod = downsampleTokenStr != null ? getDownsamplingPeriod(downsampleTokenStr): null; endTimestamp = endTimestamp == null ? relativeTo : endTimestamp; - expression.setNamespace(namespace); + // Parse out namespace and optional tsdb parameters + if(optionalText !=null && !optionalText.isEmpty()){ + String[] parts = optionalText.split(":"); + + for(int i=0; i< parts.length;i++){ + if(parts[i].toLowerCase().startsWith("histogram-percentiles")){ + percentile = getPercentile(parts[i].toLowerCase()); + } else if(parts[i].toLowerCase().startsWith("histogram-buckets")){ + showHistogramBuckets = true; + } else{ + namespace = parts[i]; + } + } + } + expression.setNamespace(namespace); expression.setScope(scope); expression.setMetric(metric); expression.setAggregator(aggregator); expression.setTags(tags); expression.setStartTimestamp(startTimestamp); expression.setEndTimestamp(endTimestamp); - expression.setEndTimestamp(endTimestamp); - expression.setDownsampler(downsampler); + expression.setDownsampler(downsampler); expression.setDownsamplingPeriod(downsamplingPeriod); - currentQueryContext.getChildExpressions().add(expression); - if(MetricQuery.class.equals(clazz)) { + expression.setPercentile(percentile); + expression.setShowHistogramBuckets(showHistogramBuckets); + + // if current context is a transform, we add the current expression as a child context to it + if(currentQueryContext.getTransform()!=null){ + QueryContext currExpContext = new QueryContext(); + currExpContext.setExpression(expression); + currExpContext.setParentContext(currentQueryContext); + currentQueryContext.getChildContexts().add(currExpContext); + }else{ + currentQueryContext.setExpression(expression); + } + + if(MetricQuery.class.equals(clazz)) { MetricQuery query = new MetricQuery(scope, metric, tags, startTimestamp, endTimestamp); query.setNamespace(namespace); query.setAggregator(aggregator); query.setDownsampler(downsampler); query.setDownsamplingPeriod(downsamplingPeriod); + if(percentile != null) { + query.setPercentile(percentile); + } + if(showHistogramBuckets != false) { + query.setShowHistogramBuckets(showHistogramBuckets); + } List queries = discoveryService.getMatchingQueries(query); return (List) queries; } else if(Metric.class.equals(clazz)) { @@ -819,6 +947,13 @@ private List expression(long relativeTo, boolean syntaxOnly, Class clazz, query.setAggregator(aggregator); query.setDownsampler(downsampler); query.setDownsamplingPeriod(downsamplingPeriod); + if(percentile != null) { + query.setPercentile(percentile); + } + if(showHistogramBuckets != false) { + query.setShowHistogramBuckets(showHistogramBuckets); + } + setQueryTimeWindow(query); List queries = discoveryService.getMatchingQueries(query); @@ -869,9 +1004,9 @@ private String getString() : t = { return t.image; } | - t = + t = { return t.image; } - | + | t = { return t.image; } | @@ -899,9 +1034,17 @@ private Map getTags() : String tagsStr = t.image; String[] tags = tagsStr.split(","); for(String tag : tags) { - String[] tagKVPair = tag.split("="); - String tagK = tagKVPair[0]; - String tagV = tagKVPair[1]; + String tagK = ""; + String tagV = ""; + String[] tagKVPair = tag.split("!="); + if(tagKVPair!=null && tagKVPair.length==2){ + tagK = tagKVPair[0]; + tagV = MetricQuery.TAG_NOT_EQUALS_INTERNAL_PREFIX + tagKVPair[1]; + }else{ + tagKVPair = tag.split("="); + tagK = tagKVPair[0]; + tagV = tagKVPair[1]; + } 
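+ // Store the parsed pair; a value parsed from a "!=" tag carries MetricQuery.TAG_NOT_EQUALS_INTERNAL_PREFIX so downstream matching can treat it as negated.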
tagsMap.put(tagK, tagV); } return tagsMap; @@ -947,4 +1090,15 @@ private Long getDownsamplingPeriod(String token) : TimeUnit unit = TimeUnit.fromString(timeUnit); return time * unit.getValue(); } -} \ No newline at end of file +} + +private String[] getPercentile(String token) : +{} +{ + { + + String percentileStr = token.substring("histogram-percentiles[".length(), token.length() -1); + String[] percentiles = percentileStr.split("\\|"); + return percentiles; + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTestIT.java similarity index 76% rename from ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTest.java rename to ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTestIT.java index 387a6af1e..a403c2c0d 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/AbstractTestIT.java @@ -28,10 +28,12 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus; import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; +import com.salesforce.dva.argus.entity.HistogramBucket; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.system.SystemException; import com.salesforce.dva.argus.system.SystemMain; @@ -58,8 +60,7 @@ import static org.junit.Assert.fail; -@Ignore -public abstract class AbstractTest { +public abstract class AbstractTestIT { private static final String scopeNameTemplate = "MetricGenerator.{0,number,#}"; private static final String metricNameTemplate = "app_record_count.{0,number,#}"; @@ -73,13 +74,15 @@ public abstract class AbstractTest { apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); ch.qos.logback.classic.Logger kafkaLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("kafka"); kafkaLogger.setLevel(ch.qos.logback.classic.Level.OFF); + ch.qos.logback.classic.Logger zkLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.I0Itec.zkclient"); + zkLogger.setLevel(ch.qos.logback.classic.Level.OFF); } protected TestingServer zkTestServer; protected SystemMain system; protected KafkaServerStartable kafkaServer; private String tempDir = ""; - + private static void deleteFolder(File folder) { File[] files = folder.listFiles(); @@ -95,7 +98,7 @@ private static void deleteFolder(File folder) { } public static String createRandomName() { - String className = AbstractTest.class.getSimpleName(); + String className = AbstractTestIT.class.getSimpleName(); Long randomTime = random.nextLong(); Long systemTime = System.currentTimeMillis(); @@ -103,9 +106,12 @@ public static String createRandomName() { } public static Metric createMetric() { + return createMetric(((int) (random.nextDouble() * 500)) + 1); + } + + public static Metric createMetric(int datapointCount) { int scopeIndex = ((int) (random.nextDouble() * 100)); int metricIndex = ((int) (random.nextDouble() * 10)); - int datapointCount = ((int) (random.nextDouble() * 500)) + 1; String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); String metric = MessageFormat.format(metricNameTemplate, metricIndex); Metric result = new Metric(scope, metric); @@ -118,7 +124,7 @@ public static Metric createMetric() { result.setTags(tags); return result; } - + public List createRandomMetrics(String scope, String metric, int count) { List result = new 
ArrayList<>(count); @@ -157,7 +163,54 @@ public static Annotation createAnnotation() { return result; } - private void setupEmbeddedKafka() { + public static Histogram createHistogram(int numHistogramBuckets) { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + long timestamp = System.currentTimeMillis(); + Histogram result = new Histogram(scope, metric); + + Map<HistogramBucket, Long> buckets = new HashMap<>(); + float lowerBound = 0; + float upperBound = 50; + for (int i = 0; i < numHistogramBuckets; i++) { + HistogramBucket histogramBucket = new HistogramBucket(lowerBound, upperBound); + buckets.put(histogramBucket, random.nextLong()); + lowerBound = upperBound; + upperBound = upperBound + 100; + } + result.setBuckets(buckets); + result.setTimestamp(timestamp); + result.setTags(tags); + return result; + } + + public static Histogram createHistogramWrongBounds(int numHistogramBuckets) { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + long timestamp = System.currentTimeMillis(); + Histogram result = new Histogram(scope, metric); + + Map<HistogramBucket, Long> buckets = new HashMap<>(); + float lowerBound = 2; + float upperBound = 1; + for (int i = 0; i < numHistogramBuckets; i++) { + HistogramBucket histogramBucket = new HistogramBucket(lowerBound, upperBound); + buckets.put(histogramBucket, random.nextLong()); + lowerBound = upperBound; + upperBound = upperBound + 100; + } + result.setBuckets(buckets); + result.setTimestamp(timestamp); + result.setTags(tags); + return result; + } + + + protected void setupEmbeddedKafka() { Properties properties = new Properties(); properties.put("zookeeper.connect", zkTestServer.getConnectString()); @@ -167,6 +220,7 @@ private void setupEmbeddedKafka() { properties.put("num.partitions", "2"); properties.put("log.flush.interval.ms", "10"); properties.put("log.dir", "/tmp/kafka-logs/" + createRandomName()); + properties.put("offsets.topic.replication.factor", "1"); KafkaConfig config = new KafkaConfig(properties); @@ -182,8 +236,7 @@ private void tearDownEmbeddedKafka() { } } - @Before - public void setUp() { + protected void setUpZkTestServer() { try { Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); DriverManager.getConnection("jdbc:derby:memory:argus;create=true").close(); @@ -192,6 +245,11 @@ LoggerFactory.getLogger(getClass()).error("Exception in setUp:{}", ex.getMessage()); fail("Exception during database startup."); } + } + + @Before + public void setUp() { + setUpZkTestServer(); setupEmbeddedKafka(); system = getInstance(); system.start(); @@ -203,12 +261,13 @@ public void tearDown() { system.getServiceFactory().getManagementService().cleanupRecords(); system.stop(); } - tearDownEmbeddedKafka(); try { + tearDownEmbeddedKafka(); zkTestServer.close(); DriverManager.getConnection("jdbc:derby:memory:argus;shutdown=true").close(); } catch (SQLNonTransientConnectionException ex) { if (ex.getErrorCode() >= 50000 || ex.getErrorCode() < 40000) { + System.out.println("RunEx:"+ex); throw new RuntimeException(ex); } } catch (Exception ex) { @@ -235,8 +294,8 @@ public SystemMain getInstance() { } } return SystemMain.getInstance(config); - } - + } + public SystemMain 
getInstance(Properties props) { Properties config = new Properties(); InputStream is = null; @@ -256,10 +315,12 @@ public SystemMain getInstance(Properties props) { } } } - system.stop(); + if (system != null) { + system.stop(); + } system = SystemMain.getInstance(config); system.start(); return system; - } + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/TestUtils.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/TestUtils.java new file mode 100644 index 000000000..21cb5a6ae --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/TestUtils.java @@ -0,0 +1,332 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus; + +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; +import com.salesforce.dva.argus.entity.HistogramBucket; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import com.salesforce.dva.argus.system.SystemMain; +import org.powermock.reflect.Whitebox; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.math.BigInteger; +import java.security.SecureRandom; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.TreeMap; + +import static org.junit.Assert.fail; + + +public class TestUtils { + + public static final SecureRandom random = new SecureRandom(); + private static final String scopeNameTemplate = "MetricGenerator.{0,number,#}"; + private static final String metricNameTemplate = "app_record_count.{0,number,#}"; + + protected static final Map tags; + static private Properties props; + + static { + tags = new HashMap<>(); + tags.put("source", "unittest"); + props = new Properties(); + props.put("system.property.mail.enabled", "false"); + props.put("system.property.admin.email", "argus-admin@mycompany.com"); + props.put("system.property.log.level", "ERROR"); + props.put("service.binding.cache", "com.salesforce.dva.argus.service.cache.NoOperationCacheService"); + props.put("service.binding.tsdb", "com.salesforce.dva.argus.service.tsdb.DefaultTSDBService"); + props.put("service.binding.audit", "com.salesforce.dva.argus.service.audit.DefaultAuditService"); + props.put("service.property.mq.connection.count", "2"); + props.put("service.property.mq.endpoint", "vm://localhost?broker.persistent=false"); + props.put("service.property.auth.ldap.authtype", "simple"); + props.put("service.property.auth.ldap.endpoint", "ldaps://ldaps.mycomany.com:636"); + props.put("service.property.auth.ldap.searchbase", "OU=active,OU=users,DC=mycompany,DC=com:OU=active,OU=robot,DC=mycompany,DC=com"); + props.put("service.property.auth.ldap.searchdn", "CN=argus_service,OU=active,OU=users,DC=mycompany,DC=com"); + props.put("service.property.auth.ldap.searchpwd", "argus_service_password"); + props.put("service.property.auth.ldap.usernamefield", "sAMAccountName"); + props.put("service.property.mail.alerturl.template", "https://localhost:8443/argus/#/alerts/$alertid$"); + props.put("service.property.mail.metricurl.template", "https://localhost:8443/argus/#/viewmetrics?expression=$expression$"); + props.put("service.property.mail.smtp.auth", "false"); + props.put("service.property.mail.smtp.host", "smtprelay.mycompany.com"); + props.put("service.property.mail.smtp.starttls.enable", "false"); + props.put("service.property.tsdb.connection.count", "2"); + props.put("service.property.tsdb.endpoint.read", "http://tsdbread.mycompany.com:4466"); + props.put("service.property.tsdb.endpoint.timeout", "10000"); + props.put("service.property.tsdb.endpoint.write", "http://tsdbwrite.mycompany.com:4477"); + props.put("service.property.tsdb.phoenix.jdbc.url", 
"${service.property.tsdb.phoenix.jdbc.url}"); + props.put("service.property.cache.redis.cluster", "redis0.mycompany.com:6379,redis1.mycompany.com:6389"); + } + + + + static public SystemConfiguration getConfiguration() { + Properties config = new Properties(); + InputStream is = null; + + try { + is = TestUtils.class.getResourceAsStream("/argus.properties"); + config.load(is); + } catch (IOException ex) { + throw new SystemException(ex); + } finally { + if (is != null) { + try { + is.close(); + } catch (IOException ex) { + assert false : "This should never occur."; + } + } + } + return new SystemConfiguration(config); + } + + static public SystemMain getInstance() { + Properties config = new Properties(); + InputStream is = null; + + try { + is = TestUtils.class.getResourceAsStream("/argus.properties"); + config.load(is); + } catch (IOException ex) { + throw new SystemException(ex); + } finally { + if (is != null) { + try { + is.close(); + } catch (IOException ex) { + assert false : "This should never occur."; + } + } + } + return SystemMain.getInstance(config); + } + + static public SystemMain getInstanceWithInMemProps() { + long start = System.currentTimeMillis(); + SystemMain system = SystemMain.getInstance(props); + long end = System.currentTimeMillis(); + // System.out.println("getInstanceWithInMemProps " + (end - start) + " milliseconds"); + return system; + } + + public static String createRandomName() { + return createRandomName(TestUtils.class.getSimpleName()); + } + + public static String createRandomName(String name) { + String className = name; + Long randomTime = random.nextLong(); + Long systemTime = System.currentTimeMillis(); + + return MessageFormat.format("{0}-{1}-{2}", className, Long.toHexString(systemTime), Long.toHexString(randomTime)); + } + + public static List createRandomMetrics(String className, String scope, String metric, int count) { + List result = new ArrayList<>(count); + + scope = scope == null ? createRandomName(className) : scope; + + String tag = createRandomName(className); + + for (int i = 0; i < count; i++) { + String metricName = metric == null ? 
createRandomName(className) : metric; + Metric met = new Metric(scope, metricName); + int datapointCount = random.nextInt(25) + 1; + Map<Long, Double> datapoints = new HashMap<>(); + long start = System.currentTimeMillis() - 60000L; + + for (int j = 0; j < datapointCount; j++) { + datapoints.put(start - (j * 60000L), (double)(random.nextInt(100) + 1)); + } + met.setDatapoints(datapoints); + met.setDisplayName(createRandomName(className)); + met.setUnits(createRandomName(className)); + met.setTag(tag, String.valueOf(i)); + result.add(met); + } + return result; + } + + public static Metric createMetric() { + return createMetric(((int) (random.nextDouble() * 500)) + 1); + } + + public static Metric createMetric(int datapointCount) { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + Metric result = new Metric(scope, metric); + Map<Long, Double> datapoints = new TreeMap<>(); + + for (int i = 0; i < datapointCount; i++) { + datapoints.put(System.currentTimeMillis(), random.nextDouble() * 500); + } + result.setDatapoints(datapoints); + result.setTags(tags); + return result; + } + + public static Annotation createAnnotation() { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + long timestamp = System.currentTimeMillis(); + Annotation result = new Annotation("unittest", Long.toHexString(timestamp), "unittest", scope, metric, timestamp); + + result.setTags(tags); + return result; + } + + public static Histogram createHistogram(int numHistogramBuckets) { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + long timestamp = System.currentTimeMillis(); + Histogram result = new Histogram(scope, metric); + + Map<HistogramBucket, Long> buckets = new HashMap<>(); + float lowerBound = 0; + float upperBound = 50; + for (int i = 0; i < numHistogramBuckets; i++) { + HistogramBucket histogramBucket = new HistogramBucket(lowerBound, upperBound); + buckets.put(histogramBucket, random.nextLong()); + lowerBound = upperBound; + upperBound = upperBound + 100; + } + result.setBuckets(buckets); + result.setTimestamp(timestamp); + result.setTags(tags); + return result; + } + + public static Histogram createHistogramWrongBounds(int numHistogramBuckets) { + int scopeIndex = ((int) (random.nextDouble() * 100)); + int metricIndex = ((int) (random.nextDouble() * 10)); + String scope = MessageFormat.format(scopeNameTemplate, scopeIndex); + String metric = MessageFormat.format(metricNameTemplate, metricIndex); + long timestamp = System.currentTimeMillis(); + Histogram result = new Histogram(scope, metric); + + Map<HistogramBucket, Long> buckets = new HashMap<>(); + float lowerBound = 2; + float upperBound = 1; + for (int i = 0; i < numHistogramBuckets; i++) { + HistogramBucket histogramBucket = new HistogramBucket(lowerBound, upperBound); + buckets.put(histogramBucket, random.nextLong()); + lowerBound = upperBound; + upperBound = upperBound + 100; + } + result.setBuckets(buckets); + result.setTimestamp(timestamp); + result.setTags(tags); + return result; + } + + public static Alert 
generateAlert(String alertName, PrincipalUser user, String expression) { + Alert alert = new Alert(user, user, alertName, expression, "* * * * *"); + return alert; + } + + public static Notification generateNotification(String notificationName, Alert alert, List triggers) { + Notification notification = new Notification(notificationName, alert, "notifier-name", new ArrayList(), 5000L); + notification.setAlert(alert); + notification.setTriggers(triggers); + return notification; + } + + public static Trigger generateTrigger(String triggerName, Alert alert) { + Trigger trigger = new Trigger(alert, Trigger.TriggerType.GREATER_THAN, triggerName, 0.95, 5000L); + trigger.setAlert(alert); + return trigger; + } + + public static void setStaticField(Class clazz, String fieldName, Object value) { + try { + Field field = clazz.getDeclaredField(fieldName); + field.setAccessible(true); + Field modifiers = Field.class.getDeclaredField("modifiers"); + modifiers.setAccessible(true); + modifiers.set(field, field.getModifiers() & ~Modifier.FINAL); + field.set(null, value); + } catch (Exception ex) { + fail(ex.getMessage()); + } + } + + public static void setField(Object obj, String fieldName, Object value) { + try { + Whitebox.setInternalState(obj, fieldName, value); + } catch (Exception ex) { + fail(ex.getMessage()); + } + } + + public static Notification getNotification(String notificationName, String notifierName, Alert alert, List subscriptionList) { + Notification notification = new Notification(notificationName, alert, notifierName, + subscriptionList, 5000L); + notification.setSeverityLevel(4); + return notification; + } + + public static History getHistory() { + return new History("TEST HISTORY MESSAGE", "TEST_HOST", new BigInteger("100002"), + History.JobStatus.STARTED, 10, System.currentTimeMillis() - 86400000); + } + + public static Metric getMetric() { + SecureRandom random = new SecureRandom(); + return createMetric(((int) (random.nextDouble() * 500)) + 1); + } + + public static Trigger getTrigger(Alert alert, Trigger.TriggerType triggerType, String triggerName, String triggerThreshold, String triggerInertiaMillis) { + return new Trigger(alert, triggerType, triggerName, Double.parseDouble(triggerThreshold), Long.parseLong(triggerInertiaMillis)); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AlertTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AlertTest.java new file mode 100644 index 000000000..c182a7c83 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AlertTest.java @@ -0,0 +1,172 @@ +package com.salesforce.dva.argus.entity; + +import org.junit.Ignore; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; + +import static com.salesforce.dva.argus.TestUtils.generateAlert; +import static com.salesforce.dva.argus.TestUtils.generateTrigger; +import static com.salesforce.dva.argus.TestUtils.generateNotification; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; + + +public class AlertTest { + + private static final String EXPRESSION = + "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + private static PrincipalUser admin = new PrincipalUser(null, "test-user", "test-user@salesforce.com"); + + @Test + public void testCopyConstructor_noNotificationsAndTriggers() throws Exception { + Alert alert = generateAlert("alert-name", admin, EXPRESSION); + PrincipalUser newUser = new 
PrincipalUser(admin, "second-user", "second-user@salesforce.com"); + Alert copy = new Alert(alert, alert.getName()+"_copy", newUser); + + assertTrue(alert.isEnabled() == copy.isEnabled()); + assertTrue(copy.getName().equals(alert.getName()+"_copy")); + assertTrue(copy.getOwner().equals(newUser)); + assertTrue(copy.getExpression().equals(alert.getExpression())); + assertTrue(copy.getCronEntry().equals(alert.getCronEntry())); + assertTrue(copy.isMissingDataNotificationEnabled() == alert.isMissingDataNotificationEnabled()); + assertTrue(copy.isShared() == alert.isShared()); + assertTrue(copy.isValid() == alert.isValid()); + assertTrue(copy.getNotifications().isEmpty()); + assertTrue(copy.getTriggers().isEmpty()); + assertFalse(copy.toString().equals(alert.toString())); + } + + @Test + public void testCopyConstructor_withNotificationsAndTriggers() throws Exception { + Alert alert = generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", alert); + Trigger triggerNoNotification = generateTrigger("trigger-no-notification", alert); + + Notification notification = generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger, triggerNoNotification})); + + PrincipalUser newUser = new PrincipalUser(admin, "second-user", "second-user@salesforce.com"); + Alert copy = new Alert(alert, alert.getName()+"_copy", newUser); + + assertTrue(alert.isEnabled() == copy.isEnabled()); + assertTrue(copy.getName().equals(alert.getName()+"_copy")); + assertTrue(copy.getOwner().equals(newUser)); + assertTrue(copy.getExpression().equals(alert.getExpression())); + assertTrue(copy.getCronEntry().equals(alert.getCronEntry())); + assertTrue(copy.isMissingDataNotificationEnabled() == alert.isMissingDataNotificationEnabled()); + assertTrue(copy.isShared() == alert.isShared()); + assertTrue(copy.isValid() == alert.isValid()); + assertFalse(copy.toString().equals(alert.toString())); + + Notification copiedNotification = copy.getNotifications().get(0); + assertFalse(copiedNotification.equals(notification)); + assertTrue(copiedNotification.getName().equals(notification.getName())); + assertTrue(copiedNotification.getNotifierName().equals(notification.getNotifierName())); + assertTrue(copiedNotification.getCreatedBy().equals(newUser)); + assertTrue(copiedNotification.getSRActionable() == notification.getSRActionable()); + + Trigger copiedTrigger1 = copy.getTriggers().get(0); + + assertFalse(copiedTrigger1.equals(trigger)); + assertTrue(copiedTrigger1.getName().equals(trigger.getName())); + assertTrue(copiedTrigger1.getThreshold().equals(trigger.getThreshold())); + assertTrue(copiedTrigger1.getCreatedBy().equals(newUser)); + assertTrue(copiedTrigger1.getType().equals(trigger.getType())); + assertTrue(copiedTrigger1.getInertia() == trigger.getInertia()); + + Trigger copiedTrigger2 = copy.getTriggers().get(1); + assertFalse(copiedTrigger2.equals(triggerNoNotification)); + assertTrue(copiedTrigger2.getName().equals(triggerNoNotification.getName())); + assertTrue(copiedTrigger2.getThreshold().equals(triggerNoNotification.getThreshold())); + assertTrue(copiedTrigger2.getCreatedBy().equals(newUser)); + assertTrue(copiedTrigger2.getType().equals(triggerNoNotification.getType())); + assertTrue(copiedTrigger2.getInertia() == triggerNoNotification.getInertia()); + } + + @Test + public void testIsEqual_areEqual() { + Alert alert = 
generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", alert); + Notification notification = generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger})); + alert.setEnabled(true); + + Alert test = alert; + + assertTrue(alert.equals(test)); + } + + @Test + public void testIsEqual_areEqual2() throws Exception { + Alert alert = generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", alert); + Notification notification = generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + notification.setEnableClearNotification(true); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger})); + alert.setEnabled(true); + + Alert test = new Alert(alert, alert.getName(), admin); + + assertTrue(alert.equals(test)); + } + + @Test + public void testIsEqual_areNotEqual() { + Alert alert = generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", alert); + Notification notification = generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger})); + alert.setEnabled(true); + + Alert other = generateAlert("different-name", admin, EXPRESSION); + Trigger trigger2 = generateTrigger("trigger-name", other); + Notification notification2 = generateNotification("notification-name", other, Arrays.asList(new Trigger[]{trigger2})); + other.setNotifications(Arrays.asList(new Notification[]{notification2})); + other.setTriggers(Arrays.asList(new Trigger[]{trigger2})); + other.setEnabled(true); + + assertFalse(alert.equals(other)); + } + + @Test + public void testIsEqual_areNotEqual_fieldChanged() throws Exception { + // create an alert + Alert original = generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", original); + Notification notification = generateNotification("notification-name", original, Arrays.asList(new Trigger[]{trigger})); + original.setNotifications(Arrays.asList(new Notification[]{notification})); + original.setTriggers(Arrays.asList(new Trigger[]{trigger})); + original.setEnabled(true); + + // create copy + Alert different = new Alert(original, original.getName(), admin); + // change one field + different.setShared(true); + + assertFalse(original.equals(different)); + } + + @Test + public void testIsEqual_areNotEqual_triggerChanged() throws Exception { + // create an alert + Alert original = generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = generateTrigger("trigger-name", original); + Notification notification = generateNotification("notification-name", original, Arrays.asList(new Trigger[]{trigger})); + original.setNotifications(Arrays.asList(new Notification[]{notification})); + original.setTriggers(Arrays.asList(new Trigger[]{trigger})); + original.setEnabled(true); + + // create copy + Alert different = new Alert(original, original.getName(), admin); + ((Trigger) different.getTriggers().toArray()[0]).setName("a-new-trigger-name"); + + assertFalse(original.equals(different)); + } +} \ No newline at end of file diff --git 
a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AnnotationTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AnnotationTest.java new file mode 100644 index 000000000..a0f183c2a --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/AnnotationTest.java @@ -0,0 +1,71 @@ +package com.salesforce.dva.argus.entity; + +import org.apache.commons.lang.RandomStringUtils; +import org.apache.commons.lang3.ArrayUtils; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +public class AnnotationTest { + + private final Logger _logger = LoggerFactory.getLogger(AnnotationTest.class); + + @Test + public void testGetSizeBytes() throws Exception { + Annotation a = new Annotation("source", + "id", + "type", + "scope", + "metric", + System.currentTimeMillis()); + Random r = new Random(); + int expectedSize = 0; + for (Field f : ArrayUtils.addAll(a.getClass().getDeclaredFields(), // Annotation class + a.getClass().getSuperclass().getDeclaredFields())) { // TSDBEntity class + if (Modifier.isStatic(f.getModifiers())) { + continue; + } + Class t = f.getType(); + f.setAccessible(true); + if (t.equals(String.class)) { + String value = RandomStringUtils.random(r.nextInt(100)); + f.set(a, value); + expectedSize += value.length(); + } else if (t.equals(Long.class)) { + Long value = r.nextLong(); + f.set(a, value); + expectedSize += Long.BYTES; + } else if (t.equals(int.class)) { + expectedSize += Integer.BYTES; + } else if (t.equals(Map.class)) { + Map map = new HashMap<>(); + for (int i = 0; i < r.nextInt(5); i++) { + String key = RandomStringUtils.random(r.nextInt(20)); + String value = RandomStringUtils.random(r.nextInt(20)); + map.put(key, value); + expectedSize += key.length() + value.length(); + } + f.set(a, map); + } else { + if (f.getName().startsWith("$jacoco")) { + // jacoco fields start with $, we want to ignore those + } else { + fail(String.format("Unsupported type=%s for field=%s, please update this test", t, f.getName())); + } + } + _logger.debug(String.format("field=%s, size=%d", f.getName(), expectedSize)); + } + + int size = a.computeSizeBytes(); + assertEquals(expectedSize, size); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/HistogramTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/HistogramTest.java new file mode 100644 index 000000000..831852095 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/HistogramTest.java @@ -0,0 +1,42 @@ +package com.salesforce.dva.argus.entity; + +import static org.junit.Assert.assertTrue; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.Test; + +public class HistogramTest { + + @Test + public void clearAndSetHistogramBucketsTest() { + Histogram histogram = new Histogram("scope", "metric"); + long timestamp = System.currentTimeMillis(); + Map buckets = new HashMap<>(); + float lowerBound = 0; + float upperBound = 50; + for (int i = 0; i < 5; i++) { + HistogramBucket histogramBucket= new HistogramBucket(lowerBound, upperBound); + buckets.put(histogramBucket, 4L); + lowerBound = upperBound; + upperBound = upperBound + 100; + } + histogram.setBuckets(buckets); + histogram.setTimestamp(timestamp); + assertTrue(histogram.getBuckets().size() == 5); + 
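+ // clearBuckets should leave the histogram with an empty bucket map; re-applying setBuckets restores the original five buckets.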
histogram.clearBuckets(); + assertTrue(histogram.getBuckets().size() == 0); + histogram.setBuckets(buckets); + assertTrue(histogram.getBuckets().size() == 5); + } + + @Test + public void addHistogramBucketsTest() { + Histogram histogram = new Histogram("scope", "metric"); + histogram.addBucket(0, 5, 2L); + assertTrue(histogram.getBuckets().size() == 1); + histogram.addBucket(5, 10, 3L); + assertTrue(histogram.getBuckets().size() == 2); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/MetricTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/MetricTest.java index 1cef6e3d9..80e93d311 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/MetricTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/MetricTest.java @@ -1,17 +1,37 @@ package com.salesforce.dva.argus.entity; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.util.AlertUtils; import org.junit.Test; import java.util.HashMap; import java.util.Map; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; public class MetricTest { + @Test + public void testEquals() { + String scope = "argus.core"; + String name = "test.metric.name"; + Map tags = new HashMap<>(); + tags.put("host", "test.com"); + tags.put("user", "testuser"); + Map tagsCopy = new HashMap<>(tags); + Metric metric1 = new Metric(scope, name); + metric1.setTags(tags); + Metric metric2 = new Metric(scope, name); + metric2.setTags(tagsCopy); + + assertNotSame(tags, tagsCopy); + assertEquals(tags, tagsCopy); + assertNotSame(metric1, metric2); + assertEquals(metric1, metric2); + } + @Test public void clearMetricsTest() { diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/NotificationTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/NotificationTest.java index 0c09447e5..f1ae66347 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/NotificationTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/NotificationTest.java @@ -4,8 +4,12 @@ import org.junit.Test; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import static com.salesforce.dva.argus.TestUtils.generateAlert; +import static com.salesforce.dva.argus.TestUtils.generateNotification; +import static com.salesforce.dva.argus.TestUtils.generateTrigger; import static org.junit.Assert.*; public class NotificationTest { @@ -33,4 +37,41 @@ public void testSetSubscriptionsForEmailValidationForInvalidMails() { validSubscriptions.add(" "); testNotification.setSubscriptions(validSubscriptions); } + + @Test + public void testSetSubject() { + String subject = "TEST_SUBJECT"; + Notification testNotification = new Notification(); + testNotification.setEmailSubject(subject); + assertEquals(subject, testNotification.getEmailSubject()); + } + + @Test + public void testEnableClearNotificationDefaultValue() { + Notification testNotification = new Notification(); + assertTrue(testNotification.isEnableClearNotification()); + } + + @Test + public void testEnableClearNotificationFalseValue() { + Notification testNotification = new Notification(); + testNotification.setEnableClearNotification(false); + assertFalse(testNotification.isEnableClearNotification()); + } + + @Test + public void testIsEquals() { + String expr = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, 
-1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + PrincipalUser admin = new PrincipalUser(null, "test-user", "test-user@salesforce.com"); + Alert alert = generateAlert("alert-name", admin, expr); + Trigger trigger = generateTrigger("trigger-name", alert); + Notification notification = generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + + assertEquals(notification, new Notification( + notification.getName(), + alert, + notification.getNotifierName(), + notification.getSubscriptions(), + notification.getCooldownPeriod())); + } } \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/TSDBEntityTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/TSDBEntityTest.java new file mode 100644 index 000000000..7e6d3d25e --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/entity/TSDBEntityTest.java @@ -0,0 +1,42 @@ +package com.salesforce.dva.argus.entity; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class TSDBEntityTest { + @Test + public void testReplaceUnsupportedChars() { + String expected = "mytag1"; + String actual = TSDBEntity.replaceUnsupportedChars("mytag1"); + assertEquals(expected, actual); + + expected = "ANCESTOR_DOMAINS__ANCESTORS"; + actual = TSDBEntity.replaceUnsupportedChars("ANCESTOR_DOMAINS|ANCESTORS"); + assertEquals(expected, actual); + + expected = "OU__0"; + actual = TSDBEntity.replaceUnsupportedChars("OU=0"); + assertEquals(expected, actual); + + expected = "url__//"; + actual = TSDBEntity.replaceUnsupportedChars("url://"); + assertEquals(expected, actual); + + expected = "ANCESTOR_DOMAINS__ANCESTORS"; + actual = TSDBEntity.replaceUnsupportedChars("ANCESTOR_DOMAINS__ANCESTORS"); + assertEquals(expected, actual); + + expected = "ANCESTOR_DOMAINS____ANCESTORS"; + actual = TSDBEntity.replaceUnsupportedChars("ANCESTOR_DOMAINS__|ANCESTORS"); + assertEquals(expected, actual); + + expected = ""; + actual = TSDBEntity.replaceUnsupportedChars(""); + assertEquals(expected, actual); + + expected = null; + actual = TSDBEntity.replaceUnsupportedChars(null); + assertEquals(expected, actual); + } +} \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AnnotationServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AnnotationServiceIT.java index fa7ef87c7..f60cf6460 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AnnotationServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AnnotationServiceIT.java @@ -28,10 +28,10 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.Notification; @@ -50,7 +50,7 @@ import static org.junit.Assert.assertTrue; @Category(IntegrationTest.class) -public class AnnotationServiceIT extends AbstractTest { +public class AnnotationServiceIT extends AbstractTestIT { private UserService uService; private AnnotationService aService; diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuditServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuditServiceTest.java index ae5658ae4..19ceb5e83 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuditServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuditServiceTest.java @@ -28,10 +28,9 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.Audit; import com.salesforce.dva.argus.system.SystemException; @@ -42,17 +41,41 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -public class AuditServiceTest extends AbstractTest { +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + +public class AuditServiceTest { + + static private SystemMain system; + static AuditService auditService; + static AlertService alertService; + static UserService userService; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + auditService = system.getServiceFactory().getAuditService(); + alertService = system.getServiceFactory().getAlertService(); + userService = system.getServiceFactory().getUserService(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } private static final String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; @Test public void testCreateAudit() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name1", expression, "* * * * *"); alert = alertService.updateAlert(alert); @@ -64,10 +87,7 @@ public void testCreateAudit() { @Test(expected = SystemException.class) public void testUpdateAudit() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), 
userService.findAdminUser(), "test_alert-name2", expression, "* * * * *"); alert = alertService.updateAlert(alert); @@ -79,14 +99,11 @@ public void testUpdateAudit() { @Test public void testFindByJPAEntity() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name3", expression, "* * * * *"); alert = alertService.updateAlert(alert); - int expectedAuditcount = random.nextInt(10) + 1; + int expectedAuditcount = TestUtils.random.nextInt(10) + 1; List expectedResult = new ArrayList<>(auditService.findByEntity(alert.getId())); for (int i = 0; i < expectedAuditcount; i++) { @@ -100,15 +117,12 @@ public void testFindByJPAEntity() { @Test public void testFindByHostName() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name4", expression, "* * * * *"); alert = alertService.updateAlert(alert); String hostName = "test@salesforce.com"; - int expectedAuditcount = random.nextInt(10) + 1; + int expectedAuditcount = TestUtils.random.nextInt(10) + 1; List expectedResult = new ArrayList(); for (int i = 0; i < expectedAuditcount; i++) { @@ -122,14 +136,11 @@ public void testFindByHostName() { @Test public void testFindAll() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name5", expression, "* * * * *"); alert = alertService.updateAlert(alert); - int expectedAuditcount = random.nextInt(10) + 1; + int expectedAuditcount = TestUtils.random.nextInt(10) + 1; List expectedResult = new ArrayList(auditService.findAll()); for (int i = 0; i < expectedAuditcount; i++) { @@ -143,19 +154,16 @@ public void testFindAll() { @Test public void testFindByMessage() { - AuditService auditService = system.getServiceFactory().getAuditService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name", expression, "* * * * *"); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "test_alert-name6", expression, "* * * * *"); alert = alertService.updateAlert(alert); List expectedResult = new ArrayList(); - int expectedAuditcount = random.nextInt(10) + 1; + int expectedAuditcount = TestUtils.random.nextInt(10) + 1; String message = "test_message"; for (int i = 0; i < expectedAuditcount; i++) { - 
expectedResult.add(auditService.createAudit(new Audit(createRandomName() + message + createRandomName(), "test@salesforce.com", alert))); + expectedResult.add(auditService.createAudit(new Audit(TestUtils.createRandomName() + message + TestUtils.createRandomName(), "test@salesforce.com", alert))); } List actualRusits = auditService.findByMessage(message); @@ -163,4 +171,4 @@ public void testFindByMessage() { assertTrue(actualRusits.containsAll(expectedResult)); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuthServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuthServiceIT.java index 183cd77e3..86992b242 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuthServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AuthServiceIT.java @@ -28,10 +28,10 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.service.auth.LDAPAuthService.Property; @@ -41,7 +41,7 @@ import static org.junit.Assert.assertNotNull; @Category(IntegrationTest.class) -public class AuthServiceIT extends AbstractTest { +public class AuthServiceIT extends AbstractTestIT { @Test public void testCreateAudit() { diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/CallbackServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/CallbackServiceTest.java index f2e752434..69d7f0cd7 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/CallbackServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/CallbackServiceTest.java @@ -4,7 +4,8 @@ import java.util.Collections; import java.util.stream.IntStream; -import com.salesforce.dva.argus.AbstractTest; +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.client.WireMock; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.History.JobStatus; @@ -15,20 +16,50 @@ import com.salesforce.dva.argus.service.alert.notifier.CallbackNotifier; import org.junit.Test; +import static com.github.tomakehurst.wiremock.client.WireMock.*; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + /** * Created by mingzhong on 26.01.17. 
*/ -public class CallbackServiceTest extends AbstractTest { +public class CallbackServiceTest { private static final String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + static private SystemMain system; + static AlertService alertService; + static UserService userService; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + alertService = system.getServiceFactory().getAlertService(); + userService = system.getServiceFactory().getUserService(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + @Test public void testCallbackNotifier() { + WireMockServer mockServer = new WireMockServer(9600); + mockServer.start(); + WireMock.configureFor("localhost", mockServer.port()); + stubFor(post(anyUrl()).willReturn(aResponse().withStatus(200))); - final UserService userService = system.getServiceFactory().getUserService(); Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert_name", @@ -36,16 +67,17 @@ public void testCallbackNotifier() { "* * * * *"); final Trigger trigger = new Trigger(alert, Trigger.TriggerType.GREATER_THAN_OR_EQ, "trigger_name", 2D, 5); - final String jsonBody = "{ \"uri\" : \"http://localhost:9600\", \"method\" : \"POST\", \"header\": { \"Content-Type\": \"application/json\" }, \"body\": \"{ \\\"triggerName\\\": \\\"${trigger.name}\\\", \\\"alertName\\\": \\\"${alert.name}\\\" }\" }"; final Notification notification = new Notification("notification_name", alert, "notifier_name", - Collections.singletonList(jsonBody), + Collections.singletonList("http://localhost:9600"), 23); + notification.setCustomText("{ \"triggerName\": \"${trigger.name}\", \"alertName\": \"${alert.name}\" }"); + alert.setTriggers(Collections.singletonList(trigger)); alert.setNotifications(Collections.singletonList(notification)); - alert = system.getServiceFactory().getAlertService().updateAlert(alert); + alert = alertService.updateAlert(alert); History history = new History(JobStatus.SUCCESS.getDescription(), "localhost", BigInteger.ONE, JobStatus.SUCCESS); @@ -59,13 +91,12 @@ public void testCallbackNotifier() { CallbackNotifier notifier = (CallbackNotifier) system.getServiceFactory() .getAlertService() .getNotifier(AlertService.SupportedNotifier.CALLBACK); - int notificationCounter = 3; - - + int notificationCounter = 5; IntStream.range(0, notificationCounter).forEach(i -> notifier.sendNotification(context)); assertThat("Unexpected number of triggered alerts.", notifier.getAllNotifications(alert).size(), is(notificationCounter)); + mockServer.shutdownServer(); } } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ChartServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ChartServiceTest.java index 988a1212c..259a04929 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ChartServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ChartServiceTest.java @@ -12,7 +12,6 @@ import org.junit.Before; import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Chart; import com.salesforce.dva.argus.entity.Chart.ChartQuery; import com.salesforce.dva.argus.entity.Chart.ChartQueryType; @@ -21,34 +20,43 @@ import com.salesforce.dva.argus.entity.Dashboard.LayoutType; import 
com.salesforce.dva.argus.entity.PrincipalUser; -public class ChartServiceTest extends AbstractTest { - - private ChartService _chartService; - private PrincipalUser _adminUser; - - @Before - public void setup() { - _chartService = system.getServiceFactory().getChartService(); - _adminUser = system.getServiceFactory().getUserService().findAdminUser(); - } - +import org.junit.BeforeClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +public class ChartServiceTest { + + static private ChartService _chartService; + static private PrincipalUser _adminUser; + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + _chartService = system.getServiceFactory().getChartService(); + _adminUser = system.getServiceFactory().getUserService().findAdminUser(); + } + + @Test public void testCreateChart() { - + List queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg")); Chart chart = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); chart.setTitle("This is an example chart"); - + chart = _chartService.updateChart(chart); assertNotNull(chart.getId()); - + Chart retrievedChart = _chartService.getChartByPrimaryKey(chart.getId()); assertEquals(chart.getId(), retrievedChart.getId()); } @Test public void testDeleteChart() { - + List queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg")); Chart chart = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); Map preferences = new HashMap<>(); @@ -56,17 +64,17 @@ public void testDeleteChart() { preferences.put("downsampling","Method1"); chart.getPreferences().putAll(preferences); chart.setTitle("This is an example chart"); - + chart = _chartService.updateChart(chart); - + _chartService.deleteChart(chart); Chart retrievedChart = _chartService.getChartByPrimaryKey(chart.getId()); assertNull(retrievedChart); } - + @Test public void testDeleteChartById() { - + List queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg")); Chart chart = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); chart.setTitle("This is an example chart"); @@ -74,17 +82,17 @@ public void testDeleteChartById() { preferences.put("brush","enabled"); preferences.put("downsampling","Method1"); chart.getPreferences().putAll(preferences); - + chart = _chartService.updateChart(chart); - + _chartService.deleteChart(chart.getId()); Chart retrievedChart = _chartService.getChartByPrimaryKey(chart.getId()); assertNull(retrievedChart); } - + @Test public void testGetChartByPrimaryKey() { - + List queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg")); Chart chart = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); chart.setTitle("This is an example chart"); @@ -94,51 +102,51 @@ public void testGetChartByPrimaryKey() { chart.getPreferences().putAll(preferences); chart = _chartService.updateChart(chart); - + Chart retrievedChart = _chartService.getChartByPrimaryKey(chart.getId()); assertEquals(chart.getId(), retrievedChart.getId()); } - + @Test public void testGetChartsByOwner() { - + List queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg")); Chart chart1 = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); chart1.setTitle("chart1"); - + Chart chart2 = new Chart(_adminUser, _adminUser, ChartType.LINE, queries); 
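 	// chart2, like chart1, is owned by the admin user; getChartsByOwner should return both in creation order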
 	chart2.setTitle("chart2");
-
+
 	_chartService.updateChart(chart1);
 	_chartService.updateChart(chart2);
-
+
 	List<Chart> charts = _chartService.getChartsByOwner(_adminUser);
 	assertEquals("chart1", charts.get(0).getTitle());
 	assertEquals("chart2", charts.get(1).getTitle());
 }
-
+
 @Test
 public void testGetChartsForEntity() {
-
+
 	Dashboard dashboard = new Dashboard(_adminUser, "dashboard", _adminUser);
 	dashboard.setLayout(LayoutType.MEDIUM);
 	dashboard = system.getServiceFactory().getDashboardService().updateDashboard(dashboard);
-
+
 	List<ChartQuery> queries = Arrays.asList(new ChartQuery(ChartQueryType.METRIC, "-1h:argus.jvm:mem.heap.used:avg"));
 	Chart chart1 = new Chart(_adminUser, _adminUser, ChartType.LINE, queries);
 	chart1.setTitle("chart1");
 	chart1.setEntity(dashboard);
-
+
 	Chart chart2 = new Chart(_adminUser, _adminUser, ChartType.LINE, queries);
 	chart2.setTitle("chart2");
 	chart2.setEntity(dashboard);
-
+
 	chart1 = _chartService.updateChart(chart1);
 	chart2 = _chartService.updateChart(chart2);
-
+
 	List<Chart> charts = _chartService.getChartsForEntity(dashboard.getId());
 	assertEquals("chart1", charts.get(0).getTitle());
 	assertEquals("chart2", charts.get(1).getTitle());
 }
-
+
 }
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DashboardServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DashboardServiceTest.java
index 8af3dd8ba..20ad57c43 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DashboardServiceTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DashboardServiceTest.java
@@ -28,14 +28,13 @@
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
-
+
 package com.salesforce.dva.argus.service;
 
-import com.salesforce.dva.argus.AbstractTest;
+import com.salesforce.dva.argus.AbstractTestIT;
 import com.salesforce.dva.argus.entity.Dashboard;
 import com.salesforce.dva.argus.entity.PrincipalUser;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
 
 import java.util.Arrays;
 import java.util.List;
@@ -43,12 +42,20 @@
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
+import org.junit.Before;
+import com.salesforce.dva.argus.system.SystemMain;
+import com.salesforce.dva.argus.TestUtils;
+import java.sql.DriverManager;
+import java.sql.SQLNonTransientConnectionException;
+import org.slf4j.LoggerFactory;
 
-public class DashboardServiceTest extends AbstractTest {
+import static org.junit.Assert.fail;
 
-    private PrincipalUser admin;
-    DashboardService dService;
-    UserService uService;
+@Ignore("Tests are failing in Strata -W-6003515 to investigate, fix, and re-enable")
+public class DashboardServiceTest extends AbstractTestIT {
+    static private PrincipalUser admin;
+    static DashboardService dService;
+    static UserService uService;
 
     @Before
     @Override
@@ -120,17 +127,17 @@
 public void testDashboard_FindShareDeleteByVersion() {
 	dService.deleteDashboard(dashboard.getId());
 	assertNull(dService.findDashboardByPrimaryKey(dashboard.getId()));
 }
-
+
 @Test
 public void testFindDashboardsMeta() {
-
+
 	PrincipalUser owner = new PrincipalUser(admin, "owner1", "owner1@mycompany.abc");
 	Dashboard dashboard = new Dashboard(uService.findAdminUser(), "Test Dashboard", owner);
 	dashboard.setShared(true);
 	dashboard = dService.updateDashboard(dashboard);
 	assertNotNull(dashboard.getId());
-
+
 	List<Dashboard> retrieved = dService.findDashboards(1, true, null);
 	assertEquals(1, retrieved.size());
 	assertEquals(dashboard.getId(),
retrieved.get(0).getId()); @@ -151,20 +158,20 @@ public void testFindDashboardsMetaByVersion() { assertEquals(1, retrieved.size()); assertEquals(dashboard.getId(), retrieved.get(0).getId()); } - + @Test public void testFindDashboardsByOwnerMeta() { PrincipalUser owner1 = new PrincipalUser(admin, "owner1", "owner1@mycompany.abc"); PrincipalUser owner2 = new PrincipalUser(admin, "owner2", "owner2@mycompany.abc"); - + Dashboard dashboard1 = new Dashboard(uService.findAdminUser(), "Test Dashboard", owner1); dashboard1 = dService.updateDashboard(dashboard1); assertNotNull(dashboard1.getId()); - + Dashboard dashboard2 = new Dashboard(uService.findAdminUser(), "Test Dashboard", owner2); dashboard2 = dService.updateDashboard(dashboard2); assertNotNull(dashboard2.getId()); - + owner1 = uService.findUserByUsername("owner1"); List dashboardsRetrieved = dService.findDashboardsByOwner(owner1, true, null); @@ -193,23 +200,23 @@ public void testFindDashboardsByOwnerMetaAndByVersion() { assertEquals(1, dashboardsRetrieved.size()); assertEquals(dashboard1.getId(), dashboardsRetrieved.get(0).getId()); } - + @Test public void testFindSharedDashboardsMeta() { PrincipalUser owner = new PrincipalUser(admin, "owner1", "owner1@mycompany.abc"); - + Dashboard dashboard1 = new Dashboard(uService.findAdminUser(), "Test Dashboard1", owner); dashboard1.setShared(true); dashboard1 = dService.updateDashboard(dashboard1); - + owner = uService.findUserByUsername("owner1"); - + Dashboard dashboard2 = new Dashboard(uService.findAdminUser(), "Test Dashboard2", owner); dashboard2 = dService.updateDashboard(dashboard2); - + assertNotNull(dashboard1.getId()); assertNotNull(dashboard2.getId()); - + List dashboardsRetrieved = dService.findSharedDashboards(true, null, null, null); assertEquals(1, dashboardsRetrieved.size()); assertEquals(dashboard1.getId(), dashboardsRetrieved.get(0).getId()); @@ -237,27 +244,27 @@ public void testFindSharedDashboardsMetaByVersion() { assertEquals(1, dashboardsRetrieved.size()); assertEquals(dashboard1.getId(), dashboardsRetrieved.get(0).getId()); } - + @Test public void testFindSharedDashboardsByOwner() { PrincipalUser owner1 = new PrincipalUser(admin, "owner1", "owner1@mycompany.abc"); - + Dashboard dashboard1 = new Dashboard(uService.findAdminUser(), "Test Dashboard1", owner1); dashboard1.setShared(true); dashboard1 = dService.updateDashboard(dashboard1); - + owner1 = uService.findUserByUsername("owner1"); - + Dashboard dashboard2 = new Dashboard(uService.findAdminUser(), "Test Dashboard2", owner1); dashboard2 = dService.updateDashboard(dashboard2); - + assertNotNull(dashboard1.getId()); assertNotNull(dashboard2.getId()); - + Dashboard dashboard3 = new Dashboard(uService.findAdminUser(), "Test Dashboard3", uService.findAdminUser()); dashboard3.setShared(true); dashboard3 = dService.updateDashboard(dashboard3); - + Dashboard dashboard4 = new Dashboard(uService.findAdminUser(), "Test Dashboard4", uService.findAdminUser()); dashboard4 = dService.updateDashboard(dashboard4); @@ -265,21 +272,21 @@ public void testFindSharedDashboardsByOwner() { assertNotNull(dashboard4.getId()); PrincipalUser owner2 = new PrincipalUser(admin, "owner2", "owner2@mycompany.abc"); - + Dashboard dashboard5 = new Dashboard(uService.findAdminUser(), "Test Dashboard5", owner2); dashboard5.setShared(true); dashboard5 = dService.updateDashboard(dashboard5); - + owner2 = uService.findUserByUsername("owner2"); Dashboard dashboard6 = new Dashboard(uService.findAdminUser(), "Test Dashboard6", owner2); dashboard6 = 
dService.updateDashboard(dashboard6); assertNotNull(dashboard5.getId()); assertNotNull(dashboard6.getId()); - + List allSharedDashboardsRetrieved = dService.findSharedDashboards(false, null, null, null); assertEquals(3, allSharedDashboardsRetrieved.size()); - + List allSharedOwner1DashboardsRetrieved = dService.findSharedDashboards(false, owner1, null, null); assertEquals(1, allSharedOwner1DashboardsRetrieved.size()); assertEquals(dashboard1.getId(), allSharedOwner1DashboardsRetrieved.get(0).getId()); @@ -287,7 +294,7 @@ public void testFindSharedDashboardsByOwner() { List allSharedAdminDashboardsRetrieved = dService.findSharedDashboards(false, admin, null, null); assertEquals(1, allSharedAdminDashboardsRetrieved.size()); assertEquals(dashboard3.getId(), allSharedAdminDashboardsRetrieved.get(0).getId()); - + List allSharedOwner2DashboardsRetrieved = dService.findSharedDashboards(false, owner2, null, null); assertEquals(1, allSharedOwner2DashboardsRetrieved.size()); assertEquals(dashboard5.getId(), allSharedOwner2DashboardsRetrieved.get(0).getId()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DefaultAlertServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DefaultAlertServiceTest.java deleted file mode 100644 index cae020418..000000000 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/DefaultAlertServiceTest.java +++ /dev/null @@ -1,868 +0,0 @@ -package com.salesforce.dva.argus.service; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; -import static org.mockito.Matchers.*; -import static org.mockito.Mockito.*; - -import java.io.IOException; -import java.lang.reflect.Field; -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.atomic.AtomicInteger; - -import javax.persistence.EntityManager; -import javax.persistence.Persistence; - -import com.salesforce.dva.argus.service.alert.notifier.RefocusNotifier; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mock; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.google.inject.Provider; -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Alert; -import com.salesforce.dva.argus.entity.History; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.entity.Notification; -import com.salesforce.dva.argus.entity.PrincipalUser; -import com.salesforce.dva.argus.entity.Trigger; -import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.service.MQService.MQQueue; -import com.salesforce.dva.argus.service.alert.DefaultAlertService; -import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; -import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; - -@RunWith(org.mockito.runners.MockitoJUnitRunner.class) -public class DefaultAlertServiceTest extends AbstractTest { - - private static final String EXPRESSION = - "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; - - @Mock private Provider _emProviderMock; - 
@Mock private MQService _mqServiceMock; - @Mock private MetricService _metricServiceMock; - @Mock private TSDBService _tsdbServiceMock; - @Mock private MailService _mailServiceMock; - @Mock private HistoryService _historyServiceMock; - @Mock private MonitorService _monitorServiceMock; - @Mock private AuditService _auditServiceMock; - @Mock private ObjectMapper _mapper; - - private DefaultAlertService alertService; - - @Before - public void setup() { - alertService = new DefaultAlertService(system.getConfiguration(), _mqServiceMock, _metricServiceMock, _auditServiceMock, - _tsdbServiceMock, _mailServiceMock, _historyServiceMock, _monitorServiceMock, system.getNotifierFactory(), - _emProviderMock); - try { - Field field = alertService.getClass().getDeclaredField("_mapper"); - field.setAccessible(true); - field.set(alertService, _mapper); - } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { - fail("Failed to set mocked ObjectMapper using reflection."); - } - } - - @Test - public void testExecuteScheduledAlerts_ForOneTimeSeries() { - ServiceFactory sFactory = system.getServiceFactory(); - UserService userService = sFactory.getUserService(); - - int triggerMinValue = 50, inertiaPeriod = 1000 * 60 * 5; - int cooldownPeriod = 1000 * 5; - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = _createMetric(createRandomName(), createRandomName(), triggerMinValue, inertiaPeriod); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), - cooldownPeriod); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(trigger)); - alert.setNotifications(Arrays.asList(notification)); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - spyAlertService.executeScheduledAlerts(1, 1000); - - assertEquals(1, notificationCount.get()); - } - - @Test - public void testExecuteScheduledAlerts_ForOneTimeSeriesMultipleTriggers() { - ServiceFactory sFactory = system.getServiceFactory(); - UserService userService = sFactory.getUserService(); - - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = new Metric("scope", "metric"); - Map dps = new HashMap(); - dps.put(1000L, "11"); - dps.put(2000L, "21"); - dps.put(3000L, "31"); - metric.setDatapoints(_convertDatapoints(dps)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger1 = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger1", 10, 0); - _setTriggerId(trigger1, "100002"); - Trigger trigger2 = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger2", 5, 0); - _setTriggerId(trigger2, "100003"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new 
ArrayList(), 0); - _setNotificationId(notification, "100004"); - - alert.setTriggers(Arrays.asList(trigger1, trigger2)); - alert.setNotifications(Arrays.asList(notification)); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - spyAlertService.executeScheduledAlerts(1, 1000); - - assertEquals(2, notificationCount.get()); - } - - @Test - public void testExecuteScheduledAlerts_OnCooldown() { - ServiceFactory sFactory = system.getServiceFactory(); - UserService userService = sFactory.getUserService(); - - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = new Metric("scope", "metric"); - Map dps = new HashMap(); - dps.put(1000L, "11"); - dps.put(2000L, "21"); - dps.put(3000L, "31"); - metric.setDatapoints(_convertDatapoints(dps)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), - 600000); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - //This will set the notification on cooldown for the given metric and trigger. - spyAlertService.executeScheduledAlerts(1, 1000); - //This evaluation should not send notification. Hence notificationCount count would still be 1. 
- spyAlertService.executeScheduledAlerts(1, 1000); - - assertEquals(1, notificationCount.get()); - assertEquals(1, notification.getCooldownExpirationMap().size()); - } - - @Test - public void testExecuteScheduledAlerts_ForMoreThanOneTimeSeries() { - UserService userService = system.getServiceFactory().getUserService(); - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric1 = new Metric("scope1", "metric"); - Map dps1 = new HashMap(); - dps1.put(1000L, "1"); - dps1.put(2000L, "2"); - dps1.put(3000L, "3"); - metric1.setDatapoints(_convertDatapoints(dps1)); - - Metric metric2 = new Metric("scope2", "metric"); - Map dps2 = new HashMap(); - dps2.put(4000L, "11"); - dps2.put(5000L, "20"); - dps2.put(6000L, "30"); - metric2.setDatapoints(_convertDatapoints(dps2)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric1, metric2), alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - - assertEquals(1, notificationCount.get()); - } - - @Test - public void testExecuteScheduledAlerts_ClearNotification() { - UserService userService = system.getServiceFactory().getUserService(); - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = new Metric("scope", "metric"); - Map dps = new HashMap(); - dps.put(4000L, "11"); - dps.put(5000L, "20"); - dps.put(6000L, "30"); - metric.setDatapoints(_convertDatapoints(dps)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - assertEquals(1, notificationCount.get()); - //assertEquals(true, notification.isActiveForTriggerAndMetric(trigger, metric)); - - notificationCount.set(0); - clearCount.set(0); - - dps = new HashMap(); - dps.put(4000L, "1"); - dps.put(5000L, "2"); - dps.put(6000L, "3"); - metric.setDatapoints(_convertDatapoints(dps)); - - spyAlertService = 
_initializeSpyAlertServiceWithStubs(notificationCount, clearCount, Arrays.asList(metric), - alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - assertEquals(0, notificationCount.get()); - assertEquals(1, clearCount.get()); - //assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); - - } - - @Test - public void testExecuteScheduledAlerts_OnCooldownWithRefocusNotifier() { - ServiceFactory sFactory = system.getServiceFactory(); - UserService userService = sFactory.getUserService(); - - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = new Metric("scope", "metric"); - Map dps = new HashMap(); - dps.put(1000L, "11"); - dps.put(2000L, "21"); - dps.put(3000L, "31"); - metric.setDatapoints(_convertDatapoints(dps)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, RefocusNotifier.class.getName(), new ArrayList(), - 600000); //cool down logic does not apply to Refocus notifier - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - //This will set the notification on cooldown for the given metric and trigger. - spyAlertService.executeScheduledAlerts(1, 1000); - //This evaluation should still send notification for refocus. Hence notificationCount count would increase by 1. 
- spyAlertService.executeScheduledAlerts(1, 1000); - - assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless - - assertEquals(2, notificationCount.get()); //notification was sent out even on cool down for refocus - assertEquals(0, notification.getCooldownExpirationMap().size()); //refocuse notifier does not record/persist cooldown info - } - - @Test - public void testExecuteScheduledAlerts_ClearNotificationWithRefocusNotifier() { - UserService userService = system.getServiceFactory().getUserService(); - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric = new Metric("scope", "metric"); - Map dps = new HashMap(); - dps.put(4000L, "11"); - dps.put(5000L, "20"); - dps.put(6000L, "30"); - metric.setDatapoints(_convertDatapoints(dps)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, RefocusNotifier.class.getName(), new ArrayList(), 0); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric), alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - assertEquals(1, notificationCount.get()); - assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless - - notificationCount.set(0); - clearCount.set(0); - - dps = new HashMap(); - dps.put(4000L, "1"); - dps.put(5000L, "2"); - dps.put(6000L, "3"); - metric.setDatapoints(_convertDatapoints(dps)); - - spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, Arrays.asList(metric), - alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - assertEquals(0, notificationCount.get()); - assertEquals(1, clearCount.get()); - assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless - - } - - - @Test - public void testExecuteScheduledAlerts_AlertWithMultipleMetricsNotificationSentForEach() { - UserService userService = system.getServiceFactory().getUserService(); - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric1 = new Metric("scope1", "metric"); - Map dps1 = new HashMap(); - dps1.put(1000L, "11"); - dps1.put(2000L, "20"); - dps1.put(3000L, "30"); - metric1.setDatapoints(_convertDatapoints(dps1)); - - Metric metric2 = new Metric("scope2", "metric"); - Map dps2 = new HashMap(); - dps2.put(4000L, "11"); - dps2.put(5000L, "20"); - dps2.put(6000L, "30"); - metric2.setDatapoints(_convertDatapoints(dps2)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, 
"100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 300000); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric1, metric2), alert, notification); - - spyAlertService.executeScheduledAlerts(10, 1000); - - assertEquals(2, notificationCount.get()); - assertEquals(2, notification.getCooldownExpirationMap().size()); - assertEquals(2, notification.getActiveStatusMap().size()); - } - - /** - * This test case is for the following scenario: - * - * Evaluation1: - * - metric1 violates threshold, notification sent out, notification set on cooldown for metric1. - * - metric2 does not violate threshold. - * Evaluation2: - * - metric1 goes back to normal state, since notification was in active state a clear notification is sent out. - * - metric2 violates threshold, notification is sent out, notification set on cooldown for metric2. - */ - @Test - public void testExecuteScheduledAlerts_Scenario1() { - UserService userService = system.getServiceFactory().getUserService(); - final AtomicInteger notificationCount = new AtomicInteger(0); - final AtomicInteger clearCount = new AtomicInteger(0); - - Metric metric1 = new Metric("scope1", "metric"); - Map dps1 = new HashMap(); - dps1.put(1000L, "11"); - dps1.put(2000L, "20"); - dps1.put(3000L, "30"); - metric1.setDatapoints(_convertDatapoints(dps1)); - - Metric metric2 = new Metric("scope2", "metric"); - Map dps2 = new HashMap(); - dps2.put(4000L, "1"); - dps2.put(5000L, "2"); - dps2.put(6000L, "3"); - metric2.setDatapoints(_convertDatapoints(dps2)); - - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); - _setAlertId(alert, "100001"); - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); - _setTriggerId(trigger, "100002"); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 300000); - _setNotificationId(notification, "100003"); - - alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); - alert.setNotifications(Arrays.asList(new Notification[] { notification })); - notification.setTriggers(alert.getTriggers()); - alert.setEnabled(true); - - DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, - Arrays.asList(metric1, metric2), alert, notification); - spyAlertService.executeScheduledAlerts(10, 1000); - - assertEquals(1, notificationCount.get()); - assertEquals(1, notification.getCooldownExpirationMap().size()); - assertEquals(1, notification.getActiveStatusMap().size()); - - notificationCount.set(0); - clearCount.set(0); - - metric1 = new Metric("scope1", "metric"); - dps1 = new HashMap(); - dps1.put(1000L, "1"); - dps1.put(2000L, "2"); - dps1.put(3000L, "3"); - metric1.setDatapoints(_convertDatapoints(dps1)); - - metric2 = new Metric("scope2", "metric"); - dps2 = new HashMap(); - dps2.put(4000L, "11"); - dps2.put(5000L, "21"); - dps2.put(6000L, "31"); - metric2.setDatapoints(_convertDatapoints(dps2)); - - spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, 
Arrays.asList(metric1, metric2), - alert, notification); - spyAlertService.executeScheduledAlerts(10, 1000); - - assertEquals(1, notificationCount.get()); - assertEquals(1, clearCount.get()); - assertEquals(2, notification.getCooldownExpirationMap().size()); - assertEquals(1, _getActiveSize(notification.getActiveStatusMap())); - } - - private int _getActiveSize(Map activeStatusMap) { - int size = 0; - for(Map.Entry entry : activeStatusMap.entrySet()) { - if(entry.getValue()) { - size++; - } - } - return size; - } - - @Test - public void testGetTriggerFiredDatapointTime() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 90; - long inertia = 10; - long startTime = 1; - long expectedTriggerTime; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - - datapoints.putAll(_createDatapoints(inertia + 1, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - expectedTriggerTime = datapoints.size(); - - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - - assertEquals(expectedTriggerTime, actualValue); - startTime = datapoints.size() + 1; - datapoints.putAll(_createDatapoints(201, thresholdValue, startTime, false)); - metric.setDatapoints(_convertDatapoints(datapoints)); - actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - assertEquals(expectedTriggerTime, actualValue); - startTime = datapoints.size() + 1; - datapoints.putAll(_createDatapoints(inertia - 1, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - assertEquals(expectedTriggerTime, actualValue); - startTime = datapoints.size() + 1; - datapoints.putAll(_createDatapoints(inertia + 1, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - expectedTriggerTime = datapoints.size(); - assertEquals(expectedTriggerTime, actualValue); - startTime = datapoints.size() + 1; - datapoints.putAll(_createDatapoints(201, thresholdValue, startTime, false)); - metric.setDatapoints(_convertDatapoints(datapoints)); - actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - assertEquals(expectedTriggerTime, actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenOneDatapointAndZeroInertia() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 90; - long inertia = 0; - long startTime = 1000; - long expectedTriggerTime; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - - datapoints.putAll(_createDatapoints(1, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - 
expectedTriggerTime = startTime; - - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - - assertEquals(expectedTriggerTime, actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenOneDatapointAndInertiaOne() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 90; - long inertia = 1; - long startTime = 1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - - datapoints.putAll(_createDatapoints(1, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - - Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - - assertNull(actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWehnNoDatapoints() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 90; - long inertia = 0; - long startTime = 1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - - datapoints.putAll(_createDatapoints(0, thresholdValue, startTime, true)); - metric.setDatapoints(_convertDatapoints(datapoints)); - - Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); - - assertNull(actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - datapoints.put(0L, "1"); - datapoints.put(inertia, "1"); - metric.setDatapoints(_convertDatapoints(datapoints)); - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - long expectedTriggerTime=5*60*1000; - assertEquals(expectedTriggerTime, actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps2() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - datapoints.put(0L, "1"); - datapoints.put(3*60*1000L, "1"); - datapoints.put(inertia, "1"); - metric.setDatapoints(_convertDatapoints(datapoints)); - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - long 
expectedTriggerTime=5*60*1000; - assertEquals(expectedTriggerTime, actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps3() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - datapoints.put(0L, "1"); - datapoints.put(9*60*1000L, "1"); - metric.setDatapoints(_convertDatapoints(datapoints)); - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - long expectedTriggerTime=9*60*1000; - assertEquals(expectedTriggerTime, actualValue); - } - - private Map _convertDatapoints(Map datapoints) { - Map newDps = new HashMap<>(); - for(Map.Entry dp : datapoints.entrySet()) { - newDps.put(dp.getKey(), Double.parseDouble(dp.getValue())); - } - return newDps; - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps4() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap(); - datapoints.put(0L, 2.0); - datapoints.put(3*60*1000L, 2.0); - datapoints.put(6*60*1000L, 2.0); - datapoints.put(7*60*1000L, 0.0); - datapoints.put(9*60*1000L, 2.0); - metric.setDatapoints(datapoints); - long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - long expectedTriggerTime = 6 * 60 * 1000; - assertEquals(expectedTriggerTime, actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps5() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap<>(); - datapoints.put(0L, 2.0); - datapoints.put(3*60*1000L, 0.0); - datapoints.put(6*60*1000L, 2.0); - datapoints.put(7*60*1000L, 0.0); - datapoints.put(9*60*1000L, 2.0); - metric.setDatapoints(datapoints); - Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - assertNull(actualValue); - } - - @Test - public void testGetTriggerFiredDatapointTimeWhenMissingTimeStampsReturnNull() { - UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); - Metric metric = new Metric("scope", "metric"); - double thresholdValue = 1; - long inertia = 5*60*1000; - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); - Map datapoints = new HashMap<>(); - datapoints.put(0L, 1.0); - 
datapoints.put(2*60*1000L, 0.0); - datapoints.put(inertia, 1.0); - metric.setDatapoints(datapoints); - Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); - assertNull(actualValue); - } - - - private DefaultAlertService _initializeSpyAlertServiceWithStubs(final AtomicInteger notificationCount, final AtomicInteger clearCount, - List metrics, Alert alert, Notification notification) { - DefaultAlertService spyAlertService = spy(alertService); - EntityManager em = Persistence.createEntityManagerFactory("argus-pu").createEntityManager(); - when(_emProviderMock.get()).thenReturn(em); - - Long enqueueTime = System.currentTimeMillis(); - ObjectMapper mapper = new ObjectMapper(); - SimpleModule module = new SimpleModule(); - module.addSerializer(Alert.class, new Alert.Serializer()); - module.addSerializer(Trigger.class, new Trigger.Serializer()); - module.addSerializer(Notification.class, new Notification.Serializer()); - module.addSerializer(PrincipalUser.class, new Alert.PrincipalUserSerializer()); - mapper.registerModule(module); - - try { - AlertWithTimestamp alertWithTimestamp = new AlertWithTimestamp(mapper.writeValueAsString(alert), enqueueTime); - when(_mqServiceMock.dequeue(eq(MQQueue.ALERT.getQueueName()), eq(AlertWithTimestamp.class), anyInt(), anyInt())). - thenReturn(Arrays.asList(alertWithTimestamp)); - } catch (JsonProcessingException e) { - fail("Failed to serialize Alert"); - } - - try { - doReturn(alert).when(_mapper).readValue(mapper.writeValueAsString(alert), Alert.class); - } catch (IOException e) { - fail("Failed to deserialize Alert"); - } - - when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(metrics); - - doAnswer(new Answer() { - - @Override - public Notification answer(InvocationOnMock invocation) throws Throwable { - return invocation.getArgumentAt(1, Notification.class); - } - }).when(spyAlertService).mergeEntity(em, notification); - - - doAnswer(new Answer() { - - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - return null; - } - - }).when(spyAlertService).updateNotificationsActiveStatusAndCooldown(Arrays.asList(notification)); - - - doAnswer(new Answer() { - - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - notificationCount.incrementAndGet(); - return null; - } - }).when(spyAlertService).sendNotification(any(Trigger.class), - any(Metric.class), - any(History.class), - any(Notification.class), - any(Alert.class), - anyLong(), - anyLong()); - - doAnswer(new Answer() { - - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - clearCount.incrementAndGet(); - return null; - } - }).when(spyAlertService).sendClearNotification(any(Trigger.class), - any(Metric.class), - any(History.class), - any(Notification.class), - any(Alert.class), - anyLong()); - - return spyAlertService; - } - - private Metric _createMetric(String scope, String metricName, int triggerMinValue, int inertiaPeriod) { - - long startTime = 1L; - inertiaPeriod = inertiaPeriod / (1000 * 60); - - Metric result = new Metric(scope, metricName); - Map datapoints = new HashMap<>(); - int index = 0; - - for (int j = 0; j <= random.nextInt(10); j++) { - datapoints.put(startTime + (++index * 60000L), (double)(random.nextInt(triggerMinValue))); - } - for (int j = 0; j <= inertiaPeriod; j++) { - datapoints.put(startTime + (++index * 60000L), (double)(triggerMinValue + random.nextInt(10))); - } - for (int j = 0; j <= random.nextInt(10); j++) { - 
datapoints.put(startTime + (++index * 60000L), (double)(random.nextInt(triggerMinValue))); - } - result.setDatapoints(datapoints); - result.setDisplayName(createRandomName()); - result.setUnits(createRandomName()); - return result; - } - - private void _setAlertId(Alert alert, String id) { - try { - Field idField = Alert.class.getSuperclass().getDeclaredField("id"); - idField.setAccessible(true); - idField.set(alert, new BigInteger(id)); - } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { - fail("Failed to set alert id using reflection."); - } - } - - private void _setTriggerId(Trigger trigger, String id) { - try { - Field idField = Trigger.class.getSuperclass().getDeclaredField("id"); - idField.setAccessible(true); - idField.set(trigger, new BigInteger(id)); - } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { - fail("Failed to set alert id using reflection."); - } - } - - private void _setNotificationId(Notification notification, String id) { - try { - Field idField = Notification.class.getSuperclass().getDeclaredField("id"); - idField.setAccessible(true); - idField.set(notification, new BigInteger(id)); - } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { - fail("Failed to set alert id using reflection."); - } - } - - private Map _createDatapoints(long size, Double value, long startTime, boolean greaterThan) { - Map result = new HashMap(); - - for (int i = 0; i < size; i++) { - double dataPointValue = random.nextInt(value.intValue()) + (greaterThan ? (value + 2) : -1); - - result.put(startTime++, String.valueOf(dataPointValue)); - } - return result; - } - -} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/GlobalInterlockServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/GlobalInterlockServiceTest.java index 4094ada5a..4c415a675 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/GlobalInterlockServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/GlobalInterlockServiceTest.java @@ -28,74 +28,93 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.service.GlobalInterlockService.LockType; import org.junit.Test; import static org.junit.Assert.*; -public class GlobalInterlockServiceTest extends AbstractTest { +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + +public class GlobalInterlockServiceTest { private static final long EXPIRATION_MS = 750; + private SystemMain system; + private GlobalInterlockService globalInterlockService; + + @Before + public void setUp() { + system = TestUtils.getInstance(); + system.start(); + globalInterlockService = system.getServiceFactory().getGlobalInterlockService(); + } + + @After + public void tearDown() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + @Test public void testReleaseNonexistentScheduleLock() { - GlobalInterlockService service = system.getServiceFactory().getGlobalInterlockService(); - assertFalse(service.releaseLock(LockType.ALERT_SCHEDULING, String.valueOf(System.currentTimeMillis()))); + assertFalse(globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, String.valueOf(System.currentTimeMillis()))); } @Test public void testObtainReleaseLock() { - GlobalInterlockService service = system.getServiceFactory().getGlobalInterlockService(); - String key = service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); + String key = globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); - assertNull(service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note")); - assertTrue(service.releaseLock(LockType.ALERT_SCHEDULING, key)); - assertFalse(service.releaseLock(LockType.ALERT_SCHEDULING, key)); + assertNull(globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note")); + assertTrue(globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, key)); + assertFalse(globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, key)); } @Test public void testObtainRefreshLock() throws InterruptedException { - GlobalInterlockService service = system.getServiceFactory().getGlobalInterlockService(); - String key = service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); + String key = globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); assertNotNull(key); Thread.sleep(2000L); - String refreshed = service.refreshLock(LockType.ALERT_SCHEDULING, key, "note"); + String refreshed = globalInterlockService.refreshLock(LockType.ALERT_SCHEDULING, key, "note"); assertNotNull(refreshed); assertFalse(key.equals(refreshed)); - assertTrue(service.releaseLock(LockType.ALERT_SCHEDULING, refreshed)); - assertNull(service.refreshLock(LockType.ALERT_SCHEDULING, key, "note")); + assertTrue(globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, refreshed)); + assertNull(globalInterlockService.refreshLock(LockType.ALERT_SCHEDULING, key, "note")); } @Test public void testObtainOnExpiredScheduleLock() throws InterruptedException { - GlobalInterlockService service = system.getServiceFactory().getGlobalInterlockService(); - assertNotNull(service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note")); - assertNull(service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note")); + assertNotNull(globalInterlockService.obtainLock(EXPIRATION_MS, 
LockType.ALERT_SCHEDULING, "Note")); + assertNull(globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note")); Thread.sleep(2000L); - String key = service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); + String key = globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); assertNotNull(key); - service.releaseLock(LockType.ALERT_SCHEDULING, key); + globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, key); } @Test public void testLockExclusivity() { - GlobalInterlockService service = system.getServiceFactory().getGlobalInterlockService(); - String keyA = service.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); - String keyB = service.obtainLock(EXPIRATION_MS, LockType.COLLECTION_SCHEDULING, "Note"); + String keyA = globalInterlockService.obtainLock(EXPIRATION_MS, LockType.ALERT_SCHEDULING, "Note"); + String keyB = globalInterlockService.obtainLock(EXPIRATION_MS, LockType.COLLECTION_SCHEDULING, "Note"); - assertTrue(service.releaseLock(LockType.ALERT_SCHEDULING, keyA)); - assertTrue(service.releaseLock(LockType.COLLECTION_SCHEDULING, keyB)); + assertTrue(globalInterlockService.releaseLock(LockType.ALERT_SCHEDULING, keyA)); + assertTrue(globalInterlockService.releaseLock(LockType.COLLECTION_SCHEDULING, keyB)); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/HistoryServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/HistoryServiceIT.java index 7088f9517..af490f581 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/HistoryServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/HistoryServiceIT.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; import static org.junit.Assert.assertEquals; @@ -36,14 +36,14 @@ import org.junit.Test; import org.junit.experimental.categories.Category; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.History.JobStatus; @Category(IntegrationTest.class) -public class HistoryServiceIT extends AbstractTest { +public class HistoryServiceIT extends AbstractTestIT { private static final String expression = "-1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg"; @@ -60,7 +60,7 @@ public void testCreateHistory() { History actual = historyService.createHistory(job, "test", JobStatus.SUCCESS, 0); expected.setCreationTime(actual.getCreationTime()); - + assertEquals(expected, actual); } @@ -76,7 +76,7 @@ public void testFindByJob() { History expected = historyService.createHistory(job, "test", JobStatus.SUCCESS, 0); History actual = historyService.findByJob(job.getId(), 1).get(0); - + assertEquals(expected, actual); } @@ -90,12 +90,12 @@ public void testFindByJobAndStatus() { job = alertService.updateAlert(job); History expected = historyService.createHistory(job, "test", JobStatus.SUCCESS, 0); - + History actual = historyService.findByJobAndStatus(job.getId(), 1, JobStatus.SUCCESS).get(0); assertEquals(expected, actual); assertEquals(0, historyService.findByJobAndStatus(job.getId(), 1, JobStatus.FAILURE).size()); } - + } /* Copyright (c) 2016, Salesforce.com, Inc. 
All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MQServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MQServiceTest.java index bd8040d85..c3f7c08fe 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MQServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MQServiceTest.java @@ -28,10 +28,11 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; +import org.junit.Ignore; import org.junit.Test; import java.util.ArrayList; import java.util.List; @@ -40,7 +41,8 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -public class MQServiceTest extends AbstractTest { + +public class MQServiceTest extends AbstractTestIT { @Test public void testServiceIsSingleton() { diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ManagementServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ManagementServiceTest.java index 4678e00d9..308c2368b 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ManagementServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/ManagementServiceTest.java @@ -28,17 +28,38 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.PrincipalUser; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -public class ManagementServiceTest extends AbstractTest { +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +public class ManagementServiceTest { + + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } @Test public void testPrivilegeGrant() { diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MonitorServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MonitorServiceTest.java index b0f9db193..8e5eb8264 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MonitorServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/MonitorServiceTest.java @@ -28,17 +28,33 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.TestUtils; import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.service.monitor.DataLagMonitorGoldenMetric; import com.salesforce.dva.argus.service.monitor.DefaultMonitorService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemMain; +import org.junit.After; +import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.powermock.reflect.Whitebox; + +import javax.management.MBeanServer; +import javax.management.ObjectName; +import java.lang.management.ManagementFactory; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import static org.hamcrest.MatcherAssert.assertThat; @@ -47,21 +63,53 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + + +@RunWith(MockitoJUnitRunner.class) +public class MonitorServiceTest { + private static final double DOUBLE_COMPARISON_MAX_DELTA = 0.001; + private static final String HOSTNAME = SystemConfiguration.getHostname(); + private MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); + + private SystemMain system; + + @Before + public void setUp() { + system = TestUtils.getInstance(); + system.start(); + } + + @After + public void tearDown() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + @Mock + private TSDBService tsdbMock; -public class MonitorServiceTest extends AbstractTest { + @Mock + private MetricService metricServiceMock; @Test public void testServiceIsSingleton() { assertTrue(system.getServiceFactory().getMonitorService() == system.getServiceFactory().getMonitorService()); } - @Test(timeout = 10000L) - public void testConcurrentUpdates() throws NoSuchFieldException, IllegalAccessException, InterruptedException { - final MonitorService _monitorService = system.getServiceFactory().getMonitorService(); + @Test(timeout = 5000L) + public void testGaugeConcurrentUpdates() throws Exception { + final String metricName = "test.custom.metric"; + final DefaultMonitorService _monitorService = (DefaultMonitorService) system.getServiceFactory().getMonitorService(); + _monitorService.setTSDBService(tsdbMock); Field field = DefaultMonitorService.class.getDeclaredField("TIME_BETWEEN_RECORDINGS"); + final int TIME_BETWEEN_RECORDINGS_MS = 2000; + final int RESET_TIME_AFTER_TEST_MS = 3000; // enough time for reset to have occurred. 
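+ // TIME_BETWEEN_RECORDINGS is a private static field; the test shortens it via reflection so the MonitorThread records (and resets gauges) within the test timeout.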
field.setAccessible(true); - field.setLong(null, 10 * 1000); + field.setLong(null, TIME_BETWEEN_RECORDINGS_MS); _monitorService.startRecordingCounters(); final CountDownLatch gate = new CountDownLatch(1); @@ -77,7 +125,7 @@ public void run() { try { gate.await(); for (int j = 0; j < iterations; j++) { - _monitorService.modifyCustomCounter("test.custom.metric", 1, Collections.emptyMap()); + _monitorService.modifyCustomCounter(metricName, 1, Collections.emptyMap()); } } catch (InterruptedException ex) { org.junit.Assert.fail("This should never happen."); @@ -91,15 +139,94 @@ public void run() { } gate.countDown(); for (Thread worker : workers) { - worker.join(5000); + worker.join(1500); + } + + // gauge value should be iterations * workerCount + double customCounter = _monitorService.getCustomCounter(metricName, Collections.emptyMap()); + double expectedCounterValue = iterations * workerCount; + assertEquals(expectedCounterValue, customCounter, DOUBLE_COMPARISON_MAX_DELTA); + + // jmx gauge should still be 0, since it is initialized to 0 and only updated when the MonitorThread records + ObjectName jmxName = new ObjectName("ArgusMetrics:type=Gauge,scope=argus.custom,metric=" + metricName + ",host=" + HOSTNAME); + double jmxValue = (Double)mbeanServer.getAttribute(jmxName, "Value"); + assertEquals(0, jmxValue, DOUBLE_COMPARISON_MAX_DELTA); + + // wait for MonitorThread to run + Thread.sleep(RESET_TIME_AFTER_TEST_MS); + + // gauge value should have been reset (cleared entries read back as NaN) + customCounter = _monitorService.getCustomCounter(metricName, Collections.emptyMap()); + assertEquals(Double.NaN, customCounter, DOUBLE_COMPARISON_MAX_DELTA); + + // jmx gauge value should now reflect iterations * workerCount + jmxValue = (Double)mbeanServer.getAttribute(jmxName, "Value"); + assertEquals(expectedCounterValue, jmxValue, DOUBLE_COMPARISON_MAX_DELTA); + } + + @Test(timeout = 5000L) + public void testMonotonicCounterConcurrentUpdates() throws Exception { + final MonitorService.Counter counter = MonitorService.Counter.ALERTS_SCHEDULED; + final DefaultMonitorService _monitorService = (DefaultMonitorService) system.getServiceFactory().getMonitorService(); + _monitorService.setTSDBService(tsdbMock); + Field field = DefaultMonitorService.class.getDeclaredField("TIME_BETWEEN_RECORDINGS"); + + final int TIME_BETWEEN_RECORDINGS_MS = 2000; + final int RESET_TIME_AFTER_TEST_MS = 3000; // enough time for reset to have occurred.
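+ // Same reflection override as the gauge test above, shortening the private static recording interval so the flush cycle occurs while the test is running.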
+ field.setAccessible(true); + field.setLong(null, TIME_BETWEEN_RECORDINGS_MS); + _monitorService.startRecordingCounters(); + + final CountDownLatch gate = new CountDownLatch(1); + int workerCount = 3; + final int iterations = 100; + Thread[] workers = new Thread[workerCount]; + + for (int i = 0; i < workers.length; i++) { + Thread thread = new Thread(new Runnable() { + + @Override + public void run() { + try { + gate.await(); + for (int j = 0; j < iterations; j++) { + _monitorService.modifyCounter(counter, 1, Collections.emptyMap()); + } + } catch (InterruptedException ex) { + org.junit.Assert.fail("This should never happen."); + } + } + }); + + thread.setDaemon(true); + thread.start(); + workers[i] = thread; + } + gate.countDown(); + for (Thread worker : workers) { + worker.join(1500); } - int customCounter = (int) _monitorService.getCustomCounter("test.custom.metric", Collections.emptyMap()); + // counter value should be iterations * workerCount + double expectedCounterValue = iterations * workerCount; + double customCounter = _monitorService.getCounter(counter, Collections.emptyMap()); + assertEquals(expectedCounterValue, customCounter, DOUBLE_COMPARISON_MAX_DELTA); + + // jmx counter value should be iterations * workerCount as well + ObjectName jmxName = new ObjectName("ArgusMetrics:type=Counter,scope=argus.core,metric=" + counter.getMetric() + counter.getJMXMetricNameSuffix() + ",host=" + HOSTNAME); + double jmxValue = (Double)mbeanServer.getAttribute(jmxName, "Value"); + assertEquals(expectedCounterValue, jmxValue, DOUBLE_COMPARISON_MAX_DELTA); + + // wait for MonitorThread to run, which clears gauge metrics + Thread.sleep(RESET_TIME_AFTER_TEST_MS); + + // after recording, the in-memory counter entry has been flushed, so the raw read returns NaN + customCounter = _monitorService.getCounter(counter, Collections.emptyMap()); + assertEquals(Double.NaN, customCounter, DOUBLE_COMPARISON_MAX_DELTA); - assertEquals(iterations * workerCount, customCounter); - _monitorService.resetCustomCounters(); - customCounter = (int) _monitorService.getCustomCounter("test.custom.metric", Collections.emptyMap()); - assertEquals(0, customCounter); + // the jmx counter value, however, should NOT have reset + jmxValue = (Double)mbeanServer.getAttribute(jmxName, "Value"); + assertEquals(expectedCounterValue, jmxValue, DOUBLE_COMPARISON_MAX_DELTA); } @Test diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NamespaceServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NamespaceServiceTest.java index 5f7ee9525..21716de2e 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NamespaceServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NamespaceServiceTest.java @@ -28,10 +28,9 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE.
*/ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Namespace; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.system.SystemException; @@ -44,83 +43,100 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -public class NamespaceServiceTest extends AbstractTest { - - private PrincipalUser _admin; - private NamespaceService _namespaceService; - private UserService _userService; - - @Before - public void setupNamespaceServiceTest() { - _namespaceService = system.getServiceFactory().getNamespaceService(); - _userService = system.getServiceFactory().getUserService(); - _admin = system.getServiceFactory().getUserService().findAdminUser(); +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +public class NamespaceServiceTest { + + static private SystemMain system; + static private UserService userService; + static private PrincipalUser admin; + static private NamespaceService namespaceService; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + namespaceService = system.getServiceFactory().getNamespaceService(); + userService = system.getServiceFactory().getUserService(); + admin = system.getServiceFactory().getUserService().findAdminUser(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } } @Test public void testCreateNamespace() { - PrincipalUser user = _userService.findAdminUser(); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + Namespace namespace = new Namespace("namespace1", user); - namespace = _namespaceService.createNamespace(namespace); + namespace = namespaceService.createNamespace(namespace); assertTrue(namespace.getId() != null && namespace.getId().compareTo(BigInteger.ZERO) > 0); } @Test(expected = SystemException.class) public void testNamespaceUnique() { - PrincipalUser user = _userService.findAdminUser(); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + Namespace namespace = new Namespace("namespace2", user); - _namespaceService.createNamespace(namespace); - _namespaceService.createNamespace(namespace); + namespaceService.createNamespace(namespace); + namespaceService.createNamespace(namespace); } @Test public void testFindNamespaceByPrimaryKey() { - PrincipalUser user = _userService.findAdminUser(); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + Namespace namespace = new Namespace("namespace3", user); - namespace = _namespaceService.createNamespace(namespace); + namespace = namespaceService.createNamespace(namespace); - Namespace retrievedNamespace = _namespaceService.findNamespaceByPrimaryKey(namespace.getId()); + Namespace retrievedNamespace = namespaceService.findNamespaceByPrimaryKey(namespace.getId()); assertTrue(namespace.equals(retrievedNamespace)); } @Test public void testAddAdditionalUsersToNamespace() { - PrincipalUser user = _userService.findAdminUser(); - PrincipalUser user1 = new PrincipalUser(_admin, "abc", "abc@xyz.com"); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + 
PrincipalUser user1 = new PrincipalUser(admin, "abc1", "abc1@xyz.com"); + Namespace namespace = new Namespace("namespace4", user); - namespace = _namespaceService.createNamespace(namespace); + namespace = namespaceService.createNamespace(namespace); namespace.getUsers().add(user1); - namespace = _namespaceService.updateNamespace(namespace); + namespace = namespaceService.updateNamespace(namespace); assertTrue(namespace.getUsers().size() == 2); } @Test public void testUserIsPermitted() { - PrincipalUser user = _userService.findAdminUser(); - PrincipalUser user1 = new PrincipalUser(_admin, "abc", "abc@xyz.com"); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + PrincipalUser user1 = new PrincipalUser(admin, "abc2", "abc2@xyz.com"); + Namespace namespace = new Namespace("namespace5", user); - namespace = _namespaceService.createNamespace(namespace); - assertTrue(_namespaceService.isPermitted(namespace.getQualifier(), user)); - assertFalse(_namespaceService.isPermitted(namespace.getQualifier(), user1)); + namespace = namespaceService.createNamespace(namespace); + assertTrue(namespaceService.isPermitted(namespace.getQualifier(), user)); + assertFalse(namespaceService.isPermitted(namespace.getQualifier(), user1)); } @Test public void testAdditionalUserIsPermitted() { - PrincipalUser user = _userService.findAdminUser(); - PrincipalUser user1 = new PrincipalUser(_admin, "abc", "abc@xyz.com"); - Namespace namespace = new Namespace("namespace", user); + PrincipalUser user = userService.findAdminUser(); + PrincipalUser user1 = new PrincipalUser(admin, "abc3", "abc3@xyz.com"); + Namespace namespace = new Namespace("namespace6", user); - namespace = _namespaceService.createNamespace(namespace); + namespace = namespaceService.createNamespace(namespace); namespace.getUsers().add(user1); - namespace = _namespaceService.updateNamespace(namespace); - assertTrue(_namespaceService.isPermitted(namespace.getQualifier(), user)); - assertTrue(_namespaceService.isPermitted(namespace.getQualifier(), user1)); + namespace = namespaceService.updateNamespace(namespace); + assertTrue(namespaceService.isPermitted(namespace.getQualifier(), user)); + assertTrue(namespaceService.isPermitted(namespace.getQualifier(), user1)); } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierIT.java index 3e2a09606..56f94cf48 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierIT.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; import static org.junit.Assert.assertFalse; @@ -42,7 +42,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.Annotation; @@ -58,7 +58,7 @@ import static org.junit.Assert.fail; @Category(IntegrationTest.class) -public class NotifierIT extends AbstractTest { +public class NotifierIT extends AbstractTestIT { private static final String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierTest.java index 415d5b8ea..05695f962 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/NotifierTest.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ - + package com.salesforce.dva.argus.service; import static org.junit.Assert.assertEquals; @@ -44,7 +44,6 @@ import com.salesforce.dva.argus.util.AlertUtils; import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.Notification; @@ -55,15 +54,42 @@ import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; -public class NotifierTest extends AbstractTest { +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +public class NotifierTest { private static final String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + @Test public void testDBNotifier() { UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert_name", expression, "* * * * *"); + String alertName = "alert_name-" + TestUtils.createRandomName(); + Alert alert = new Alert(userService.findAdminUser(), + userService.findAdminUser(), + alertName, + expression, + "* * * * *"); Notification notification = new Notification("notification_name", alert, "notifier_name", new ArrayList(), 23); Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "trigger_name", 2D, 5); @@ -75,7 +101,7 @@ public void testDBNotifier() { NotificationContext context = new NotificationContext(alert, alert.getTriggers().get(0), notification, 1418319600000L, 0.0, new Metric("scope", "metric"), history); - int count = 1 + random.nextInt(5); + int count = 1 + TestUtils.random.nextInt(5); for (int i = 0; i < count; i++) { Notifier notifier = 
system.getServiceFactory().getAlertService().getNotifier(SupportedNotifier.DATABASE); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/OAuthServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/OAuthServiceTest.java index 87a98cbce..98b6d8b19 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/OAuthServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/OAuthServiceTest.java @@ -30,26 +30,40 @@ */ package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.OAuthAuthorizationCode; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.sql.Timestamp; +import org.junit.BeforeClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; +import org.junit.AfterClass; + + /** * @author Chandravyas Annakula (cannakula@salesforce.com) */ -public class OAuthServiceTest extends AbstractTest { - OAuthAuthorizationCodeService authService; - private UserService userService; - - @Before - @Override - public void setUp() { - super.setUp(); - authService = system.getServiceFactory().getOAuthAuthorizationCodeService(); +public class OAuthServiceTest { + static private SystemMain system; + static private OAuthAuthorizationCodeService authService; + static private UserService userService; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); userService = system.getServiceFactory().getUserService(); + authService = system.getServiceFactory().getOAuthAuthorizationCodeService(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } } @Test @@ -188,8 +202,8 @@ public void testAcceptOauthInvalidParams() { authService.updateUserId(authCode, "", "test_user"); authService.updateUserId(authCode, state, ""); } - + } -/* Copyright (c) 2018, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2018, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/RefocusServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/RefocusServiceTest.java new file mode 100644 index 000000000..0b61dce7b --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/RefocusServiceTest.java @@ -0,0 +1,1094 @@ +package com.salesforce.dva.argus.service; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.*; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.stream.IntStream; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +import javax.persistence.EntityManager; + +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.service.alert.notifier.RefocusForwarder; +import com.salesforce.dva.argus.service.alert.notifier.RefocusProperty; + +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import com.salesforce.dva.argus.system.SystemMain; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.junit.*; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.junit.experimental.categories.Category; +import com.salesforce.dva.argus.AbstractTestIT; +import com.salesforce.dva.argus.IntegrationTest; + +import org.apache.http.impl.client.*; +import org.apache.http.HttpStatus; + +import java.util.function.Supplier; +import java.util.Iterator; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.inject.Provider; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InterruptedIOException; + + +// ===================================================================================================== +// This test is a unit test of the RefocusForwarder class. +// With correct configuration, it can be pointed at Refocus itself or the Refocus sandbox. +// By default it is a fully mocked unit test of the RefocusForwarder. +// +// The variables relevant to configuration of this are: +// test_mode : UNIT_TEST or REFOCUS. Mocked when UNIT_TEST, talks to a Refocus instance when not. +// use_sandbox: Selects the refocus sandbox or refocus. +// use_proxy: Adds proxy configuration. +// use_personal_ssl_certs: Needed when you connect from your desktop. +// +// IMPORTANT - you should create the relevant content in the Refocus instance before running these tests. 
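+// For example, a desktop run against the sandbox would use settings like: test_mode = TestMode.REFOCUS, use_sandbox = true, use_proxy = false, use_personal_ssl_certs = true, and a real token in my_sandbox_user_token (the values shown here are illustrative, not real credentials).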
+// FUTURE - migrate this to an Integration Test and enable +// ==================================================================================================== + +@Ignore("Convert to Integration Test") +@RunWith(org.mockito.junit.MockitoJUnitRunner.class) +public class RefocusServiceTest { + + @Mock private Provider _emProviderMock; + @Mock private HistoryService _historyServiceMock; + @Mock private ObjectMapper _mapper; + + private SystemMain system; + private SystemConfiguration _configuration; + private AtomicInteger _jobCounter; + private ExecutorService _executor; + private RefocusForwarder refocusForwarder; + + private final Logger LOGGER = LoggerFactory.getLogger(RefocusServiceTest.class); + private static final boolean mapToErrorLogging = false; // call LOGGER.error for all logs when true. + + + enum TestMode { UNIT_TEST, REFOCUS }; + + // This is the unit test config - enable before checking in. + private TestMode test_mode = TestMode.UNIT_TEST; + private boolean use_proxy = false; // doesn't matter for UNIT_TEST mode. + private boolean use_sandbox = true; // doesn't matter for UNIT_TEST mode. + private boolean use_personal_ssl_certs = true; // doesn't matter for UNIT_TEST mode. + + + // Note - replace anything you need to here if you switch to TestMode.REFOCUS + private static final String argus_refocus_user_name=""; + private static final String argus_refocus_user_password=""; + private static final String refocus_prd_proxy = "myhostname.abc.com"; + private static final String refocus_prd_proxy_port = "8080"; + private static final String argus_user_refocus_user_token = ""; + private static final String argus_user_refocus_sandbox_user_token = ""; + + // These values are needed to configure desktop testing against Refocus or the Refocus Sandbox + // TODO - write directions for constructing the keystore from the SSL Cert & Key (or write a function that creates the keystore in memory from these 2 files.) + private static final String my_sandbox_user_token = ""; + private static final String my_private_ssl_keystore_path = ""; // + private static final String my_private_ssl_keystore_password = ""; + private static final String my_private_ssl_key_password = ""; + + + // @Before + public void refocus_setup() { + + _configuration = system.getConfiguration(); + + _configuration.setProperty("system.property.refocus.enabled", "true"); // How to set property for testing? + + // This mode allows the test to be run against a Refocus instance rather than verifying the forwarder behavior through mocking. + if (test_mode == TestMode.REFOCUS) + { + if (!use_sandbox) + { + _configuration.setProperty(RefocusProperty.REFOCUS_ENDPOINT.getName(), "https://refocus.internal.salesforce.com"); + _configuration.setProperty(RefocusProperty.REFOCUS_TOKEN.getName(), argus_user_refocus_user_token); // ""); + } + else + { + _configuration.setProperty(RefocusProperty.REFOCUS_ENDPOINT.getName(), "https://refocus-sandbox.internal.salesforce.com"); + _configuration.setProperty(RefocusProperty.REFOCUS_TOKEN.getName(), my_sandbox_user_token ); // ""); + } + + if (use_proxy) + { + _configuration.setProperty(RefocusProperty.REFOCUS_PROXY_HOST.getName(), refocus_prd_proxy); // e.g. 
myhostname.abc.com + _configuration.setProperty(RefocusProperty.REFOCUS_PROXY_PORT.getName(), refocus_prd_proxy_port); + } + + if (use_personal_ssl_certs) + { + _configuration.setProperty(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PATH.getName(), my_private_ssl_keystore_path); + _configuration.setProperty(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_PASSWORD.getName(), my_private_ssl_keystore_password); + _configuration.setProperty(RefocusProperty.REFOCUS_CUSTOM_KEYSTORE_KEY_PASSWORD.getName(), my_private_ssl_key_password); + } + + _info(String.format("Test Mode: REFOCUS %s%s%s", (use_sandbox?"Refocus Sandbox":"Refocus Production"), (use_proxy?", via Proxy":""), (use_personal_ssl_certs?", SSL Certs Specified":""))); + _info(String.format(" URL: %s", _configuration.getValue(RefocusProperty.REFOCUS_ENDPOINT.getName(), "Unknown"))); + _info(String.format(" Token: %s", _configuration.getValue(RefocusProperty.REFOCUS_TOKEN.getName(), "Unknown"))); + _info(String.format(" ProxyHost: %s", _configuration.getValue(RefocusProperty.REFOCUS_PROXY_HOST.getName(), "Unknown"))); + _info(String.format(" ProxyPort: %s", _configuration.getValue(RefocusProperty.REFOCUS_PROXY_PORT.getName(), "Unknown"))); + } + else if (test_mode == TestMode.UNIT_TEST) + { + _configuration.setProperty(RefocusProperty.REFOCUS_MAX_REQUESTS_PER_MINUTE.getName(), "100000"); // any value above 6000 will result in no sleeping in the unit tests. + } + + + if (test_mode == TestMode.UNIT_TEST) + { + refocusForwarder = new RefocusForwarder(_configuration, _emProviderMock); + + CloseableHttpClient mockClient = mock(CloseableHttpClient.class); + refocusForwarder = spy(refocusForwarder); + when(refocusForwarder.getHttpClient()).thenReturn(mockClient); + } + else + { + refocusForwarder = new RefocusForwarder(_configuration, _emProviderMock); + } + + _jobCounter = new AtomicInteger(0); + _executor = null; + } + + + @BeforeClass + static public void setUpClass() { + } + + @AfterClass + static public void tearDownClass() { + } + + @Before + public void setup() { + + system = TestUtils.getInstanceWithInMemProps(); // getInstance(); + system.start(); + refocus_setup(); + } + + @After + public void tearDown() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + + // ----------------------------------------------------------------------------------- + // Support for Generating sequences of stubbed responses. 
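+ // Each StubResult models one simulated HTTP round trip: a delay (sleep_ms), a response code, and a JSON body; the StubIOExceptionResult / StubInterruptedIOExceptionResult subclasses throw from action() to mimic I/O failures.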
+ // ----------------------------------------------------------------------------------- + + static class StubResult { + int sleep_ms; + int resp_code; + JsonObject gson; + + public StubResult(int sleep_ms, int resp_code, JsonObject gson) + { + this.sleep_ms = sleep_ms; + this.resp_code = resp_code; + this.gson = gson; + } + + public void action() throws RuntimeException + { + } + } + + + static class StubIOExceptionResult extends StubResult { + + public StubIOExceptionResult(int sleep_ms, int resp_code, JsonObject gson) + { + super(sleep_ms, resp_code, gson); + } + + @Override + public void action() throws RuntimeException + { + throw new RuntimeException(new IOException()); + } + } + + static class StubInterruptedIOExceptionResult extends StubResult { + + public StubInterruptedIOExceptionResult(int sleep_ms, int resp_code, JsonObject gson) + { + super(sleep_ms, resp_code, gson); + } + + @Override + public void action() throws RuntimeException + { + throw new RuntimeException(new InterruptedIOException()); + } + } + + + static JsonObject makeUpsertResponse(String status, int jobId) { + JsonObject response = new JsonObject(); + response.addProperty("status", status); + response.addProperty("jobId", jobId); + return response; + } + + static StubResult make200(int sleep_ms, int txId) + { + return new StubResult(sleep_ms, HttpStatus.SC_OK, makeUpsertResponse("OK", txId)); + } + + // OK result + static StubResult make201(int sleep_ms, int txId) + { + return new StubResult(sleep_ms, HttpStatus.SC_CREATED, makeUpsertResponse("OK", txId)); + } + + static StubResult make204(int sleep_ms, int txId) + { + return new StubResult(sleep_ms, HttpStatus.SC_NO_CONTENT, makeUpsertResponse("OK", txId)); + } + + static StubResult makeTimedOut(int sleep_ms) // suggest timeouts >= 10000 + { + return new StubResult(sleep_ms, HttpStatus.SC_REQUEST_TIMEOUT, new JsonObject()); + } + + static StubResult makeTooManyRequests(int sleep_ms) + { + return new StubResult(sleep_ms, 429, new JsonObject()); + } + + static StubResult makeWithResponseCode(int sleep_ms, int respCode) + { + return new StubResult(sleep_ms, respCode, new JsonObject()); + } + + static StubResult makeWithIOException(int sleep_ms, int respCode) + { + return new StubIOExceptionResult(sleep_ms, respCode, new JsonObject()); + } + + static StubResult makeWithInterruptedIOException(int sleep_ms, int respCode) + { + return new StubInterruptedIOExceptionResult(sleep_ms, respCode, new JsonObject()); + } + + public static void SleepMs(int sleep_ms) + { + try { + if (sleep_ms > 0) + { + Thread.sleep(sleep_ms); + } + } + catch( InterruptedException e) { + throw new RuntimeException(e); + } + } + + static class ResultStubSupplier implements Supplier<RefocusForwarder.RefocusResponse> { + + protected JsonObject cloneJson(JsonObject r) + { + String jsonText = r.toString(); + JsonObject obj = (new JsonParser()).parse(jsonText).getAsJsonObject(); + return obj; + } + + public RefocusForwarder.RefocusResponse get() throws RuntimeException + { + return null; + } + } + + static class ResultListSupplier extends ResultStubSupplier { + + ArrayList<StubResult> results; + Iterator<StubResult> iterator; + RefocusForwarder forwarder; + StubResult defaultResult; + + public ResultListSupplier( ArrayList<StubResult> resultsToReturn, StubResult defaultResult, RefocusForwarder forwarder) + { + super(); + this.results = resultsToReturn; + this.iterator = resultsToReturn.iterator(); + this.forwarder = forwarder; + this.defaultResult = defaultResult; + } + + @Override + public RefocusForwarder.RefocusResponse get() throws RuntimeException + { + StubResult
r = this.iterator.hasNext() ? this.iterator.next() : defaultResult; + SleepMs(r.sleep_ms); + r.action(); + RefocusForwarder.RefocusResponse refocus_response = forwarder.makeResponse(r.resp_code, cloneJson(r.gson)); + return refocus_response; + } + } + + static class ResultCycleStubSupplier extends ResultStubSupplier { + + StubResult[] resultCycle; + Iterator iterator; + RefocusForwarder forwarder; + int pos = 0; + + public ResultCycleStubSupplier( StubResult[] resultCycle, RefocusForwarder forwarder) + { + this.resultCycle = resultCycle; + this.pos = 0; + this.forwarder = forwarder; + assert(this.resultCycle.length > 0); + } + + @Override + public RefocusForwarder.RefocusResponse get() throws RuntimeException + { + StubResult r = this.resultCycle[ this.pos % this.resultCycle.length ]; + this.pos = (this.pos + 1) % this.resultCycle.length; + SleepMs(r.sleep_ms); + r.action(); + RefocusForwarder.RefocusResponse refocus_response = forwarder.makeResponse(r.resp_code, cloneJson(r.gson)); + return refocus_response; + } + } + + void enqueueSamples(int numSamples, History history) + { + enqueueSamples(numSamples, history, 0, 0); + } + + void enqueueSamples(int numSamples, History history, int sleep_interval, long sleep_ms) + { + IntStream.range(0,numSamples).forEach( x -> { + + String value = String.format("%d", x); + String sa = String.format("a.b%d|c", x); + try + { + refocusForwarder.sendRefocusNotification(sa, value, "myuser", "mytoken", history); + + if (sleep_ms > 0 && sleep_interval > 0) + { + if ( ((x+1) % sleep_interval) == 0) + { + Thread.sleep(sleep_ms); + } + } + } + catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + + void waitUntilQueuedNotificationsAreProcessed(long extraSleep, long maxWait, long check_period_ms) throws InterruptedException + { + long start = System.currentTimeMillis(); + long duration = 0; + + while( ((maxWait > 0 && duration < maxWait) || (maxWait <= 0)) && + refocusForwarder.getNotificationsDelivered() + refocusForwarder.getNotificationsDiscarded() < refocusForwarder.getNotificationsEnqueued()) + { + Thread.sleep(check_period_ms); + duration = System.currentTimeMillis() - start; + } + Thread.sleep(extraSleep); + } + + void logForwarderStats() + { + int maxQueueLength = refocusForwarder.getMaxQueueLength(); + _info(MessageFormat.format("MaxQueueLength was {0} samples", maxQueueLength)); + } + + void verifyProcessed( long expect_delivered, long expect_discarded, long enqueued) + { + long delivered = refocusForwarder.getNotificationsDelivered(); + long discarded = refocusForwarder.getNotificationsDiscarded(); + long processed = refocusForwarder.getNotificationsProcessed(); + assertEquals(expect_delivered, delivered); + assertEquals(expect_discarded, discarded); + assertEquals(processed, delivered+discarded); + assertEquals(processed, enqueued); + } + + // ----------------------------------------------------------------------------------- + // Tests + // ----------------------------------------------------------------------------------- + + @Test + public void testRefocusForwarderStartStop() { + + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + startRefocusService(); + try + { + Thread.sleep(1000); + } + catch( InterruptedException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + verifyProcessed(0, 0, 0); + } + + + @Test + public void testRefocusForwarderSendSomeSamples() { + + int timeout = 45; + int extra_sleep = 2000; + int check_period_ms = 500; + 
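+ // timeout is the stubbed per-request latency (in UNIT_TEST mode), extra_sleep pads the final wait, and check_period_ms is the polling interval for waitUntilQueuedNotificationsAreProcessed below.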
RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + if (test_mode == TestMode.UNIT_TEST) + { + timeout = 10; + extra_sleep = 100; + check_period_ms = 20; + + ResultListSupplier responseSupplier = new ResultListSupplier(new ArrayList(), + make200(timeout, 1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + } + + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(1000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 12000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(1000, 0, 1000); + } + + @Test + public void testRefocusForwarderThrottled() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeTooManyRequests(120), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 12000, check_period_ms); + + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + + @Test + public void testRefocusForwarderTimedOut() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + int extra_sleep = 100; + int check_period_ms = 10; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeTimedOut(timeout), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + @Test + public void testRefocusForwarderInterruptedIOException() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeWithInterruptedIOException(timeout, -1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + @Test + public void testRefocusForwarderIOException() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + 
int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeWithIOException(timeout, -1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + @Test + public void testRefocusNoAuth() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout= 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeWithResponseCode(timeout, HttpStatus.SC_UNAUTHORIZED), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + @Test + public void testRefocusServiceUnavail() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout= 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + makeWithResponseCode(timeout, HttpStatus.SC_SERVICE_UNAVAILABLE), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(2000, history); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(0, 2000, 2000); + } + + @Test + public void testIntermittentThrottle() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + int enqueue_timeout = 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + + // NOTE: This assumes that the #retries == 3 + StubResult [] cycle = { + makeTooManyRequests(timeout), + makeTooManyRequests(timeout), + make200(timeout, 1) + }; + ResultCycleStubSupplier responseSupplier = new ResultCycleStubSupplier( cycle, refocusForwarder); + refocusForwarder.setStubSender(responseSupplier); + + int num_notifications = 100000; + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(num_notifications, history, 1000, enqueue_timeout); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", 
d.duration())); + logForwarderStats(); + verifyProcessed(num_notifications, 0, num_notifications); + } + + @Test + public void testIntermittentInterruptedIOException() { + + Assume.assumeTrue(test_mode == TestMode.UNIT_TEST); + + int timeout = 10; + int enqueue_timeout = 10; + int extra_sleep = 100; + int check_period_ms = 20; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + + // NOTE: This assumes that the #retries == 3 + StubResult [] cycle = { + makeWithInterruptedIOException(timeout, -1), + makeWithInterruptedIOException(timeout, -1), + make200(timeout, 1) + }; + ResultCycleStubSupplier responseSupplier = new ResultCycleStubSupplier( cycle, refocusForwarder); + refocusForwarder.setStubSender(responseSupplier); + + int num_notifications = 100000; + + startRefocusService(); + History history = mock(History.class); + try + { + enqueueSamples(num_notifications, history, 1000, enqueue_timeout); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(num_notifications, 0, num_notifications); + } + + @Ignore + @Test + public void test10mNotifications() { + + int timeout = 250; + int enqueue_timeout = 1000; + int extra_sleep = 10000; + int check_period_ms = 500; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + if (test_mode == TestMode.UNIT_TEST) + { + timeout = 10; + enqueue_timeout = 10; + extra_sleep = 100; + check_period_ms = 20; + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + make200(timeout, 1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + } + + int num_notifications = 600000; // 600k - should take 10mins + + startRefocusService(); + History history = mock(History.class); + try + { + // 1000 samples every second. 
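+                // (At ~1,000 samples per second, 600,000 notifications take roughly
+                // 600s, i.e. the ~10 minutes the test name refers to.)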
+ enqueueSamples(num_notifications, history, 1000, enqueue_timeout); + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 50000, check_period_ms); + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(num_notifications, 0, num_notifications); + } + + // @Ignore + @Test + public void testRefocusForwarderSend100kSamples() throws Exception { + + int timeout = 45; + int enqueue_timeout = 50; + int extra_sleep = 1000; + int check_period_ms = 500; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + if (test_mode == TestMode.UNIT_TEST) + { + timeout = 10; + enqueue_timeout = 10; + extra_sleep = 100; + check_period_ms = 20; + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + make200(timeout, 1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + } + + int num_samples = 100000; + + startRefocusService(); + History history = mock(History.class); + try + { + long start = System.currentTimeMillis(); + enqueueSamples(num_samples, history, 1000, enqueue_timeout); + long end = System.currentTimeMillis(); + long duration = end - start; + _info(MessageFormat.format("Enqueued {0} samples in {1}ms", num_samples, duration)); + + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 60000, check_period_ms); + + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(num_samples, 0, num_samples); + } + + @Test + public void test60kSamplesPerMinute() throws Exception { + + int timeout = 45; + int enqueue_timeout = 1000; + int extra_sleep = 1000; + int check_period_ms = 500; + RefocusForwarder.Duration d = new RefocusForwarder.Duration(); + + if (test_mode == TestMode.UNIT_TEST) + { + timeout = 10; + enqueue_timeout = 10; + extra_sleep = 100; + check_period_ms = 20; + + ResultListSupplier responseSupplier = new ResultListSupplier( + new ArrayList(), + make200(timeout, 1), + refocusForwarder); + + refocusForwarder.setStubSender(responseSupplier); + } + + int num_samples = 60000; + + startRefocusService(); + History history = mock(History.class); + try + { + long start = System.currentTimeMillis(); + enqueueSamples(num_samples, history, 1000, enqueue_timeout); + long end = System.currentTimeMillis(); + long duration = end - start; + _info(MessageFormat.format("Enqueued {0} samples in {1}ms", num_samples, duration)); + + waitUntilQueuedNotificationsAreProcessed(extra_sleep, 120000, check_period_ms); + + } + catch( InterruptedException e) + {} + catch( RuntimeException e) + {} + + stopRefocusService(); + _error(MessageFormat.format("Test duration= {0}ms", d.duration())); + logForwarderStats(); + verifyProcessed(num_samples, 0, num_samples); + } + + + + // ------------------------------------------------------------------------------------------------------- + // Thread Pool and Executor Support + // ------------------------------------------------------------------------------------------------------- + + private void startRefocusService() { + assert(_executor == null); + try { + _info("Starting Refocus service."); + _executor = startRefocusClientService(refocusForwarder); + _info("Refocus service started."); + } catch (Exception ex) { + throw new SystemException("There was a problem starting the Refocus Service.", ex); + } + } + 
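+    // NOTE: stopRefocusService() below follows the standard two-phase ExecutorService
+    // shutdown idiom: shutdownNow() interrupts the worker threads, and
+    // awaitTermination(60000, TimeUnit.MILLISECONDS) bounds how long we wait for
+    // them to exit before logging a warning and moving on.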
+ private void stopRefocusService() { + assert(_executor != null); + + try { + + _info("Stopping Refocus service."); + _executor.shutdownNow(); + try + { + if (!_executor.awaitTermination(60000, TimeUnit.MILLISECONDS)) + { + _warn("Shutdown timed out after 60 seconds. Exiting."); + } + } catch (InterruptedException iex) + { + _warn("Forcing shutdown of Refocus Service."); + } + _info("Service stopped."); + } catch (Exception ex) { + throw new SystemException("There was a problem shutting down the Refocus Service.", ex); + } finally { + _info("Finished"); + _executor = null; + } + } + + + private ExecutorService startRefocusClientService(RefocusService refocus) { + int configuredCount = Integer.valueOf(_configuration.getValue(SystemConfiguration.Property.REFOCUS_CLIENT_THREADS)); + int configuredTimeout = Integer.valueOf(_configuration.getValue(SystemConfiguration.Property.REFOCUS_CLIENT_CONNECT_TIMEOUT)); + int threadPoolCount = Math.max(configuredCount, 1); + int timeout = Math.max(10000, configuredTimeout); + AtomicInteger jobCounter = new AtomicInteger(0); + + ExecutorService service = Executors.newFixedThreadPool(threadPoolCount, new ThreadFactory() { + + AtomicInteger id = new AtomicInteger(0); + + @Override + public Thread newThread(Runnable r) { + return new Thread(r, MessageFormat.format("RefocusServiceTest-{0}", id.getAndIncrement())); + } + }); + for (int i = 0; i < threadPoolCount; i++) { + service.submit(new RefocusRunner(refocus, timeout, jobCounter)); + } + return service; + } + + + class RefocusRunner implements Runnable { + + //~ Instance fields ****************************************************************************************************************************** + + private final RefocusService service; + private final int timeout; + private final AtomicInteger jobCounter; + private final Logger LOGGER = LoggerFactory.getLogger(RefocusService.class); + + //~ Constructors ********************************************************************************************************************************* + + /** + * Creates a new RefocusRunner object. + * + * @param service The Refocus service to use. + * @param timeout The timeout in milliseconds for forwarding notifications. Must be a positive number. + * @param jobCounter The job counter. Cannot be null. + */ + RefocusRunner(RefocusService service, int timeout, AtomicInteger jobCounter) { + this.service = service; + this.timeout = timeout; + this.jobCounter = jobCounter; + } + + //~ Methods ************************************************************************************************************************************** + + @Override + public void run() { + while (!Thread.currentThread().isInterrupted()) { + try { + int forwarded = service.forwardNotifications(); + jobCounter.addAndGet(forwarded); + Thread.sleep(10); + } catch (InterruptedException ex) { + // TODO - should we catch RuntimeException and handle wrapped InterruptedExceptions too? + _info("Execution was interrupted."); + Thread.currentThread().interrupt(); + break; + } catch (Throwable ex) { + _error(String.format("Exception in refocus runner: %s", ExceptionUtils.getFullStackTrace(ex))); + } + } + _warn(String.format("Refocus thread interrupted. %d notifications forwarded by this thread.", jobCounter.get())); + service.dispose(); + } + } + + + + // ================================================================================= + // Logging wrappers + // Note: these exist because I haven't found a mechanism for setting the log level programmatically.
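+    // (That said, when SLF4J is backed by logback -- as AlertDefinitionsCacheRefresherThreadTest
+    // elsewhere in this change assumes -- the level can be set programmatically, e.g.:
+    //     ((ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"))
+    //             .setLevel(ch.qos.logback.classic.Level.OFF);
+    // The wrappers are kept since they also centralize the mapToErrorLogging mapping.)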
+ // ================================================================================= + + + // Syntactic sugar + private void _info(String msg) + { + if (mapToErrorLogging) { + LOGGER.error(msg); + } + else { + LOGGER.info(msg); + } + } + + private void _error(String msg) + { + LOGGER.error(msg); + } + + private void _warn(String msg) + { + if (mapToErrorLogging) { + LOGGER.error(msg); + } + else { + LOGGER.warn(msg); + } + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceTest.java deleted file mode 100644 index 73b134279..000000000 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceTest.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * 3. Neither the name of Salesforce.com nor the names of its contributors may - * be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- */ - -package com.salesforce.dva.argus.service; - -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Alert; -import com.salesforce.dva.argus.entity.Notification; -import com.salesforce.dva.argus.entity.PrincipalUser; -import com.salesforce.dva.argus.entity.Trigger; -import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; -import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; - -import org.junit.Test; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; -import static org.junit.Assert.*; - -public class SchedulingServiceTest extends AbstractTest { - - @Test - public void testAlertSchedulingWithGlobalInterlock() throws InterruptedException { - SchedulingService schedulingService = system.getServiceFactory().getSchedulingService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - MQService mqService = system.getServiceFactory().getMQService(); - UserService userService = system.getServiceFactory().getUserService(); - - schedulingService.enableScheduling(); - - long schedulingIterations = 1; - int noOfAlerts = random.nextInt(10) + 1; - PrincipalUser user = userService.findAdminUser(); - Alert alert; - - for (int i = 0; i < noOfAlerts; i++) { - String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, " + - "-1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; - - alert = new Alert(user, user, createRandomName(), expression, "* * * * *"); - alert.setEnabled(true); - - Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 0, 0); - alert.setTriggers(Arrays.asList(trigger)); - Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), - 0); - alert.setNotifications(Arrays.asList(notification)); - - alertService.updateAlert(alert); - } - schedulingService.startAlertScheduling(); - Thread.sleep((1000L * 60L * schedulingIterations)); - schedulingService.stopAlertScheduling(); - - List list = mqService.dequeue(ALERT.getQueueName(), AlertWithTimestamp.class, 1000 * noOfAlerts, - (int) (noOfAlerts * schedulingIterations)); - - assertEquals(schedulingIterations * noOfAlerts, list.size()); - } -} -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceIT.java index 4d868f526..b861b8cd8 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceIT.java @@ -28,11 +28,11 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; import com.fasterxml.jackson.core.JsonProcessingException; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.Metric; @@ -61,7 +61,7 @@ import static org.junit.Assert.assertTrue; @Category(IntegrationTest.class) -public class TSDBServiceIT extends AbstractTest { +public class TSDBServiceIT extends AbstractTestIT { private static final long SLEEP_AFTER_PUT_IN_MILLIS = 2000; @@ -127,10 +127,10 @@ public void testPutAndGetMetrics() throws InterruptedException { service.dispose(); } } - + @Test public void testGetMetricsTagValueTooLarge() throws InterruptedException { - + TSDBService service = system.getServiceFactory().getTSDBService(); List expected = createMetricWithMultipleTags("tagKey", 100); @@ -152,7 +152,7 @@ public void testGetMetricsTagValueTooLarge() throws InterruptedException { } finally { service.dispose(); } - + } private MetricQuery toWildcardOrQuery(List metrics, String commonTagKey) { @@ -167,7 +167,7 @@ private MetricQuery toWildcardOrQuery(List metrics, String commonTagKey) sb.append(m.getTag(commonTagKey)).append("|"); } tags.put(commonTagKey, sb.substring(0, sb.length() - 1)); - + return new MetricQuery(metric.getScope(), metric.getMetric(), tags, start, end); } @@ -181,15 +181,15 @@ private List createMetricWithMultipleTags(String commonTagKey, int tagVa Metric m = new Metric(scope, metric); Map datapoints = new HashMap<>(); datapoints.put(timestamp, Double.valueOf(i)); - + Map tags = new HashMap<>(); tags.put(commonTagKey, "someverylooooooooooooooooooooooooooooooooooooooooongTagValue" + i); - + m.setDatapoints(datapoints); m.setTags(tags); result.add(m); } - + return result; } @@ -356,33 +356,6 @@ public void testPutAndGetScopeAnnotations() throws InterruptedException { } } - @SuppressWarnings("unchecked") - @Test - public void testFractureMetrics() { - TSDBService service = system.getServiceFactory().getTSDBService(); - Metric metric = new Metric("testscope", "testMetric"); - Map datapoints = new HashMap<>(); - - for (int i = 0; i <= 200; i++) { - datapoints.put(System.currentTimeMillis() + (i * 60000L), (double)(random.nextInt(50))); - } - metric.setDatapoints(datapoints); - try { - Method method = DefaultTSDBService.class.getDeclaredMethod("fractureMetric", Metric.class); - - method.setAccessible(true); - - List metricList = (List) method.invoke(service, metric); - - assertEquals(3, metricList.size()); - assertEquals(100, metricList.get(0).getDatapoints().size()); - assertEquals(100, metricList.get(1).getDatapoints().size()); - assertEquals(1, metricList.get(2).getDatapoints().size()); - } catch (NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { - throw new SystemException("Failed to construct fracture metric method using reflection"); - } - } - private AnnotationQuery toQuery(Annotation annotation) { String scope = annotation.getScope(); String metric = annotation.getMetric(); @@ -393,27 +366,19 @@ private AnnotationQuery toQuery(Annotation annotation) { return new AnnotationQuery(scope, metric, tags, type, timestamp, null); } - @Test - public void isTSDBServiceSingleton() { - TSDBService service1 = system.getServiceFactory().getTSDBService(); - TSDBService service2 = system.getServiceFactory().getTSDBService(); - - assertTrue(service1 == service2); - } - @Test public void 
testPut_DatapointsContainNullValues() { - + TSDBService service = system.getServiceFactory().getTSDBService(); - + Map datapoints = new HashMap<>(); datapoints.put(1493973552000L, 100D); datapoints.put(1493973652000L, null); Metric m = new Metric("scope", "metric"); m.setDatapoints(datapoints); - + service.putMetrics(Arrays.asList(m)); - + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceTest.java new file mode 100644 index 000000000..a61b3d0a1 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/TSDBServiceTest.java @@ -0,0 +1,74 @@ +package com.salesforce.dva.argus.service; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; +import org.junit.BeforeClass; +import org.junit.AfterClass; +import java.util.Properties; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; + + + +public class TSDBServiceTest { + + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm:ss"); + + @BeforeClass + static public void setUpClass() { + } + + @AfterClass + static public void tearDownClass() { + } + + + @Test + public void testQueryWindow() { + long differenceInMillis = 80L; + assertEquals(TSDBService.QueryTimeWindow.WITHIN_24_HRS.getName(), TSDBService.QueryTimeWindow.getWindow(differenceInMillis)); + differenceInMillis = 87000000L; + assertEquals(TSDBService.QueryTimeWindow.WITHIN_24_HRS_AND_30_DAYS.getName(), TSDBService.QueryTimeWindow.getWindow(differenceInMillis)); + differenceInMillis = 26000000000L; + assertEquals(TSDBService.QueryTimeWindow.GREATER_THAN_30_DAYS.getName(), TSDBService.QueryTimeWindow.getWindow(differenceInMillis)); + } + + @Test + public void testQueryTimeSeriesExpansion() { + int numExpandedTimeSeries = 0; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_0.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 1; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_1.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 2; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_2_10.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 3; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_2_10.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 10; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_2_10.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 11; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_11_100.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 12; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_11_100.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 100; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_11_100.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); +
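+        // The assertions above walk the bucket edges 0, 1, 2-10, and 11-100; the
+        // remaining buckets (101-1000, 1001-10000, and >10000) are probed the same
+        // way below: at the lower edge, an interior value, and the upper edge.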
numExpandedTimeSeries = 101; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_101_1000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 102; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_101_1000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 1000; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_101_1000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 1001; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_1001_10000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 1002; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_1001_10000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 10000; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_1001_10000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + numExpandedTimeSeries = 10001; + assertEquals(TSDBService.QueryTimeSeriesExpansion.TS_GREATER_THAN_10000.getName(), TSDBService.QueryTimeSeriesExpansion.getExpandedTimeSeriesRange(numExpandedTimeSeries)); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/UserServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/UserServiceTest.java index 616157498..39f1d5eea 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/UserServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/UserServiceTest.java @@ -28,10 +28,9 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.entity.PrincipalUser.Preference; import org.junit.Test; @@ -42,12 +41,38 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import org.junit.BeforeClass; +import org.junit.AfterClass; + + +import com.salesforce.dva.argus.system.SystemMain; + +import com.salesforce.dva.argus.TestUtils; + +public class UserServiceTest { + + static private UserService userService; + static private SystemMain system; + + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + userService = system.getServiceFactory().getUserService(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } -public class UserServiceTest extends AbstractTest { @Test public void testUserCrud() { - UserService userService = system.getServiceFactory().getUserService(); PrincipalUser user = new PrincipalUser(userService.findAdminUser(), "aUser", "aUser@mycompany.abc"); user = userService.updateUser(user); @@ -69,16 +94,14 @@ public void testUserCrud() { @Test public void testAdminUserExistence() { - UserService userService = system.getServiceFactory().getUserService(); PrincipalUser admin = userService.findUserByUsername("admin"); assertNotNull(admin); assertEquals(BigInteger.ONE, admin.getId()); } - + @Test public void testDefaultUserExistence() { - UserService userService = system.getServiceFactory().getUserService(); PrincipalUser defaultUser = userService.findUserByUsername("default"); assertNotNull(defaultUser); @@ -87,7 +110,6 @@ public void testDefaultUserExistence() { @Test public void testUniqueUserCount() { - UserService userService = system.getServiceFactory().getUserService(); long uniqueUserCount = userService.getUniqueUserCount(); assertTrue("There should always be at least one user at system startup.", uniqueUserCount >= 1); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceIT.java index 1e70e84e4..045764934 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceIT.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; import static org.junit.Assert.assertFalse; @@ -42,7 +42,7 @@ import org.junit.Test; import org.junit.experimental.categories.Category; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Annotation; import com.salesforce.dva.argus.entity.PrincipalUser; @@ -50,7 +50,7 @@ import com.salesforce.dva.argus.system.SystemException; @Category(IntegrationTest.class) -public class WardenServiceIT extends AbstractTest { +public class WardenServiceIT extends AbstractTestIT { private UserService _userService; private WardenService _wardenService; @@ -136,7 +136,7 @@ public void testReinstatedUserFromTemporarySuspension() throws InterruptedExcept } } } - + @Test public void testSuspendUserIndefinitely() { PrincipalUser user = _userService.findUserByUsername("bhinav.sura"); @@ -154,7 +154,7 @@ public void testSuspendUserIndefinitely() { isIndefinitelySuspended = _wardenService.suspendUser(user, SubSystem.API); assertTrue(isIndefinitelySuspended); } - + @Test(expected = SystemException.class) public void testAssertSubsystemUsePermitted_IndefiniteSuspension() { PrincipalUser user = _userService.findUserByUsername("bhinav.sura"); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceTest.java index 4e971cd83..4e6d49e8c 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/WardenServiceTest.java @@ -28,7 +28,7 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service; import static org.junit.Assert.assertEquals; @@ -43,9 +43,10 @@ import java.util.Random; import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.AfterClass; import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.entity.Trigger; @@ -53,27 +54,41 @@ import com.salesforce.dva.argus.service.WardenService.SubSystem; import com.salesforce.dva.argus.service.warden.DefaultWardenService; import com.salesforce.dva.argus.system.SystemException; +import com.salesforce.dva.argus.system.SystemMain; -public class WardenServiceTest extends AbstractTest { +import com.salesforce.dva.argus.TestUtils; - private UserService _userService; - private WardenService _wardenService; - private AlertService _alertService; - @Before - @Override - public void setUp() { - super.setUp(); +public class WardenServiceTest { + + static private UserService _userService; + static private WardenService _wardenService; + static private AlertService _alertService; + static private SystemMain system; + static private PrincipalUser user; + + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); _userService = system.getServiceFactory().getUserService(); _wardenService = system.getServiceFactory().getWardenService(); _alertService = system.getServiceFactory().getAlertService(); + user = new PrincipalUser(_userService.findAdminUser(), "bhinav.sura", "bhinav.sura@salesforce.com"); + user = _userService.updateUser(user); + } - PrincipalUser user = _userService.findUserByUsername("bhinav.sura"); - - if (user == null) { - user = new PrincipalUser(_userService.findAdminUser(), "bhinav.sura", "bhinav.sura@salesforce.com"); - user = _userService.updateUser(user); + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); } + } + + @Before + public void setUp() { _wardenService.reinstateUser(user, SubSystem.API); _wardenService.reinstateUser(user, SubSystem.POSTING); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThreadTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThreadTest.java new file mode 100644 index 000000000..79a6e88ba --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheRefresherThreadTest.java @@ -0,0 +1,152 @@ +package com.salesforce.dva.argus.service.alert; + +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.system.SystemMain; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; +import org.slf4j.LoggerFactory; + +import java.math.BigInteger; +import java.sql.DriverManager; +import java.sql.SQLNonTransientConnectionException; +import java.util.Arrays; +import java.util.Map; + +import static org.junit.Assert.assertFalse; +import static 
org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.junit.Assert.assertEquals; + +//@Ignore("These new tests are failing in the build pipeline due to persistence issues. @TODO: remove @ignore when pipeline issues are resolved") +public class AlertDefinitionsCacheRefresherThreadTest { + private static final String EXPRESSION = + "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + + private SystemMain system; + private PrincipalUser admin; + private AlertService alertService; + private UserService userService; + + private static ch.qos.logback.classic.Logger apacheLogger; + private static ch.qos.logback.classic.Logger myClassLogger; + + @BeforeClass + static public void setUpClass() { + myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(AlertDefinitionsCacheRefresherThreadTest.class); + myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF); + apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + + } + + @AfterClass + static public void tearDownClass() { + } + + @Before + public void setup() { + system = TestUtils.getInstance(); + system.start(); + userService = system.getServiceFactory().getUserService(); + admin = userService.findAdminUser(); + alertService = system.getServiceFactory().getAlertService(); + alertService.findAllAlerts(false).forEach(a -> alertService.deleteAlert(a)); + + try { + Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); + DriverManager.getConnection("jdbc:derby:memory:argus;create=true").close(); + + } catch (Exception ex) { + LoggerFactory.getLogger(AlertServiceTest.class).error("Exception in setUp:{}", ex.getMessage()); + fail("Exception during database startup."); + } + } + + @After + public void tearDown() { + alertService.findAllAlerts(false).forEach(a -> alertService.deleteAlert(a)); + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + + try { + DriverManager.getConnection("jdbc:derby:memory:argus;shutdown=true").close(); + } catch (SQLNonTransientConnectionException ex) { + if (ex.getErrorCode() >= 50000 || ex.getErrorCode() < 40000) { + throw new RuntimeException(ex); + } + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + @Test + public void testInitializeCache() { + AlertDefinitionsCache cache = new AlertDefinitionsCache(alertService, false); + AlertDefinitionsCacheRefresherThread refresherThread = new AlertDefinitionsCacheRefresherThread(cache, alertService); + refresherThread.interrupt(); + + Alert alert = TestUtils.generateAlert("alert-name", admin, EXPRESSION); + Trigger trigger = TestUtils.generateTrigger("trigger-name", alert); + Notification notification = TestUtils.generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger})); + alert.setEnabled(true); + Alert expectedAlert = alertService.updateAlert(alert); + + refresherThread.initializeAlertDefinitionsCache(); + Map alertsMapById = cache.getAlertsMapById(); + + assertEquals(1, alertsMapById.size()); + Alert actualAlert = (Alert) alertsMapById.values().toArray()[0]; + + assertTrue(actualAlert.equals(expectedAlert)); + assertTrue(actualAlert.getTriggers().toArray()[0].equals(trigger)); + 
assertTrue(actualAlert.getNotifications().toArray()[0].equals(notification)); + } + + @Ignore + @Test + public void testRefreshCache() { + SystemMain system = TestUtils.getInstance(); + AlertDefinitionsCache cache = new AlertDefinitionsCache(alertService, false); + AlertDefinitionsCacheRefresherThread refresherThread = new AlertDefinitionsCacheRefresherThread(cache, alertService); + + Alert alert = TestUtils.generateAlert("an-alert", admin, EXPRESSION); + Trigger trigger = TestUtils.generateTrigger("trigger-name", alert); + Notification notification = TestUtils.generateNotification("notification-name", alert, Arrays.asList(new Trigger[]{trigger})); + alert.setNotifications(Arrays.asList(new Notification[]{notification})); + alert.setTriggers(Arrays.asList(new Trigger[]{trigger})); + alert.setEnabled(true); + alert = alertService.updateAlert(alert); + + refresherThread.initializeAlertDefinitionsCache(); + Map alertsMapById = cache.getAlertsMapById(); + + assertEquals(1, alertsMapById.size()); + + Alert expected = alertService.findAlertByPrimaryKey(alert.getId()); + expected.setShared(true); + alertService.updateAlert(expected); + + long currentExecutionTime = System.currentTimeMillis(); + refresherThread.refreshAlertDefinitionsCache(0, 0, 0, currentExecutionTime); + alertsMapById = cache.getAlertsMapById(); + Alert actualAlert = (Alert) alertsMapById.values().toArray()[0]; + + assertFalse(actualAlert.equals(alert)); + assertTrue(actualAlert.equals(expected)); + } +} \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheTest.java new file mode 100644 index 000000000..7dea2037f --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertDefinitionsCacheTest.java @@ -0,0 +1,112 @@ +package com.salesforce.dva.argus.service.alert; + +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.system.SystemMain; + +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mock; + +import org.slf4j.LoggerFactory; + +import java.math.BigInteger; +import java.sql.DriverManager; +import java.util.*; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + + +public class AlertDefinitionsCacheTest { + static final long MILLISECONDS_PER_MINUTE = 60 * 1000L; + + private static ch.qos.logback.classic.Logger apacheLogger; + private static ch.qos.logback.classic.Logger myClassLogger; + + @Mock private AlertDefinitionsCacheRefresherThread refreshThreadMock; + private AlertDefinitionsCache alertDefinitionsCache; + private SystemMain system; + private PrincipalUser admin; + private UserService userService; + + @BeforeClass + static public void setUpClass() { + myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(AlertDefinitionsCacheRefresherThreadTest.class); + myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF); + apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + } + + + @Before + public void setup() { + system = TestUtils.getInstance(); + system.start(); + userService = system.getServiceFactory().getUserService(); + admin = userService.findAdminUser(); + try 
{ + Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); + DriverManager.getConnection("jdbc:derby:memory:argus;create=true").close(); + } catch (Exception ex) { + LoggerFactory.getLogger(AlertServiceTest.class).error("Exception in setUp:{}", ex.getMessage()); + fail("Exception during database startup."); + } + alertDefinitionsCache = new AlertDefinitionsCache(refreshThreadMock); + } + + @Test + public void testSetAlertsMapByCronEntry(){ + List alertsIDList1 = new ArrayList<>(); + alertsIDList1.add(new BigInteger("1")); + alertsIDList1.add(new BigInteger("2")); + + Map> alertMapByCronEntry = new HashMap<>(); + alertMapByCronEntry.put("* * * * *", alertsIDList1); + alertDefinitionsCache.setAlertsMapByCronEntry(alertMapByCronEntry); + + assertTrue(alertDefinitionsCache.getAlertsMapByCronEntry() == alertMapByCronEntry); + } + + @Test + public void testSetAlertsMapById(){ + Map alertMapById = new HashMap<>(); + Alert alert1 = new Alert(userService.findAdminUser(), admin, "testAlert1", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + alertMapById.put(new BigInteger("1"), alert1); + + Alert alert2 = new Alert(userService.findAdminUser(), admin, "testAlert2", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + alertMapById.put(new BigInteger("2"), alert2); + + alertDefinitionsCache.setAlertsMapById(alertMapById); + + assertTrue(alertDefinitionsCache.getAlertsMapById() == alertMapById); + } + + @Test + public void testGetEnabledAlertsForMinute(){ + List alertsIDList1 = new ArrayList<>(); + alertsIDList1.add(new BigInteger("1")); + alertsIDList1.add(new BigInteger("2")); + + Map> alertMapByCronEntry = new HashMap<>(); + alertMapByCronEntry.put("* * * * *", alertsIDList1); + alertDefinitionsCache.setAlertsMapByCronEntry(alertMapByCronEntry); + + Map alertMapById = new HashMap<>(); + Alert alert1 = new Alert(userService.findAdminUser(), admin, "testAlert1", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + alertMapById.put(new BigInteger("1"), alert1); + + Alert alert2 = new Alert(userService.findAdminUser(), admin, "testAlert2", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + alertMapById.put(new BigInteger("2"), alert2); + + alertDefinitionsCache.setAlertsMapById(alertMapById); + + Long timeInMillis = new Date().getTime(); + Long flooredMinuteInMillis = timeInMillis - (timeInMillis % MILLISECONDS_PER_MINUTE); + List alerts = alertDefinitionsCache.getEnabledAlertsForMinute(flooredMinuteInMillis); + assertTrue(alerts.size() == 2); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AlertServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertServiceTest.java similarity index 62% rename from ArgusCore/src/test/java/com/salesforce/dva/argus/service/AlertServiceTest.java rename to ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertServiceTest.java index 21e665aeb..7d83508d0 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/AlertServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertServiceTest.java @@ -29,13 +29,40 @@ * POSSIBILITY OF SUCH DAMAGE. 
*/ -package com.salesforce.dva.argus.service; +package com.salesforce.dva.argus.service.alert; -import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; -import static org.junit.Assert.*; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.entity.Trigger.TriggerType; +import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.MQService; +import com.salesforce.dva.argus.service.ManagementService; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; +import com.salesforce.dva.argus.system.SystemMain; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.MethodRule; +import org.junit.rules.TestWatchman; +import org.junit.runners.model.FrameworkMethod; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import javax.persistence.EntityManager; import java.io.IOException; -import java.math.BigInteger; +import java.sql.DriverManager; +import java.sql.SQLNonTransientConnectionException; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -45,37 +72,130 @@ import java.util.Set; import java.util.stream.Collectors; -import org.junit.Before; -import org.junit.Test; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.module.SimpleModule; -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Alert; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.entity.Notification; -import com.salesforce.dva.argus.entity.PrincipalUser; -import com.salesforce.dva.argus.entity.Trigger; -import com.salesforce.dva.argus.entity.Trigger.TriggerType; -import com.salesforce.dva.argus.service.alert.AlertsCountContext; -import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; +import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; -public class AlertServiceTest extends AbstractTest { +public class AlertServiceTest{ private static final String EXPRESSION = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; - private PrincipalUser admin; + + + private SystemMain system; + private PrincipalUser admin; + private AlertService alertService; + private UserService userService; + private MQService mqService; + private ManagementService managementService; + final Logger logger = LoggerFactory.getLogger(getClass()); + private EntityManager em; + + + private static ch.qos.logback.classic.Logger apacheLogger; + private static ch.qos.logback.classic.Logger myClassLogger; + + @Rule public MethodRule watchman = 
new TestWatchman() { + public void starting(FrameworkMethod method) { + logger.info("now running {}", method.getName()); + } + }; + + + @BeforeClass + static public void setUpClass() { + myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("com.salesforce.dva.argus.service.alert.AlertServiceTest"); + myClassLogger.setLevel(ch.qos.logback.classic.Level.INFO); + apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + + } + + @AfterClass + static public void tearDownClass() { + } + @Before public void setup() { - admin = system.getServiceFactory().getUserService().findAdminUser(); + system = TestUtils.getInstance(); + system.start(); + userService = system.getServiceFactory().getUserService(); + admin = userService.findAdminUser(); + alertService = system.getServiceFactory().getAlertService(); + mqService = system.getServiceFactory().getMQService(); + managementService = system.getServiceFactory().getManagementService(); + alertService.findAllAlerts(false).forEach(a -> alertService.deleteAlert(a)); + try { + Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); + DriverManager.getConnection("jdbc:derby:memory:argus;create=true").close(); + em = mock(EntityManager.class); + + } catch (Exception ex) { + LoggerFactory.getLogger(getClass()).error("Exception in setUp:{}", ex.getMessage()); + fail("Exception during database startup."); + } + + + } + @After + public void tearDown() { + alertService.findAllAlerts(false).forEach(a -> alertService.deleteAlert(a)); + if (system != null) { + system.stop(); + } + + try { + DriverManager.getConnection("jdbc:derby:memory:argus;shutdown=true").close(); + } catch (SQLNonTransientConnectionException ex) { + if (ex.getErrorCode() >= 50000 || ex.getErrorCode() < 40000) { + throw new RuntimeException(ex); + } + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } + + // Should be used to mock EntityManager +// void mockQuery(String nameQuery, Long outputValue) { +// TypedQuery mockedQuery = mock(TypedQuery.class); +// when(mockedQuery.setHint(anyString(), any())).thenReturn(mockedQuery); +// when(mockedQuery.getSingleResult()).thenReturn(outputValue); +// when(em.createNamedQuery(nameQuery, Long.class)).thenReturn(mockedQuery); +// when(em.createNamedQuery(anyString())).thenReturn(mockedQuery); +// } +// +// void mockQuery(String nameQuery, List outputValue) { +// TypedQuery mockedQuery = mock(TypedQuery.class); +// CriteriaBuilder cb = mock(CriteriaBuilder.class); +// CriteriaQuery cq = mock(CriteriaQuery.class); +// TypedQuery mockedQuery2 = mock(TypedQuery.class); +// +// when(mockedQuery.setHint(anyString(), any())).thenReturn(mockedQuery); +// when(mockedQuery.getResultList()).thenReturn(outputValue); +// when(mockedQuery2.setHint(anyString(), any())).thenReturn(mockedQuery2); +// //when(mockedQuery2.getResultList()).thenReturn(outputValue); +// +// when(cb.createTupleQuery()).thenReturn(cq); +// +// when(em.getCriteriaBuilder()).thenReturn(cb); +// when(em.createQuery(cq)).thenReturn(mockedQuery2); +// when(em.createNamedQuery(nameQuery, Alert.class)).thenReturn(mockedQuery); +// when(em.createNamedQuery(anyString())).thenReturn(mockedQuery); +// } + + @Test public void testUpdateAlert() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - Alert expected = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", 
EXPRESSION, "* * * * *"); + String alertName = "alertname-" + TestUtils.createRandomName(); + Alert expected = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName, EXPRESSION, "* * * * *"); Notification notification = new Notification("notification", expected, "notifier-name", new ArrayList(), 5000L); Trigger trigger = new Trigger(expected, TriggerType.GREATER_THAN, "trigger-name", 0.95, 60000); @@ -98,9 +218,9 @@ public void testUpdateAlert() { @Test public void testDeleteAlert() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", EXPRESSION, "* * * * *"); + String alertName = "alertname-" + TestUtils.createRandomName(); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName, EXPRESSION, "* * * * *"); Notification notification1 = new Notification("notification1", alert, "notifier-name1", new ArrayList(), 5000L); Notification notification2 = new Notification("notification2", alert, "notifier-name2", new ArrayList(), 5000L); Trigger trigger1 = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "trigger-name1", 0.95, 60000); @@ -149,9 +269,9 @@ public void testDeleteAlert() { @Test public void testFindAlertByNameAndOwner() { - AlertService alertService = system.getServiceFactory().getAlertService(); String alertName = "testAlert"; - PrincipalUser expectedUser = new PrincipalUser(admin, "testUser", "testuser@testcompany.com"); + String userName = TestUtils.createRandomName(); + PrincipalUser expectedUser = new PrincipalUser(admin, userName, "testuser@testcompany.com"); Alert expectedAlert = new Alert(expectedUser, expectedUser, alertName, EXPRESSION, "* * * * *"); expectedAlert = alertService.updateAlert(expectedAlert); @@ -164,10 +284,8 @@ public void testFindAlertByNameAndOwner() { @Test public void testfindAlertsByOwner() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(20) + 1; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(20) + 1; PrincipalUser user = new PrincipalUser(admin ,userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -192,10 +310,8 @@ public void testfindAlertsByOwner() { @Test public void testfindAlertsByOwnerMeta() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(20) + 1; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(20) + 1; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -209,6 +325,8 @@ public void testfindAlertsByOwnerMeta() { List actualAlerts = alertService.findAlertsByOwner(user, true); assertEquals(actualAlerts.size(), expectedAlerts.size()); + //mockQuery("Alert.findByOwner", expectedAlerts); + //assertEquals(expectedAlerts, Alert.findByOwnerMeta(em, user)); Set actualSet = new HashSet<>(); @@ -217,12 +335,10 @@ public void testfindAlertsByOwnerMeta() { assertTrue(actualSet.contains(alert)); } } - + @Test public void testFindAlertsByOwnerPaged() { - 
UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); int alertsCount = 25; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); @@ -236,26 +352,26 @@ public void testFindAlertsByOwnerPaged() { int limit = 10; // Page size List actualAlerts = new ArrayList<>(); - + // Fetch first page - List page = alertService.findAlertsByOwnerPaged(user, limit, 0, null); + List page = alertService.findAlertsByOwnerPaged(user, limit, 0, null, null, null); assertEquals(page.size(), limit); actualAlerts.addAll(page); // Fetch second page - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null); + page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null, null, null); assertEquals(page.size(), limit); - actualAlerts.addAll(page); - + actualAlerts.addAll(page); + // Fetch remaining alerts (less than a page) - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null); + page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null, null, null); assertEquals(page.size(), expectedAlerts.size() - actualAlerts.size()); actualAlerts.addAll(page); - + // Try to fetch again should be empty result - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null); + page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), null, null, null); assertEquals(0, page.size()); - + Set actualSet = new HashSet<>(); actualSet.addAll(actualAlerts); @@ -263,19 +379,17 @@ public void testFindAlertsByOwnerPaged() { assertTrue(actualSet.contains(alert)); } } - + @Test public void testFindAlertsByOwnerPagedWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); int alertsCount = 25; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); List expectedAlerts = new ArrayList<>(); - List expectedEvenAlerts = new ArrayList<>(); + List expectedEvenAlerts = new ArrayList<>(); List expectedOddAlerts = new ArrayList<>(); for (int i = 0; i < alertsCount; i++) { @@ -297,26 +411,26 @@ public void testFindAlertsByOwnerPagedWithSearchText() { List actualAlerts = new ArrayList<>(); // Fetch first page - List page = alertService.findAlertsByOwnerPaged(user, limit, 0, userName); + List page = alertService.findAlertsByOwnerPaged(user, limit, 0, userName, null, null); assertEquals(page.size(), limit); actualAlerts.addAll(page); // Fetch with invalid owner's name should be empty - page = alertService.findAlertsByOwnerPaged(user, limit, 0, "invalid_owner"); + page = alertService.findAlertsByOwnerPaged(user, limit, 0, "invalid_owner", null, null); assertEquals(page.size(), 0); // Fetch second page - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), userName); + page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), userName, null, null); assertEquals(page.size(), limit); actualAlerts.addAll(page); // Fetch remaining alerts (less than a page) - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), userName); + page = alertService.findAlertsByOwnerPaged(user, limit, 
actualAlerts.size(), userName, null, null); assertEquals(page.size(), expectedAlerts.size() - actualAlerts.size()); actualAlerts.addAll(page); // Try to fetch again should be empty result - page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), userName); + page = alertService.findAlertsByOwnerPaged(user, limit, actualAlerts.size(), userName, null, null); assertEquals(0, page.size()); Set actualSet = new HashSet<>(); @@ -329,29 +443,29 @@ public void testFindAlertsByOwnerPagedWithSearchText() { // ================================================== // Test search by alerts name // ================================================== - + List actualEvenAlerts = new ArrayList<>(); - + // Fetch with invalid alert name should be empty - page = alertService.findAlertsByOwnerPaged(user, limit, 0, "invalid_alert_name"); + page = alertService.findAlertsByOwnerPaged(user, limit, 0, "invalid_alert_name", null, null); assertEquals(page.size(), 0); // Fetch first page of even number alerts - page = alertService.findAlertsByOwnerPaged(user, limit, 0, "e*eN"); + page = alertService.findAlertsByOwnerPaged(user, limit, 0, "e*eN", null, null); assertEquals(page.size(), limit); actualEvenAlerts.addAll(page); - + // Fetch first page of even number alerts case insensitive - page = alertService.findAlertsByOwnerPaged(user, limit, 0, "EvEn"); + page = alertService.findAlertsByOwnerPaged(user, limit, 0, "EvEn", null, null); assertEquals(page.size(), limit); // Fetch second page of even number alerts (less than a page) - page = alertService.findAlertsByOwnerPaged(user, limit, actualEvenAlerts.size(), "even"); + page = alertService.findAlertsByOwnerPaged(user, limit, actualEvenAlerts.size(), "even", null, null); assertEquals(page.size(), expectedEvenAlerts.size() - actualEvenAlerts.size()); actualEvenAlerts.addAll(page); // Try to fetch again should be empty result - page = alertService.findAlertsByOwnerPaged(user, limit, actualEvenAlerts.size(), "even"); + page = alertService.findAlertsByOwnerPaged(user, limit, actualEvenAlerts.size(), "even", null, null); assertEquals(0, page.size()); Set actualEvenSet = new HashSet<>(); @@ -361,16 +475,92 @@ public void testFindAlertsByOwnerPagedWithSearchText() { assertTrue(actualEvenSet.contains(alert)); } - page = alertService.findAlertsByOwnerPaged(user, limit, 0, "O*d"); + page = alertService.findAlertsByOwnerPaged(user, limit, 0, "O*d", null, null); assertEquals(limit, page.size()); } - + + @Test + public void testFindAlertsByOwnerPagedWithSorting() { + String userName = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName, userName + "test1@salesforce.com")); + + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert2 = alertService.updateAlert(new Alert(user1, user1, "alert2", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert3 = alertService.updateAlert(new Alert(user1, user1, "alert3", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + //Change modified date + alert1.setShared(true); + alertService.updateAlert(alert1); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + //sort by alert name ascending + List page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "name", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), 
page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by alert name descending + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "name", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by create date ascending + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "createdDate", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by create date descending + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "createdDate", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date ascending + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "modifiedDate", "ASC"); + assertEquals(alert2.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date descending + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "modifiedDate", "DESC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert2.getName(), page.get(2).getName()); + + //invalid column + try { + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "invalidColumn", "DESC"); + } catch (IllegalArgumentException ex){ + assertNotNull(ex); + } + try { + page = alertService.findAlertsByOwnerPaged(user1, 10, 0, null, "", "DESC"); + } catch (IllegalArgumentException ex){ + assertNotNull(ex); + } + } + @Test public void testCountAlertsByOwner() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(20) + 1; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(20) + 1; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -385,29 +575,30 @@ public void testCountAlertsByOwner() { int cnt = alertService.countAlerts(context); assertEquals(cnt, expectedAlerts.size()); + //mockQuery("Alert.countByOwner", Long.valueOf(expectedAlerts.size())); + //assertEquals(expectedAlerts.size(), Alert.countByOwner(em, user, null)); } - + @Test public void testCountAlertsByOwnerWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(20) + 1; + String namedQuery = "Alert.countByOwnerWithSearchText"; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(20) + 1; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); List expectedAlerts = new ArrayList<>(); - List expectedEvenAlerts = new ArrayList<>(); + List expectedEvenAlerts = new ArrayList<>(); List expectedOddAlerts = new ArrayList<>(); for (int i = 0; i < alertsCount; i++) { if (i % 2 == 0) { - Alert evenAlert = 
alertService.updateAlert(new Alert(user, user, "even_alert_" + i, EXPRESSION, "* * * * *")); + Alert evenAlert = alertService.updateAlert(new Alert(user, user, "another_even_alert_" + i, EXPRESSION, "* * * * *")); expectedEvenAlerts.add(evenAlert); expectedAlerts.add(evenAlert); } else { - Alert oddAlert = alertService.updateAlert(new Alert(user, user, "odd_alert_" + i, EXPRESSION, "* * * * *")); + Alert oddAlert = alertService.updateAlert(new Alert(user, user, "another_odd_alert_" + i, EXPRESSION, "* * * * *")); expectedOddAlerts.add(oddAlert); expectedAlerts.add(oddAlert); } @@ -416,29 +607,35 @@ public void testCountAlertsByOwnerWithSearchText() { // Filter on user name AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText(userName).build(); int cnt = alertService.countAlerts(context); - assertEquals(cnt, expectedAlerts.size()); - + //mockQuery(namedQuery, Long.valueOf(expectedAlerts.size())); + //assertEquals(expectedAlerts.size(), Alert.countByOwner(em, user, userName)); + + // Count alerts with "even" in the name context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("even").build(); cnt = alertService.countAlerts(context); assertEquals(cnt, expectedEvenAlerts.size()); - + //mockQuery(namedQuery, Long.valueOf(expectedEvenAlerts.size())); + //assertEquals(expectedEvenAlerts.size(), Alert.countByOwner(em, user, "even")); + // Count alerts with "even" in the name, case insensitive context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("EvEn").build(); cnt = alertService.countAlerts(context); assertEquals(cnt, expectedEvenAlerts.size()); - + // Count alerts with "odd" in the name context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("odd").build(); cnt = alertService.countAlerts(context); assertEquals(cnt, expectedOddAlerts.size()); - - // Count alerts with "odd" in the name + + // Count alerts with "odd" in the name, case insensitive context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("OdD").build(); cnt = alertService.countAlerts(context); assertEquals(cnt, expectedOddAlerts.size()); - + //mockQuery(namedQuery, Long.valueOf(expectedOddAlerts.size())); + //assertEquals(expectedOddAlerts.size(), Alert.countByOwner(em, user, "OdD")); + // Invalid alert name context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("invalid_alert_name").build(); cnt = alertService.countAlerts(context); @@ -449,7 +646,8 @@ public void testCountAlertsByOwnerWithSearchText() { cnt = alertService.countAlerts(context); assertEquals(cnt, expectedEvenAlerts.size()); - context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("*dD").build(); + // Test with wildcard expressions. 
+ context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().setPrincipalUser(user).setSearchText("o*D").build(); cnt = alertService.countAlerts(context); assertEquals(cnt, expectedOddAlerts.size()); @@ -459,10 +657,8 @@ public void testCountAlertsByOwnerWithSearchText() { @Test public void findAllAlerts() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(100) + 1; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(100) + 1; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -475,6 +671,9 @@ public void findAllAlerts() { List actualAlerts = alertService.findAllAlerts(false); + //mockQuery("Alert.findAll", expectedAlerts); + //assertEquals(expectedAlerts, Alert.findAll(em)); + assertEquals(actualAlerts.size(), expectedAlerts.size()); Set actualSet = new HashSet<>(); @@ -487,10 +686,8 @@ public void findAllAlerts() { @Test public void testFindAllAlertsMeta() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); - int alertsCount = random.nextInt(100) + 1; + String userName = TestUtils.createRandomName(); + int alertsCount = TestUtils.random.nextInt(100) + 1; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -504,6 +701,8 @@ public void testFindAllAlertsMeta() { List actualAlerts = alertService.findAllAlerts(true); assertEquals(actualAlerts.size(), expectedAlerts.size()); + //mockQuery("Alert.findAll", expectedAlerts); + //assertEquals(expectedAlerts, Alert.findAllMeta(em)); Set actualSet = new HashSet<>(); @@ -515,9 +714,7 @@ public void testFindAllAlertsMeta() { @Test public void findAlertsInRange() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); int alertsCount = 50; PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); @@ -526,13 +723,15 @@ public void findAlertsInRange() { List insertedAlerts = new ArrayList<>(); for (int i = 0; i < alertsCount; i++) { - insertedAlerts.add(alertService.updateAlert(new Alert(user, user, "alert_" + i, EXPRESSION, "* * * * *"))); + Alert a = alertService.updateAlert(new Alert(user, user, "alert_" + i, EXPRESSION, "* * * * *")); + insertedAlerts.add(a); } + List expectedAlerts = insertedAlerts.subList(0, 20); - List actualAlerts = alertService.findAlertsByRangeAndStatus(new BigInteger("100002"), new BigInteger("100021"), false); + List actualAlerts = alertService.findAlertsByRangeAndStatus(insertedAlerts.get(0).getId(), insertedAlerts.get(19).getId(), false); - assertEquals(actualAlerts.size(), 20); + assertEquals(20, actualAlerts.size()); Set actualSet = new HashSet<>(); @@ -546,9 +745,7 @@ public void findAlertsInRange() { @Test public void findAlertsModifiedAfterDate() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); 
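// Reading of the assertions below (inferred from this test, not a documented contract): findAlertsModifiedAfterDate(date) should return exactly the alerts whose modifiedDate falls after the supplied Date, which is why the alerts created before the captured timestamp must not appear in the result even though they belong to the same owner.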
PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); user = userService.updateUser(user); @@ -562,14 +759,14 @@ public void findAlertsModifiedAfterDate() { try { Thread.sleep(5000); }catch(Exception e) { - + } - + Date currentDate = new Date(); for (int i = 25; i < 50; i++) { insertedAlerts.add(alertService.updateAlert(new Alert(user, user, "alert_" + i, EXPRESSION, "* * * * *"))); } - + List expectedAlerts = insertedAlerts.subList(25, 50); List actualAlerts = alertService.findAlertsModifiedAfterDate(currentDate); @@ -587,9 +784,7 @@ public void findAlertsModifiedAfterDate() { @Test public void findFullAlertObjectRetrieval() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); @@ -636,7 +831,7 @@ public void findFullAlertObjectRetrieval() { assertEquals(notificationsMap.get("notification_1").getTriggers().size(), 1); assertEquals(notificationsMap.get("notification_2").getTriggers().size(), 0); assertEquals(triggersMap.get("trigger_1").getNotifications().size(), 1); - assertEquals(triggersMap.get("trigger_2").getNotifications().size(), 0); + assertEquals(triggersMap.get("trigger_2").getNotifications().size(), 0); assertEquals(notificationsMap.get("notification_1").getTriggers().get(0).getName(), "trigger_1"); assertEquals(triggersMap.get("trigger_1").getNotifications().get(0).getName(), "notification_1"); Notification fetchedNotification1 = notificationsMap.get("notification_1"); @@ -661,10 +856,8 @@ public void findFullAlertObjectRetrieval() { @Test public void testAlertDelete() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); PrincipalUser user = userService.findAdminUser(); - String alertName = createRandomName(); + String alertName = TestUtils.createRandomName(); Alert expectedAlert = new Alert(user, user, alertName, EXPRESSION, "* * * * *"); expectedAlert = alertService.updateAlert(expectedAlert); @@ -674,9 +867,9 @@ public void testAlertDelete() { @Test public void testDeletedTriggersInNotifications() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", EXPRESSION, "* * * * *"); + String alertName = "alertname-" + TestUtils.createRandomName(); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName, EXPRESSION, "* * * * *"); Notification notification1 = new Notification("notification1", alert, "notifier-name1", new ArrayList(), 5000L); Notification notification2 = new Notification("notification2", alert, "notifier-name2", new ArrayList(), 5000L); Notification notification3 = new Notification("notification3", alert, "notifier-name3", new ArrayList(), 5000L); @@ -700,10 +893,8 @@ public void testDeletedTriggersInNotifications() { @Test public void testAlertDeleteCreateAnotherAlertWithSameName() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); PrincipalUser user = userService.findAdminUser(); - String alertName = createRandomName(); + String alertName 
= TestUtils.createRandomName(); Alert alert = new Alert(user, user, alertName, EXPRESSION, "* * * * *"); alert = alertService.updateAlert(alert); @@ -714,32 +905,33 @@ public void testAlertDeleteCreateAnotherAlertWithSameName() { assertNotNull((alertService.findAlertByNameAndOwner(alertName, user))); } + @Ignore("Re-do this test; it covers things similar to DefaultAlertServiceTest and MQServiceTest, and takes longer.") @Test public void testAlertEnqueue() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - MQService mqService = system.getServiceFactory().getMQService(); + PrincipalUser user = userService.findAdminUser(); List actualAlertList = new ArrayList<>(); - for (int i = 0; i < 5; i++) { - actualAlertList.add(alertService.updateAlert(new Alert(user, user, createRandomName(), EXPRESSION, "* * * * *"))); + int count = 5; + for (int i = 0; i < count; i++) { + actualAlertList.add(alertService.updateAlert(new Alert(user, user, TestUtils.createRandomName(), EXPRESSION, "* * * * *"))); } alertService.enqueueAlerts(actualAlertList); + List expectedList = mqService.dequeue(ALERT.getQueueName(), AlertWithTimestamp.class, 10000, count); - List expectedList = mqService.dequeue(ALERT.getQueueName(), AlertWithTimestamp.class, 1000, 10); - - assertEquals(actualAlertList.size(), expectedList.size()); + assertEquals(actualAlertList.size(), expectedList.size()); } @Test public void testSharedAlertWhenOneSharedAlert() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); + String userName = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName, userName + "test1@salesforce.com")); + + String alertName1 = "alertname-" + TestUtils.createRandomName(); - alertService.updateAlert(new Alert(user1, user1, "alert-name1", EXPRESSION, "* * * * *")); - Alert alertShared = alertService.updateAlert(new Alert(user1, user1, "alert-name-shared2", EXPRESSION, "* * * * *")); + alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alertShared = alertService.updateAlert(new Alert(user1, user1, alertName2, EXPRESSION, "* * * * *")); alertShared.setShared(true); alertService.updateAlert(alertShared); @@ -752,13 +944,15 @@ public void testSharedAlertWhenOneSharedAlert() { @Test public void testSharedAlertWhenTwoSharedAlert() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_shared1", EXPRESSION, "* * * * *")); - Alert alertSharedUser2 = 
alertService.updateAlert(new Alert(user2, user2, "alert-name-shared2", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); alertSharedUser1.setShared(true); alertService.updateAlert(alertSharedUser1); @@ -775,13 +969,15 @@ public void testSharedAlertWhenTwoSharedAlert() { @Test public void testFindSharedAlertsMeta() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_shared1", EXPRESSION, "* * * * *")); - Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared2", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); alertSharedUser1.setShared(true); alertService.updateAlert(alertSharedUser1); @@ -795,51 +991,55 @@ public void testFindSharedAlertsMeta() { assertEquals(expectedSharedResult, alertService.findSharedAlerts(true, null, null)); } - + @Test public void testFindSharedAlertsMetaPaged() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert2", EXPRESSION, "* * * * *")); Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert3", EXPRESSION, "* * * * *")); - alert1.setShared(true); + alert1.setMissingDataNotificationEnabled(true); alertService.updateAlert(alert1); alert2.setShared(true); + alert2.setMissingDataNotificationEnabled(false); alertService.updateAlert(alert2); alert3.setShared(false); alertService.updateAlert(alert3); - 
+ + Set sharedAlerts = new HashSet<>(); sharedAlerts.add("alert1"); sharedAlerts.add("alert2"); - + // First page - List page = alertService.findSharedAlertsPaged(1, 0, null); + List page = alertService.findSharedAlertsPaged(1, 0, null, null, null); assertEquals(1, page.size()); assertTrue(sharedAlerts.contains(page.get(0).getName())); - + assertEquals(true, page.get(0).isMissingDataNotificationEnabled()); + // Second page - page = alertService.findSharedAlertsPaged(1, 1, null); + page = alertService.findSharedAlertsPaged(1, 1, null, null, null); assertEquals(1, page.size()); assertTrue(sharedAlerts.contains(page.get(0).getName())); - + assertEquals(page.get(0).isMissingDataNotificationEnabled(), false); + // Third page should be empty - page = alertService.findSharedAlertsPaged(1, 2, null); + page = alertService.findSharedAlertsPaged(1, 2, null, null, null); assertEquals(0, page.size()); } - + @Test public void testFindSharedAlertsMetaPagedWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert2", EXPRESSION, "* * * * *")); @@ -852,46 +1052,159 @@ public void testFindSharedAlertsMetaPagedWithSearchText() { alertService.updateAlert(alert2); alert3.setShared(false); alertService.updateAlert(alert3); - + Set sharedAlerts = new HashSet<>(); sharedAlerts.add("alert1"); sharedAlerts.add("alert2"); - + // Search by owner name - List page = alertService.findSharedAlertsPaged(10, 0, "test1"); + List page = alertService.findSharedAlertsPaged(10, 0, userName1, null, null); assertEquals(1, page.size()); - assertTrue("test1".equals(page.get(0).getOwner().getUserName())); - + assertTrue(userName1.equals(page.get(0).getOwner().getUserName())); + // Search by owner name case insensitive - page = alertService.findSharedAlertsPaged(10, 0, "TeSt1"); + page = alertService.findSharedAlertsPaged(10, 0, userName1.toUpperCase(), null, null); assertEquals(1, page.size()); - assertTrue("test1".equals(page.get(0).getOwner().getUserName())); - + assertTrue(userName1.equals(page.get(0).getOwner().getUserName())); + // Search by alert name - page = alertService.findSharedAlertsPaged(10, 0, "alert2"); + page = alertService.findSharedAlertsPaged(10, 0, "alert2", null, null); assertEquals(1, page.size()); assertTrue("alert2".equals(page.get(0).getName())); - + // Search by alert name case insensitive - page = alertService.findSharedAlertsPaged(10, 0, "aLeRt2"); + page = alertService.findSharedAlertsPaged(10, 0, "aLeRt2", null, null); assertEquals(1, page.size()); assertTrue("alert2".equals(page.get(0).getName())); - + // Search private alert - page = alertService.findSharedAlertsPaged(1, 2, "alert3"); + page = alertService.findSharedAlertsPaged(1, 2, "alert3", null, null); assertEquals(0, 
page.size()); - + // Invalid search text - page = alertService.findSharedAlertsPaged(1, 2, "invalid_search_text"); + page = alertService.findSharedAlertsPaged(1, 2, "invalid_search_text", null, null); assertEquals(0, page.size()); } - + + @Test + public void testFindSharedAlertsMetaPagedWithSorting() { + String userName1 = "test1-" + TestUtils.createRandomName(); + String userName2 = "test2-" + TestUtils.createRandomName(); + String userName3 = "test3-" + TestUtils.createRandomName(); + + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); + PrincipalUser user3 = userService.updateUser(new PrincipalUser(admin, userName3, userName3 + "test3@salesforce.com")); + + + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert2", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert3 = alertService.updateAlert(new Alert(user3, user3, "alert3", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + //Change modified date + alert2.setShared(true); + alertService.updateAlert(alert2); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + alert3.setShared(true); + alertService.updateAlert(alert3); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + alert1.setShared(true); + alertService.updateAlert(alert1); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + + //sort by owner name ascending + List page = alertService.findSharedAlertsPaged(10, 0, null, "ownerName", "ASC"); + assertEquals(3, page.size()); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by owner name descending + page = alertService.findSharedAlertsPaged(10, 0, null, "ownerName", "DESC"); + assertEquals(3, page.size()); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by alert name ascending + page = alertService.findSharedAlertsPaged(10, 0, null, "name", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by alert name descending + page = alertService.findSharedAlertsPaged(10, 0, null, "name", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by create date ascending + page = alertService.findSharedAlertsPaged(10, 0, null, "createdDate", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by create date descending + page = alertService.findSharedAlertsPaged(10, 0, null, "createdDate", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date ascending + page = 
alertService.findSharedAlertsPaged(10, 0, null, "modifiedDate", "ASC"); + assertEquals(alert2.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date descending + page = alertService.findSharedAlertsPaged(10, 0, null, "modifiedDate", "DESC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert2.getName(), page.get(2).getName()); + + //invalid column + try { + page = alertService.findSharedAlertsPaged(10, 0, null, "invalidColumn", "DESC"); + } catch (IllegalArgumentException ex){ + assertNotNull(ex); + } + try { + page = alertService.findSharedAlertsPaged(10, 0, null, "", "DESC"); + } catch (IllegalArgumentException ex) { + assertNotNull(ex); + } + } @Test public void testCountSharedAlertsMetaPaged() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert2", EXPRESSION, "* * * * *")); @@ -904,17 +1217,17 @@ public void testCountSharedAlertsMetaPaged() { alertService.updateAlert(alert2); alert3.setShared(false); alertService.updateAlert(alert3); - + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().build(); assertEquals(2, alertService.countAlerts(context)); } - + @Test public void testCountSharedAlertsMetaPagedWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert2", EXPRESSION, "* * * * *")); @@ -927,26 +1240,26 @@ public void testCountSharedAlertsMetaPagedWithSearchText() { alertService.updateAlert(alert2); alert3.setShared(false); alertService.updateAlert(alert3); - + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("alert").build(); assertEquals(2, alertService.countAlerts(context)); - + // count by alert name context = new 
AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("alert1").build(); assertEquals(1, alertService.countAlerts(context)); - + // count by alert name case insensitive context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("aLeRt1").build(); assertEquals(1, alertService.countAlerts(context)); - + // count by user name - context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("test1").build(); + context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText(userName1).build(); assertEquals(1, alertService.countAlerts(context)); - + // count by user name case insensitive - context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("tEsT1").build(); + context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText(userName1.toUpperCase()).build(); assertEquals(1, alertService.countAlerts(context)); - + // Invalid search text context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().setSearchText("invalid_search_text").build(); assertEquals(0, alertService.countAlerts(context)); @@ -954,14 +1267,18 @@ public void testCountSharedAlertsMetaPagedWithSearchText() { @Test public void testFindSharedAlertsByOwner() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_shared1", EXPRESSION, "* * * * *")); - Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared2", EXPRESSION, "* * * * *")); - Alert alertSharedAdmin = alertService.updateAlert(new Alert(admin, admin, "alert-name-shared3", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedAdmin = alertService.updateAlert(new Alert(admin, admin, alertName3, EXPRESSION, "* * * * *")); alertSharedUser1.setShared(true); alertService.updateAlert(alertSharedUser1); @@ -996,14 +1313,18 @@ public void testFindSharedAlertsByOwner() { @Test public void testFindSharedAlertsMetaByOwner() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String 
userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); - Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_shared1", EXPRESSION, "* * * * *")); - Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared2", EXPRESSION, "* * * * *")); - Alert alertSharedAdmin = alertService.updateAlert(new Alert(admin, admin, "alert-name-shared3", EXPRESSION, "* * * * *")); + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); + + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedUser2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alertSharedAdmin = alertService.updateAlert(new Alert(admin, admin, alertName3, EXPRESSION, "* * * * *")); alertSharedUser1.setShared(true); alertService.updateAlert(alertSharedUser1); @@ -1035,20 +1356,24 @@ public void testFindSharedAlertsMetaByOwner() { alertService.updateAlert(alertSharedAdmin); assertEquals(new ArrayList(), alertService.findSharedAlerts(true, admin, null)); } - + @Test public void testFindPrivateAlertsPagedForNonPrivilegedUser() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - + // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private3", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); alert1.setShared(false); alertService.updateAlert(alert1); @@ -1058,21 +1383,25 @@ public void testFindPrivateAlertsPagedForNonPrivilegedUser() { alertService.updateAlert(alert3); // Assert result is empty for non-privileged user - assertEquals(0, alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 100, 0, null).size()); + 
assertEquals(0, alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 100, 0, null, null, null).size()); } - + @Test public void testCountPrivateAlertsForNonPrivilegedUser() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - + // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private3", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); alert1.setShared(false); alertService.updateAlert(alert1); @@ -1085,22 +1414,25 @@ public void testCountPrivateAlertsForNonPrivilegedUser() { AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).build(); assertEquals(0, alertService.countAlerts(context)); } - + @Test public void testFindPrivateAlertsPagedForPrivilegedUser() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - ManagementService managementService = system.getServiceFactory().getManagementService(); - + // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); managementService.setAdministratorPrivilege(user1, true); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared3", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new 
Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); alert1.setShared(false); alertService.updateAlert(alert1); @@ -1110,41 +1442,44 @@ public void testFindPrivateAlertsPagedForPrivilegedUser() { alertService.updateAlert(alert3); Set alertNames = new HashSet<>(); - + // Fetch first page - List page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 0, null); + List page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 0, null, null, null); assertEquals(1, page.size()); alertNames.add(page.get(0).getName()); - + // Fetch second page - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 1, null); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 1, null, null, null); assertEquals(1, page.size()); alertNames.add(page.get(0).getName()); - + // Fetch third page, should be empty - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 2, null); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 1, 2, null, null, null); assertEquals(0, page.size()); - + // Assert all private alerts are fetched - assertTrue(alertNames.contains("alert-name_private1")); - assertTrue(alertNames.contains("alert-name-private2")); + assertTrue(alertNames.contains(alertName1)); + assertTrue(alertNames.contains(alertName2)); } - + @Test public void testFindPrivateAlertsPagedForPrivilegedUserWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - ManagementService managementService = system.getServiceFactory().getManagementService(); - + // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); managementService.setAdministratorPrivilege(user1, true); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared3", EXPRESSION, "* * * * *")); + String alertName1 = "alert-name_private1" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alert-name-private2" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alert-name-shared3" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); 
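// What the search assertions below pin down (inferred from this test): the searchText argument of the new six-argument findPrivateAlertsForPrivilegedUserPaged is matched case-insensitively against both alert names and owner user names, and shared alerts never appear in the private-alert results even when their names match the search text.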
alert1.setShared(false); alertService.updateAlert(alert1); @@ -1152,55 +1487,140 @@ public void testFindPrivateAlertsPagedForPrivilegedUserWithSearchText() { alertService.updateAlert(alert2); alert3.setShared(true); alertService.updateAlert(alert3); - - // Search by alert name - List page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "alert-name"); + + // Search by alert name + List page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "alert-name", null, null); assertEquals(2, page.size()); - - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "private1"); + + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "private1", null, null); assertEquals(1, page.size()); - + // Search by alert name case insensitive - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "aLerT-NamE"); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "aLerT-NamE", null, null); assertEquals(2, page.size()); - - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "PrIvAtE1"); + + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "PrIvAtE1", null, null); assertEquals(1, page.size()); - + // Search shared alert name - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "shared3"); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "shared3", null, null); assertEquals(0, page.size()); - + // Search shared alert name case insensitive - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "SHaReD3"); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "SHaReD3", null, null); assertEquals(0, page.size()); - + // Search by owner name - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "test2"); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, userName2, null, null); assertEquals(1, page.size()); - assertEquals("test2", page.get(0).getOwner().getUserName()); - + assertEquals(userName2, page.get(0).getOwner().getUserName()); + // Search by owner name case insensitive - page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, "TeSt2"); + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, userName2.toUpperCase(), null, null); assertEquals(1, page.size()); - assertEquals("test2", page.get(0).getOwner().getUserName()); + assertEquals(userName2, page.get(0).getOwner().getUserName()); } - + + @Test + public void testFindPrivateAlertsPagedForPrivilegedUserWithSorting() { + + // By default user is not privileged + String userName1 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); + managementService.setAdministratorPrivilege(user1, true); + + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert1", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert2 = alertService.updateAlert(new Alert(user1, user1, "alert2", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + Alert alert3 = alertService.updateAlert(new Alert(user1, user1, "alert3", EXPRESSION, "* * * * *")); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + //Change modified date + alert1.setShared(false); + alertService.updateAlert(alert1); + try{ + Thread.sleep(1000); + }catch(Exception e) { + } + + //sort by 
alert name ascending + List page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "name", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by alert name descending + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "name", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by create date ascending + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "createdDate", "ASC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert3.getName(), page.get(2).getName()); + + //sort by create date descending + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "createdDate", "DESC"); + assertEquals(alert3.getName(), page.get(0).getName()); + assertEquals(alert2.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date ascending + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "modifiedDate", "ASC"); + assertEquals(alert2.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert1.getName(), page.get(2).getName()); + + //sort by modified date descending + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "modifiedDate", "DESC"); + assertEquals(alert1.getName(), page.get(0).getName()); + assertEquals(alert3.getName(), page.get(1).getName()); + assertEquals(alert2.getName(), page.get(2).getName()); + + //invalid column + try { + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "invalidColumn", "DESC"); + } catch (IllegalArgumentException ex) { + assertNotNull(ex); + } + try { + page = alertService.findPrivateAlertsForPrivilegedUserPaged(user1, 10, 0, null, "", "DESC"); + } catch (IllegalArgumentException ex) { + assertNotNull(ex); + } + } + @Test public void testCountPrivateAlertsForPrivilegedUser() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - ManagementService managementService = system.getServiceFactory().getManagementService(); - + // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); managementService.setAdministratorPrivilege(user1, true); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared3", EXPRESSION, "* * * * *")); + 
String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); alert1.setShared(false); alertService.updateAlert(alert1); @@ -1208,26 +1628,33 @@ public void testCountPrivateAlertsForPrivilegedUser() { alertService.updateAlert(alert2); alert3.setShared(true); alertService.updateAlert(alert3); - + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).build(); assertEquals(2, alertService.countAlerts(context)); + //mockQuery("Alert.countPrivateAlertsForPrivilegedUser",2L); + //assertEquals(2, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + } - + @Test public void testCountPrivateAlertsForPrivilegedUserWithSearchText() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - ManagementService managementService = system.getServiceFactory().getManagementService(); - + + String namedQuery = "Alert.countPrivateAlertsForPrivilegedUserWithSearchText"; // By default user is not privileged - PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, "test1", "test1@salesforce.com")); + String userName1 = TestUtils.createRandomName(); + String userName2 = TestUtils.createRandomName(); + + PrincipalUser user1 = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "test1@salesforce.com")); managementService.setAdministratorPrivilege(user1, true); - PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, "test2", "test2@salesforce.com")); + PrincipalUser user2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "test2@salesforce.com")); - Alert alert1 = alertService.updateAlert(new Alert(user1, user1, "alert-name_private1", EXPRESSION, "* * * * *")); - Alert alert2 = alertService.updateAlert(new Alert(user2, user2, "alert-name-private2", EXPRESSION, "* * * * *")); - Alert alert3 = alertService.updateAlert(new Alert(user2, user2, "alert-name-shared3", EXPRESSION, "* * * * *")); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert1 = alertService.updateAlert(new Alert(user1, user1, alertName1, EXPRESSION, "* * * * *")); + String alertName2 = "alertname-" + TestUtils.createRandomName(); + Alert alert2 = alertService.updateAlert(new Alert(user2, user2, alertName2, EXPRESSION, "* * * * *")); + String alertName3 = "alertname-" + TestUtils.createRandomName(); + Alert alert3 = alertService.updateAlert(new Alert(user2, user2, alertName3, EXPRESSION, "* * * * *")); alert1.setShared(false); alertService.updateAlert(alert1); @@ -1235,39 +1662,55 @@ public void testCountPrivateAlertsForPrivilegedUserWithSearchText() { alertService.updateAlert(alert2); alert3.setShared(true); alertService.updateAlert(alert3); - - // count by alert name + + // count by alert name AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("alert").build(); assertEquals(2, alertService.countAlerts(context)); - + 
//mockQuery(namedQuery,2L); + //assertEquals(2, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + // count by alert name case insensitive context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("AlErT").build(); assertEquals(2, alertService.countAlerts(context)); - + //mockQuery(namedQuery,2L); + //assertEquals(2, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + // count by alert name - context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("alert-name_private1").build(); + context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText(alertName1).build(); assertEquals(1, alertService.countAlerts(context)); - + //mockQuery(namedQuery,1L); + //assertEquals(1, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + // count by owner name - context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("test2").build(); + context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText(userName2).build(); assertEquals(1, alertService.countAlerts(context)); - + //mockQuery(namedQuery,1L); + //assertEquals(1, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + // count by owner name case insensitive - context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("TeST2").build(); + context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText(userName2.toUpperCase()).build(); assertEquals(1, alertService.countAlerts(context)); - + //mockQuery(namedQuery,1L); + //assertEquals(1, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); + // count by invalid name context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().setPrincipalUser(user1).setSearchText("invalid_name").build(); - assertEquals(0, alertService.countAlerts(context)); + assertEquals(0, alertService.countAlerts(context)); + //mockQuery(namedQuery,0L); + //assertEquals(0, Alert.countPrivateAlertsForPrivilegedUser(em, admin, context.getSearchText())); } @Test public void testAlertSerDes() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", EXPRESSION, "* * * * *"); + String alertName1 = "alertname-" + TestUtils.createRandomName(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName1, EXPRESSION, "* * * * *"); Notification notification = new Notification("notification", alert, "notifier-name", new ArrayList(), 5000L); + notification.setArticleNumber("an"); + notification.setSRActionable(true); + notification.setProductTag("pT"); + notification.setElementName("elN"); + notification.setEventName("evN"); Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN, "trigger-name", 0.95, 60000); alert.setNotifications(Arrays.asList(notification)); @@ -1315,7 +1758,7 @@ public void testAlertSerDes() { } catch (IOException e) { fail("IOException while serializing/deserializing alert."); - } + } } @@ -1345,10 +1788,9 @@ private void 
_assertEquals(Trigger expected, Trigger actual) { @Test public void testUpdateNotification() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); - Alert expected = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", EXPRESSION, "* * * * *"); + String alertName = "alertname-" + TestUtils.createRandomName(); + Alert expected = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName, EXPRESSION, "* * * * *"); Notification notification = new Notification("notification", expected, "notifier-name", new ArrayList(), 5000L); Trigger trigger = new Trigger(expected, TriggerType.GREATER_THAN, "trigger-name", 0.95, 60000); @@ -1373,10 +1815,10 @@ public void testUpdateNotification() { assertTrue(n.getActiveStatusMap().size() == 1); assertTrue(n.getCooldownExpirationMap().size() == 1); } - + @Test public void testAlertsCountContext() { - String userName = createRandomName(); + String userName = TestUtils.createRandomName(); PrincipalUser user = new PrincipalUser(admin, userName, userName + "@testcompany.com"); // Test count user alerts context @@ -1423,16 +1865,15 @@ public void testAlertsCountContext() { @Test public void testTriggerInertiaSetting() { - UserService userService = system.getServiceFactory().getUserService(); - AlertService alertService = system.getServiceFactory().getAlertService(); ArrayList expressionArray = new ArrayList (Arrays.asList( "ABOVE(-1d:scope:metric:avg:4h-avg, #0.5#, #avg#)", "LIMIT( -21d:-1d:scope:metricA:avg:4h-avg, -1d:scope:metricB:avg:4h-avg,#1#)", "-20m:-0d:scone.*.*.cs19:acs.DELETERequestProcessingTime_95thPercentile{device=*acs2-1*}:avg", - "DOWNSAMPLE(-2d:alerts.scheduled:alert-1429851:zimsum, #5m-sum#,#-2d#, #-0m#, #0#)" + "DOWNSAMPLE(-2d:argus.alerts:scheduled{alertId=1429851}:zimsum, #5m-sum#,#-2d#, #-0m#, #0#)" )); for (String currentExpression: expressionArray) { - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name", currentExpression, "* * * * *"); + String alertName = "alertname-" + TestUtils.createRandomName(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), alertName, currentExpression, "* * * * *"); try { Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN, "trigger-name", 0.95, 120000); } catch (IllegalArgumentException ex) { @@ -1441,4 +1882,4 @@ public void testTriggerInertiaSetting() { } } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertsCountContextTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertsCountContextTest.java new file mode 100644 index 000000000..b24e360ec --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/AlertsCountContextTest.java @@ -0,0 +1,64 @@ +package com.salesforce.dva.argus.service.alert; + +import com.salesforce.dva.argus.entity.PrincipalUser; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +public class AlertsCountContextTest { + + private static String searchText; + private static PrincipalUser owner; + + @BeforeClass + static public void setUpClass() { + owner = new PrincipalUser(null, "owner", "owner@mycompany.abc"); + searchText = "testSearchText"; + } + + @Test + public void isCountUserAlerts() { + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts() + .setPrincipalUser(owner).setSearchText(searchText).build(); + assertEquals(true, context.isCountUserAlerts()); + context = new AlertsCountContext.AlertsCountContextBuilder().countUserAlerts().build(); + assertEquals(false, context.isCountUserAlerts()); + } + + @Test + public void isCountSharedAlerts() { + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts() + .setPrincipalUser(owner).setSearchText(searchText).build(); + assertEquals(true, context.isCountSharedAlerts()); + context = new AlertsCountContext.AlertsCountContextBuilder().countSharedAlerts().countUserAlerts().build(); + assertEquals(false, context.isCountSharedAlerts()); + } + + @Test + public void isCountPrivateAlerts() { + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts() + .setPrincipalUser(owner).setSearchText(searchText).build(); + assertEquals(true, context.isCountPrivateAlerts()); + context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts().build(); + assertEquals(false, context.isCountPrivateAlerts()); + } + + @Test + public void getPrincipalUser() { + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts() + .setPrincipalUser(owner).setSearchText(searchText).build(); + assertEquals(owner, context.getPrincipalUser()); + assertNotEquals(null, context.getPrincipalUser()); + context = new AlertsCountContext.AlertsCountContextBuilder().setPrincipalUser(null).build(); + assertEquals(null, context.getPrincipalUser()); + } + + @Test + public void getSearchText() { + AlertsCountContext context = new AlertsCountContext.AlertsCountContextBuilder().countPrivateAlerts() + .setPrincipalUser(owner).setSearchText(searchText).build(); + assertEquals(searchText, context.getSearchText()); + } +} \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/DefaultAlertServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/DefaultAlertServiceTest.java new file mode 100644 index 000000000..75f68a5db --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/DefaultAlertServiceTest.java @@ -0,0 +1,1603 @@ +package com.salesforce.dva.argus.service.alert; + +import static com.salesforce.dva.argus.TestUtils.generateAlert; +import static com.salesforce.dva.argus.TestUtils.getHistory; +import static com.salesforce.dva.argus.TestUtils.getMetric; +import 
static com.salesforce.dva.argus.TestUtils.getNotification; +import static com.salesforce.dva.argus.TestUtils.getTrigger; +import static com.salesforce.dva.argus.service.metric.ElasticSearchConsumerOffsetMetricsService.METRIC_NAME; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.math.BigInteger; +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; + +import javax.persistence.EntityManager; +import javax.persistence.Persistence; + +import com.google.common.collect.ImmutableList; +import com.salesforce.dva.argus.service.*; +import com.salesforce.dva.argus.service.alert.notifier.EmailNotifier; +import com.salesforce.dva.argus.service.alert.notifier.RefocusNotifier; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetrievalContext; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever; +import com.salesforce.dva.argus.service.alert.testing.AlertTestResults; +import com.salesforce.dva.argus.service.metric.MetricQueryResult; + +import com.salesforce.dva.argus.service.metric.transform.TransformFactory; +import com.salesforce.dva.argus.service.monitor.DataLagService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.RequestContextHolder; +import org.apache.commons.lang3.tuple.Pair; +import org.junit.*; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.entity.Trigger.TriggerType; +import com.salesforce.dva.argus.service.MQService.MQQueue; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; +import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; +import org.powermock.reflect.Whitebox; + +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + +@RunWith(org.mockito.junit.MockitoJUnitRunner.class) +public class DefaultAlertServiceTest { + private static final String EXPRESSION = + "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + + @Mock private Provider _emProviderMock; + @Mock private MQService _mqServiceMock; + @Mock private 
MetricService _metricServiceMock; + @Mock private TSDBService _tsdbServiceMock; + @Mock private MailService _mailServiceMock; + @Mock private HistoryService _historyServiceMock; + @Mock private MonitorService _monitorServiceMock; + @Mock private AuditService _auditServiceMock; + @Mock private ImageDataRetriever _imageDataRetrieverMock; + @Mock private ObjectMapper _mapper; + + private DefaultAlertService alertService; + private EntityManager em; + + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + @Before + public void setup() { + em = mock(EntityManager.class); + alertService = new DefaultAlertService(system.getConfiguration(), _mqServiceMock, _metricServiceMock, _auditServiceMock, + _tsdbServiceMock, _mailServiceMock, _historyServiceMock, _monitorServiceMock, _imageDataRetrieverMock, system.getNotifierFactory(), + _emProviderMock); + try { + Field field = alertService.getClass().getDeclaredField("_mapper"); + field.setAccessible(true); + field.set(alertService, _mapper); + } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { + fail("Failed to set mocked ObjectMapper using reflection."); + } + } + + @After + public void teardown() { + // forcing the gc to clean up. Otherwise the EM created gets injected by guice in ut's that run afterwards. So weird + em = null; + System.gc(); + } + + + @Test + public void testExecuteScheduledAlerts_ForOneTimeSeries() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 50, inertiaPeriod = 1000 * 60 * 5; + int cooldownPeriod = 1000 * 5; + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Metric metric = _createMetric(TestUtils.createRandomName(), TestUtils.createRandomName(), triggerMinValue, inertiaPeriod); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, false); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + } + + @Test + public void testExecuteScheduledAlerts_ForOneTimeSeriesMultipleTriggers() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps = new HashMap(); + 
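+        // Datapoint values are written as strings below and converted into Long-to-Double
+        // entries by the _convertDatapoints helper defined further down in this class.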
dps.put(1000L, "11"); + dps.put(2000L, "21"); + dps.put(3000L, "31"); + metric.setDatapoints(_convertDatapoints(dps)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger1 = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger1", 10, 0); + _setTriggerId(trigger1, "100002"); + Trigger trigger2 = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger2", 5, 0); + _setTriggerId(trigger2, "100003"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0); + _setNotificationId(notification, "100004"); + + alert.setTriggers(Arrays.asList(trigger1, trigger2)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, false); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(2, notificationCount.get()); + } + + @Test + public void testExecuteScheduledAlerts_ForNoDataTrigger() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger1 = new Trigger(alert, TriggerType.NO_DATA, "testTrigger1", 10, 1000*60); + _setTriggerId(trigger1, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0); + _setNotificationId(notification, "100004"); + + alert.setTriggers(Arrays.asList(trigger1)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(), alert, notification, false); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + } + + @Test + public void testExecuteScheduledAlerts_OnCooldown() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps = new HashMap(); + dps.put(1000L, "11"); + dps.put(2000L, "21"); + dps.put(3000L, "31"); + metric.setDatapoints(_convertDatapoints(dps)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + 600000); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); + alert.setNotifications(Arrays.asList(new Notification[] { 
notification }));
+        notification.setTriggers(alert.getTriggers());
+        alert.setEnabled(true);
+
+        DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount,
+                Arrays.asList(metric), alert, notification, false);
+
+        //This will set the notification on cooldown for the given metric and trigger.
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000));
+        //This evaluation should not send a notification. Hence the notification count would still be 1.
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000));
+
+        assertEquals(1, notificationCount.get());
+        assertEquals(1, notification.getCooldownExpirationMap().size());
+    }
+
+    @Test
+    public void testExecuteScheduledAlerts_ForMoreThanOneTimeSeries() {
+        UserService userService = system.getServiceFactory().getUserService();
+        final AtomicInteger notificationCount = new AtomicInteger(0);
+        final AtomicInteger clearCount = new AtomicInteger(0);
+
+        Metric metric1 = new Metric("scope1", "metric");
+        Map dps1 = new HashMap();
+        dps1.put(1000L, "1");
+        dps1.put(2000L, "2");
+        dps1.put(3000L, "3");
+        metric1.setDatapoints(_convertDatapoints(dps1));
+
+        Metric metric2 = new Metric("scope2", "metric");
+        Map dps2 = new HashMap();
+        dps2.put(4000L, "11");
+        dps2.put(5000L, "20");
+        dps2.put(6000L, "30");
+        metric2.setDatapoints(_convertDatapoints(dps2));
+
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *");
+        _setAlertId(alert, "100001");
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0);
+        _setTriggerId(trigger, "100002");
+        Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0);
+        _setNotificationId(notification, "100003");
+
+        alert.setTriggers(Arrays.asList(new Trigger[] { trigger }));
+        alert.setNotifications(Arrays.asList(new Notification[] { notification }));
+        notification.setTriggers(alert.getTriggers());
+        alert.setEnabled(true);
+
+        DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount,
+                Arrays.asList(metric1, metric2), alert, notification, false);
+
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000));
+
+        assertEquals(1, notificationCount.get());
+    }
+
+    @Test
+    public void testExecuteScheduledAlerts_ClearNotification() {
+        UserService userService = system.getServiceFactory().getUserService();
+        final AtomicInteger notificationCount = new AtomicInteger(0);
+        final AtomicInteger clearCount = new AtomicInteger(0);
+
+        Metric metric = new Metric("scope", "metric");
+        Map dps = new HashMap();
+        dps.put(4000L, "11");
+        dps.put(5000L, "20");
+        dps.put(6000L, "30");
+        metric.setDatapoints(_convertDatapoints(dps));
+
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *");
+        _setAlertId(alert, "100001");
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0);
+        _setTriggerId(trigger, "100002");
+        Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 0);
+        _setNotificationId(notification, "100003");
+
+        alert.setTriggers(Arrays.asList(new Trigger[] { trigger }));
+        alert.setNotifications(Arrays.asList(new Notification[] { notification }));
+        notification.setTriggers(alert.getTriggers());
+        alert.setEnabled(true);
+
+        DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount,
+                Arrays.asList(metric), alert, notification, false);
+
+        spyAlertService.executeScheduledAlerts(10, 1000);
+        assertEquals(1, notificationCount.get());
+        //assertEquals(true, notification.isActiveForTriggerAndMetric(trigger, metric));
+
+        notificationCount.set(0);
+        clearCount.set(0);
+
+        dps = new HashMap();
+        dps.put(4000L, "1");
+        dps.put(5000L, "2");
+        dps.put(6000L, "3");
+        metric.setDatapoints(_convertDatapoints(dps));
+
+        spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, Arrays.asList(metric),
+                alert, notification, false);
+
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000));
+        assertEquals(0, notificationCount.get());
+        assertEquals(1, clearCount.get());
+        //assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric));
+
+    }
+
+    @Test
+    public void testExecuteScheduledAlerts_OnCooldownWithRefocusNotifier() {
+        ServiceFactory sFactory = system.getServiceFactory();
+        UserService userService = sFactory.getUserService();
+
+        final AtomicInteger notificationCount = new AtomicInteger(0);
+        final AtomicInteger clearCount = new AtomicInteger(0);
+
+        Metric metric = new Metric("scope", "metric");
+        Map dps = new HashMap();
+        dps.put(1000L, "11");
+        dps.put(2000L, "21");
+        dps.put(3000L, "31");
+        metric.setDatapoints(_convertDatapoints(dps));
+
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *");
+        _setAlertId(alert, "100001");
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0);
+        _setTriggerId(trigger, "100002");
+        Notification notification = new Notification("testNotification", alert, RefocusNotifier.class.getName(), new ArrayList(),
+                600000); //cooldown logic does not apply to the Refocus notifier
+        _setNotificationId(notification, "100003");
+
+        alert.setTriggers(Arrays.asList(new Trigger[] { trigger }));
+        alert.setNotifications(Arrays.asList(new Notification[] { notification }));
+        notification.setTriggers(alert.getTriggers());
+        alert.setEnabled(true);
+
+        DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount,
+                Arrays.asList(metric), alert, notification, false);
+
+        //This will set the notification on cooldown for the given metric and trigger.
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000));
+        //This evaluation should still send a notification for Refocus. Hence the notification count would increase by 1.
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000));
+
+        assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless
+
+        assertEquals(2, notificationCount.get()); //notification was sent out even on cooldown for refocus
+        assertEquals(0, notification.getCooldownExpirationMap().size()); //refocus notifier does not record/persist cooldown info
+    }
+
+    @Test
+    public void testExecuteScheduledAlerts_ClearNotificationWithRefocusNotifier() {
+        UserService userService = system.getServiceFactory().getUserService();
+        final AtomicInteger notificationCount = new AtomicInteger(0);
+        final AtomicInteger clearCount = new AtomicInteger(0);
+
+        Metric metric = new Metric("scope", "metric");
+        Map dps = new HashMap();
+        dps.put(4000L, "11");
+        dps.put(5000L, "20");
+        dps.put(6000L, "30");
+        metric.setDatapoints(_convertDatapoints(dps));
+
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *");
+        _setAlertId(alert, "100001");
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0);
+        _setTriggerId(trigger, "100002");
+        Notification notification = new Notification("testNotification", alert, RefocusNotifier.class.getName(), new ArrayList(), 0);
+        _setNotificationId(notification, "100003");
+
+        alert.setTriggers(Arrays.asList(new Trigger[] { trigger }));
+        alert.setNotifications(Arrays.asList(new Notification[] { notification }));
+        notification.setTriggers(alert.getTriggers());
+        alert.setEnabled(true);
+
+        DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount,
+                Arrays.asList(metric), alert, notification, false);
+
+        assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000));
+        assertEquals(1, notificationCount.get());
+        assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless
+
+        notificationCount.set(0);
+        clearCount.set(0);
+
+        dps = new HashMap();
+        dps.put(4000L, "1");
+        dps.put(5000L, "2");
+        dps.put(6000L, "3");
+        metric.setDatapoints(_convertDatapoints(dps));
+
+        spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, Arrays.asList(metric),
+                alert, notification, false);
+
+        spyAlertService.executeScheduledAlerts(10, 1000);
+        assertEquals(0, notificationCount.get());
+        assertEquals(1, clearCount.get());
+        assertEquals(false, notification.isActiveForTriggerAndMetric(trigger, metric)); // refocus notification is stateless
+
+    }
+
+
+    @Test
+    public void testExecuteScheduledAlerts_AlertWithMultipleMetricsNotificationSentForEach() {
+        UserService userService = system.getServiceFactory().getUserService();
+        final AtomicInteger notificationCount = new AtomicInteger(0);
+        final AtomicInteger clearCount = new AtomicInteger(0);
+
+        Metric metric1 = new Metric("scope1", "metric");
+        Map dps1 = new HashMap();
+        dps1.put(1000L, "11");
+        dps1.put(2000L, "20");
+        dps1.put(3000L, "30");
+        metric1.setDatapoints(_convertDatapoints(dps1));
+
+        Metric metric2 = new Metric("scope2", "metric");
+        Map dps2 = new HashMap();
+        dps2.put(4000L, "11");
+        dps2.put(5000L, "20");
+        dps2.put(6000L, "30");
+        metric2.setDatapoints(_convertDatapoints(dps2));
+
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *");
+        _setAlertId(alert, "100001");
+        Trigger trigger = new Trigger(alert,
TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 300000); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); + alert.setNotifications(Arrays.asList(new Notification[] { notification })); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric1, metric2), alert, notification, false); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000)); + + assertEquals(2, notificationCount.get()); + assertEquals(2, notification.getCooldownExpirationMap().size()); + assertEquals(2, notification.getActiveStatusMap().size()); + } + + /** + * This test case is for the following scenario: + * + * Evaluation1: + * - metric1 violates threshold, notification sent out, notification set on cooldown for metric1. + * - metric2 does not violate threshold. + * Evaluation2: + * - metric1 goes back to normal state, since notification was in active state a clear notification is sent out. + * - metric2 violates threshold, notification is sent out, notification set on cooldown for metric2. + */ + @Test + public void testExecuteScheduledAlerts_Scenario1() { + UserService userService = system.getServiceFactory().getUserService(); + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Metric metric1 = new Metric("scope1", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric1.setDatapoints(_convertDatapoints(dps1)); + + Metric metric2 = new Metric("scope2", "metric"); + Map dps2 = new HashMap(); + dps2.put(4000L, "1"); + dps2.put(5000L, "2"); + dps2.put(6000L, "3"); + metric2.setDatapoints(_convertDatapoints(dps2)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 10, 0); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), 300000); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(new Trigger[] { trigger })); + alert.setNotifications(Arrays.asList(new Notification[] { notification })); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric1, metric2), alert, notification, false); + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000)); + + assertEquals(1, notificationCount.get()); + assertEquals(1, notification.getCooldownExpirationMap().size()); + assertEquals(1, notification.getActiveStatusMap().size()); + + notificationCount.set(0); + clearCount.set(0); + + metric1 = new Metric("scope1", "metric"); + dps1 = new HashMap(); + dps1.put(1000L, "1"); + dps1.put(2000L, "2"); + dps1.put(3000L, "3"); + metric1.setDatapoints(_convertDatapoints(dps1)); + + metric2 = new Metric("scope2", "metric"); + dps2 = new HashMap(); + dps2.put(4000L, "11"); + dps2.put(5000L, "21"); + dps2.put(6000L, 
"31"); + metric2.setDatapoints(_convertDatapoints(dps2)); + + spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, Arrays.asList(metric1, metric2), + alert, notification, false); + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(10, 1000)); + + assertEquals(1, notificationCount.get()); + assertEquals(1, clearCount.get()); + assertEquals(2, notification.getCooldownExpirationMap().size()); + assertEquals(1, _getActiveSize(notification.getActiveStatusMap())); + } + + private int _getActiveSize(Map activeStatusMap) { + int size = 0; + for(Map.Entry entry : activeStatusMap.entrySet()) { + if(entry.getValue()) { + size++; + } + } + return size; + } + + @Test + public void testSendNotificationWhenImageSendingIsEnabled() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = generateAlert("testAlert", userService.findAdminUser(), "-1h:"+METRIC_NAME); + alert.setEnabled(true); + _setAlertId(alert, "1"); + + Trigger trigger = getTrigger(alert, TriggerType.GREATER_THAN, "testTrigger", "2.0", "1"); + List triggerList = ImmutableList.of(trigger); + alert.setTriggers(triggerList); + + Notification notification = getNotification("EmailNotifier", + EmailNotifier.class.getName(), alert, ImmutableList.of("test@salesforce.com")); + alert.addNotification(notification); + + Metric metric = createMetric(); + History history = getHistory(); + + Pair evaluatedMetricSnapshotDetails = Pair.of("img1", "Test String".getBytes()); + when(_imageDataRetrieverMock.getAnnotatedImage(any(ImageDataRetrievalContext.class))).thenReturn(evaluatedMetricSnapshotDetails); + when(_imageDataRetrieverMock.getImageURL(evaluatedMetricSnapshotDetails)).thenReturn("https://localhost:8080/img1"); + + alertService.sendNotification(trigger, metric, history, notification, alert, 2L, 500L, "triggered"); + + verify(_imageDataRetrieverMock, times(1)).getAnnotatedImage(any(ImageDataRetrievalContext.class)); + verify(_imageDataRetrieverMock, times(1)).getImageURL(evaluatedMetricSnapshotDetails); + } + + @Test + public void testSendNotificationWhenImageSendingIsDisabled() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = generateAlert("testAlert", userService.findAdminUser(), "-1h:"+METRIC_NAME); + alert.setEnabled(true); + _setAlertId(alert, "1"); + + Trigger trigger = getTrigger(alert, TriggerType.GREATER_THAN, "testTrigger", "2.0", "1"); + List triggerList = ImmutableList.of(trigger); + alert.setTriggers(triggerList); + + Notification notification = getNotification("EmailNotifier", + EmailNotifier.class.getName(), alert, ImmutableList.of("test@salesforce.com")); + alert.addNotification(notification); + + Metric metric = createMetric(); + History history = getHistory(); + + Pair evaluatedMetricSnapshotDetails = Pair.of("img1", "Test String".getBytes()); + + alertService.sendNotification(trigger, metric, history, notification, alert, 2L, 500L, "notified"); + + verify(_imageDataRetrieverMock, never()).getAnnotatedImage(any(ImageDataRetrievalContext.class)); + verify(_imageDataRetrieverMock, never()).getImageURL(evaluatedMetricSnapshotDetails); + } + + @Test + public void testClearNotificationWhenImageSendingIsEnabled() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = generateAlert("testAlert", userService.findAdminUser(), "-1h:"+METRIC_NAME); + alert.setEnabled(true); + _setAlertId(alert, "1"); + + Trigger trigger = getTrigger(alert, TriggerType.GREATER_THAN, "testTrigger", 
"2.0", "1"); + List triggerList = ImmutableList.of(trigger); + alert.setTriggers(triggerList); + + Notification notification = getNotification("EmailNotifier", + EmailNotifier.class.getName(), alert, ImmutableList.of("test@salesforce.com")); + alert.addNotification(notification); + + Metric metric = createMetric(); + History history = getHistory(); + + Pair evaluatedMetricSnapshotDetails = Pair.of("img1", "Test String".getBytes()); + when(_imageDataRetrieverMock.getAnnotatedImage(any(ImageDataRetrievalContext.class))).thenReturn(evaluatedMetricSnapshotDetails); + when(_imageDataRetrieverMock.getImageURL(evaluatedMetricSnapshotDetails)).thenReturn("https://localhost:8080/img1"); + + alertService.sendClearNotification(trigger, metric, history, notification, alert, 2L, "cleared"); + + verify(_imageDataRetrieverMock, times(1)).getAnnotatedImage(any(ImageDataRetrievalContext.class)); + verify(_imageDataRetrieverMock, times(1)).getImageURL(evaluatedMetricSnapshotDetails); + } + + @Test + public void testClearNotificationWhenImageSendingIsDisabled() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = generateAlert("testAlert", userService.findAdminUser(), "-1h:"+METRIC_NAME); + alert.setEnabled(true); + _setAlertId(alert, "1"); + + Trigger trigger = getTrigger(alert, TriggerType.GREATER_THAN, "testTrigger", "2.0", "1"); + List triggerList = ImmutableList.of(trigger); + alert.setTriggers(triggerList); + + Notification notification = getNotification("EmailNotifier", + EmailNotifier.class.getName(), alert, ImmutableList.of("test@salesforce.com")); + alert.addNotification(notification); + + Metric metric = createMetric(); + History history = getHistory(); + + Pair evaluatedMetricSnapshotDetails = Pair.of("img1", "Test String".getBytes()); + + alertService.sendClearNotification(trigger, metric, history, notification, alert, 2L, "missingdata"); + + verify(_imageDataRetrieverMock, never()).getAnnotatedImage(any(ImageDataRetrievalContext.class)); + verify(_imageDataRetrieverMock, never()).getImageURL(evaluatedMetricSnapshotDetails); + } + + private static Metric createMetric() { + SecureRandom random = new SecureRandom(); + int datapointCount = ((int) (random.nextDouble() * 500)) + 1; + Metric result = new Metric("testScopeName", "TestMetric"); + Map datapoints = new TreeMap<>(); + + long timestamp = 1L; + for (int i = 0; i < datapointCount; i++) { + datapoints.put(timestamp+1, random.nextDouble() * 500); + } + + Map tags = new HashMap<>(); + tags.put("source", "unittest"); + result.setDatapoints(datapoints); + result.setTags(tags); + return result; + } + + @Test + public void testGetTriggerFiredDatapointTime() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 90; + long inertia = 10; + long startTime = 1; + long expectedTriggerTime; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap(); + + datapoints.putAll(_createDatapoints(inertia + 1, thresholdValue, startTime, true)); + metric.setDatapoints(_convertDatapoints(datapoints)); + expectedTriggerTime = datapoints.size(); + + long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + + assertEquals(expectedTriggerTime, actualValue); + startTime = 
datapoints.size() + 1; + datapoints.putAll(_createDatapoints(201, thresholdValue, startTime, false)); + metric.setDatapoints(_convertDatapoints(datapoints)); + actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + assertEquals(expectedTriggerTime, actualValue); + startTime = datapoints.size() + 1; + datapoints.putAll(_createDatapoints(inertia - 1, thresholdValue, startTime, true)); + metric.setDatapoints(_convertDatapoints(datapoints)); + actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + assertEquals(expectedTriggerTime, actualValue); + startTime = datapoints.size() + 1; + datapoints.putAll(_createDatapoints(inertia + 1, thresholdValue, startTime, true)); + metric.setDatapoints(_convertDatapoints(datapoints)); + actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + expectedTriggerTime = datapoints.size(); + assertEquals(expectedTriggerTime, actualValue); + startTime = datapoints.size() + 1; + datapoints.putAll(_createDatapoints(201, thresholdValue, startTime, false)); + metric.setDatapoints(_convertDatapoints(datapoints)); + actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + assertEquals(expectedTriggerTime, actualValue); + } + + @Test + public void testGetTriggerFiredDatapointTimeWhenOneDatapointAndZeroInertia() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 90; + long inertia = 0; + long startTime = 1000; + long expectedTriggerTime; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap(); + + datapoints.putAll(_createDatapoints(1, thresholdValue, startTime, true)); + metric.setDatapoints(_convertDatapoints(datapoints)); + expectedTriggerTime = startTime; + + long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + + assertEquals(expectedTriggerTime, actualValue); + } + + @Test + public void testGetTriggerFiredDatapointTimeWhenOneDatapointAndInertiaOne() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 90; + long inertia = 1; + long startTime = 1000; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap(); + + datapoints.putAll(_createDatapoints(1, thresholdValue, startTime, true)); + metric.setDatapoints(_convertDatapoints(datapoints)); + + Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime); + + assertNull(actualValue); + } + + @Test + public void testGetTriggerFiredDatapointTimeWhenNoDatapoints() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 90; + long inertia = 0; + long startTime = 1000; + 
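+        // _createDatapoints(0, ...) below produces an empty datapoint map, so no trigger
+        // can fire and getTriggerFiredDatapointTime is expected to return null.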
Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia);
+        Map datapoints = new HashMap();
+
+        datapoints.putAll(_createDatapoints(0, thresholdValue, startTime, true));
+        metric.setDatapoints(_convertDatapoints(datapoints));
+
+        Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), startTime);
+
+        assertNull(actualValue);
+    }
+
+    @Test
+    public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps() {
+        UserService userService = system.getServiceFactory().getUserService();
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *");
+        Metric metric = new Metric("scope", "metric");
+        double thresholdValue = 1;
+        long inertia = 5*60*1000;
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia);
+        Map datapoints = new HashMap();
+        datapoints.put(0L, "1");
+        datapoints.put(inertia, "1");
+        metric.setDatapoints(_convertDatapoints(datapoints));
+        long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L);
+        long expectedTriggerTime = 5*60*1000;
+        assertEquals(expectedTriggerTime, actualValue);
+    }
+
+    @Test
+    public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps2() {
+        UserService userService = system.getServiceFactory().getUserService();
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *");
+        Metric metric = new Metric("scope", "metric");
+        double thresholdValue = 1;
+        long inertia = 5*60*1000;
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia);
+        Map datapoints = new HashMap();
+        datapoints.put(0L, "1");
+        datapoints.put(3*60*1000L, "1");
+        datapoints.put(inertia, "1");
+        metric.setDatapoints(_convertDatapoints(datapoints));
+        long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L);
+        long expectedTriggerTime = 5*60*1000;
+        assertEquals(expectedTriggerTime, actualValue);
+    }
+
+    @Test
+    public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps3() {
+        UserService userService = system.getServiceFactory().getUserService();
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *");
+        Metric metric = new Metric("scope", "metric");
+        double thresholdValue = 1;
+        long inertia = 5*60*1000;
+        Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia);
+        Map datapoints = new HashMap();
+        datapoints.put(0L, "1");
+        datapoints.put(9*60*1000L, "1");
+        metric.setDatapoints(_convertDatapoints(datapoints));
+        long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L);
+        long expectedTriggerTime = 9*60*1000;
+        assertEquals(expectedTriggerTime, actualValue);
+    }
+
+    private Map<Long, Double> _convertDatapoints(Map<Long, String> datapoints) {
+        Map<Long, Double> newDps = new HashMap<>();
+        for (Map.Entry<Long, String> dp : datapoints.entrySet()) {
+            newDps.put(dp.getKey(), Double.parseDouble(dp.getValue()));
+        }
+        return newDps;
+    }
+
+    @Test
+    public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps4() {
+        UserService userService = system.getServiceFactory().getUserService();
+        Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *");
+        Metric metric = new Metric("scope",
"metric"); + double thresholdValue = 1; + long inertia = 5*60*1000; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap(); + datapoints.put(0L, 2.0); + datapoints.put(3*60*1000L, 2.0); + datapoints.put(6*60*1000L, 2.0); + datapoints.put(7*60*1000L, 0.0); + datapoints.put(9*60*1000L, 2.0); + metric.setDatapoints(datapoints); + long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); + long expectedTriggerTime = 6 * 60 * 1000; + assertEquals(expectedTriggerTime, actualValue); + } + + @Test + public void testGetTriggerFiredDatapointTimeWhenMissingTimeStamps5() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 1; + long inertia = 5*60*1000; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap<>(); + datapoints.put(0L, 2.0); + datapoints.put(3*60*1000L, 0.0); + datapoints.put(6*60*1000L, 2.0); + datapoints.put(7*60*1000L, 0.0); + datapoints.put(9*60*1000L, 2.0); + metric.setDatapoints(datapoints); + Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); + assertNull(actualValue); + } + + @Test + public void testGetTriggerFiredDatapointTimeWhenMissingTimeStampsReturnNull() { + UserService userService = system.getServiceFactory().getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + Metric metric = new Metric("scope", "metric"); + double thresholdValue = 1; + long inertia = 5*60*1000; + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "name_test", thresholdValue, inertia); + Map datapoints = new HashMap<>(); + datapoints.put(0L, 1.0); + datapoints.put(2*60*1000L, 0.0); + datapoints.put(inertia, 1.0); + metric.setDatapoints(datapoints); + Long actualValue = alertService.getTriggerFiredDatapointTime(trigger, metric, alert.getExpression(), 1L); + assertNull(actualValue); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithNoDataTriggerWithSkippedEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 50, inertiaPeriod = 1000; + int cooldownPeriod = 1000 * 5; + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map datapoints = new HashMap<>(); + metric.setDatapoints(datapoints); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.NO_DATA, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + 
DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(), alert, notification, true); + + MetricQueryResult queryResult = new MetricQueryResult(); + queryResult.addInboundMetricQuery(new MetricQuery("scope", "metric", null, 0L, 5000L)); + when(_metricServiceMock.extractDCFromMetricQuery(anyList())).thenReturn(new ArrayList<>(Arrays.asList("DC1"))); + + when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(queryResult); + + assertEquals(new Integer(0), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(0, notificationCount.get()); + enableDatalagMonitoring(false); + } + + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger refocusCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, false); + + enableDatalagMonitoring(true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithSkippedEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + 
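+        // Same skip expectation as the NO_DATA variant above: data lag is simulated and
+        // this alert is not whitelisted, so no notification should fire.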
alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + assertEquals(new Integer(0), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(0, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithDcDetectionFailedWithSkippedEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + when(_metricServiceMock.extractDCFromMetricQuery(anyList())).thenReturn(new ArrayList<>()); + + assertEquals(new Integer(0), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(0, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithTransformsWithSkippedEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + 
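+        // COUNT(...) in the alert expression above makes this a transform query spanning
+        // multiple DCs; with data lag simulated it is expected to be skipped as well.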
notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + MetricQueryResult queryResult = new MetricQueryResult(); + queryResult.setMetricsList(new ArrayList(Arrays.asList(metric))); + queryResult.addTransform(TransformFactory.Function.COUNT); + queryResult.addInboundMetricQuery(new MetricQuery("COUNT", "metric", null, 0L, 5000L)); + when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(queryResult); + when(_metricServiceMock.extractDCFromMetricQuery(anyList())).thenReturn(new ArrayList<>(Arrays.asList("DC1", "DC2", "DC3"))); + + enableDatalagMonitoring(true); + + assertEquals(new Integer(0), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(0, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithTransformsWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "COUNT(-1h:scope:metric:avg, -1h:scope:metric:avg, -1h:scope:metric:avg)", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, false); + + MetricQueryResult queryResult = new MetricQueryResult(); + queryResult.setMetricsList(new ArrayList(Arrays.asList(metric))); + queryResult.addTransform(TransformFactory.Function.COUNT); + queryResult.addInboundMetricQuery(new MetricQuery("COUNT", "metric", null, 0L, 5000L)); + when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(queryResult); + when(_metricServiceMock.extractDCFromMetricQuery(anyList())).thenReturn(new ArrayList<>(Arrays.asList("DC1", "DC2", "DC3"))); + + enableDatalagMonitoring(true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithWhiteListedScopeWithNoDataWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 50, inertiaPeriod = 1000; + int cooldownPeriod = 1000 * 5; + final AtomicInteger notificationCount = new AtomicInteger(0); 
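+        // "whiteListedScope" in the expression below is presumably on the data-lag
+        // whitelist, so this NO_DATA alert still evaluates while lag is simulated.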
+ final AtomicInteger clearCount = new AtomicInteger(0); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:whiteListedScope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.NO_DATA, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(), alert, notification, true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithWhiteListedUserWithNoDataWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 50, inertiaPeriod = 1000; + int cooldownPeriod = 1000 * 5; + final AtomicInteger notificationCount = new AtomicInteger(0); + final AtomicInteger clearCount = new AtomicInteger(0); + + Alert alert = new Alert(userService.findDefaultUser(), userService.findDefaultUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.NO_DATA, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(), alert, notification, true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithWhiteListedScopeWithMetricDataWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "testAlert", "-1h:whiteListedScope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new 
Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithWhiteListedUserWithMetricDataWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findDefaultUser(), userService.findDefaultUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.NO_DATA, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void testExecuteScheduledAlerts_DuringDatalagPresentWithWhiteListedScopeWithTransformSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findDefaultUser(), userService.findDefaultUser(), "testAlert", "COUNT(-1h:whitelistedScope:metric1:avg,-1h:scope:metric2:avg,-1h:scope:metric3:avg)", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + 
alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, true); + + MetricQueryResult queryResult = new MetricQueryResult(); + queryResult.setMetricsList(new ArrayList(Arrays.asList(metric))); + queryResult.addTransform(TransformFactory.Function.COUNT); + queryResult.addInboundMetricQuery(new MetricQuery("COUNT", "metric", null, 0L, 5000L)); + when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(queryResult); + when(_metricServiceMock.extractDCFromMetricQuery(anyList())).thenReturn(new ArrayList<>(Arrays.asList("DC1", "DC2", "DC3"))); + + assertEquals(new Integer(1), spyAlertService.executeScheduledAlerts(1, 1000)); + + assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + @Test + public void updateRequestContext_test() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert-name_test", EXPRESSION, "* * * * *"); + + alertService.updateRequestContext(alert); + + assertNotNull(RequestContextHolder.getRequestContext()); + assertEquals(userService.findAdminUser().getUserName() + "-alert", RequestContextHolder.getRequestContext().getUserName()); + } + + // ------------------------------------------------------------------------------------------ + // Historical testing unit tests + // ------------------------------------------------------------------------------------------ + + @Test + public void testAlertsHistorical_WithWhiteListedUserWithMetricDataWithSuccessfulEvaluation() { + ServiceFactory sFactory = system.getServiceFactory(); + UserService userService = sFactory.getUserService(); + + int triggerMinValue = 10, inertiaPeriod = 1; + int cooldownPeriod = 1000 * 5; + + final AtomicInteger clearCount = new AtomicInteger(0); + final AtomicInteger notificationCount = new AtomicInteger(0); + + Metric metric = new Metric("scope", "metric"); + Map dps1 = new HashMap(); + dps1.put(1000L, "11"); + dps1.put(2000L, "20"); + dps1.put(3000L, "30"); + metric.setDatapoints(_convertDatapoints(dps1)); + + Alert alert = new Alert(userService.findDefaultUser(), userService.findDefaultUser(), "testAlert", "-1h:scope:metric:avg", "* * * * *"); + _setAlertId(alert, "100001"); + Trigger trigger = new Trigger(alert, TriggerType.NO_DATA, "testTrigger", triggerMinValue, inertiaPeriod); + _setTriggerId(trigger, "100002"); + Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList(), + cooldownPeriod); + _setNotificationId(notification, "100003"); + + alert.setTriggers(Arrays.asList(trigger)); + alert.setNotifications(Arrays.asList(notification)); + notification.setTriggers(alert.getTriggers()); + alert.setEnabled(true); + + DefaultAlertService spyAlertService = _initializeSpyAlertServiceWithStubs(notificationCount, clearCount, + Arrays.asList(metric), alert, notification, false); + + // spyAlertService.executeScheduledAlerts(1, 1000); + AlertTestResults testResults = new AlertTestResults("myUuid"); + spyAlertService.testEvaluateAlert( alert, 3010L, testResults); + + // assertEquals(1, notificationCount.get()); + enableDatalagMonitoring(false); + } + + + // Support Methods 
-------------------------------------------------------------------------- + + private DefaultAlertService _initializeSpyAlertServiceWithStubs(final AtomicInteger notificationCount, final AtomicInteger clearCount, + List metrics, Alert alert, Notification notification, boolean isDataLagging) { + DefaultAlertService spyAlertService = spy(alertService); + when(_emProviderMock.get()).thenReturn(em); + + Long enqueueTime = System.currentTimeMillis(); + ObjectMapper mapper = new ObjectMapper(); + SimpleModule module = new SimpleModule(); + module.addSerializer(Alert.class, new Alert.Serializer()); + module.addSerializer(Trigger.class, new Trigger.Serializer()); + module.addSerializer(Notification.class, new Notification.Serializer()); + module.addSerializer(PrincipalUser.class, new Alert.PrincipalUserSerializer()); + mapper.registerModule(module); + + try { + AlertWithTimestamp alertWithTimestamp = new AlertWithTimestamp(mapper.writeValueAsString(alert), enqueueTime); + when(_mqServiceMock.dequeue(eq(MQQueue.ALERT.getQueueName()), eq(AlertWithTimestamp.class), anyInt(), anyInt())). + thenReturn(Arrays.asList(alertWithTimestamp)); + } catch (JsonProcessingException e) { + fail("Failed to serialize Alert"); + } + + try { + doReturn(alert).when(_mapper).readValue(mapper.writeValueAsString(alert), Alert.class); + } catch (IOException e) { + fail("Failed to deserialize Alert"); + } + + MetricQueryResult queryResult = new MetricQueryResult(); + queryResult.setMetricsList(metrics); + when(_metricServiceMock.getMetrics(anyString(), anyLong())).thenReturn(queryResult); + when(_monitorServiceMock.isDataLagging(any())).thenReturn(isDataLagging); + + enableDatalagMonitoring(isDataLagging); + + doAnswer(new Answer() { + + @Override + public Notification answer(InvocationOnMock invocation) throws Throwable { + return invocation.getArgument(1, Notification.class); + } + }).when(spyAlertService).mergeEntity(em, notification); + + + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + return null; + } + + }).when(spyAlertService).updateNotificationsActiveStatusAndCooldown(Arrays.asList(notification)); + + + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + notificationCount.incrementAndGet(); + return null; + } + }).when(spyAlertService).sendNotification(any(Trigger.class), + any(Metric.class), + any(History.class), + any(Notification.class), + any(Alert.class), + anyLong(), + anyLong(), + anyString()); + + doAnswer(new Answer() { + + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + clearCount.incrementAndGet(); + return null; + } + }).when(spyAlertService).sendClearNotification(any(Trigger.class), + any(Metric.class), + any(History.class), + any(Notification.class), + any(Alert.class), + anyLong(), + anyString()); + + return spyAlertService; + } + + private void enableDatalagMonitoring(boolean isDataLagging) { + TestUtils.setField(DataLagService.Property.DATA_LAG_MONITOR_ENABLED, "_defaultValue", Boolean.toString(isDataLagging)); + } + + private Metric _createMetric(String scope, String metricName, int triggerMinValue, int inertiaPeriod) { + + long startTime = 1L; + inertiaPeriod = inertiaPeriod / (1000 * 60); + + Metric result = new Metric(scope, metricName); + Map datapoints = new HashMap<>(); + int index = 0; + + for (int j = 0; j <= TestUtils.random.nextInt(10); j++) { + datapoints.put(startTime + (++index * 60000L), 
(double)(TestUtils.random.nextInt(triggerMinValue)));
+        }
+        for (int j = 0; j <= inertiaPeriod; j++) {
+            datapoints.put(startTime + (++index * 60000L), (double)(triggerMinValue + TestUtils.random.nextInt(10)));
+        }
+        for (int j = 0; j <= TestUtils.random.nextInt(10); j++) {
+            datapoints.put(startTime + (++index * 60000L), (double)(TestUtils.random.nextInt(triggerMinValue)));
+        }
+        result.setDatapoints(datapoints);
+        result.setDisplayName(TestUtils.createRandomName());
+        result.setUnits(TestUtils.createRandomName());
+        return result;
+    }
+
+    private void _setAlertId(Alert alert, String id) {
+        try {
+            Field idField = Alert.class.getSuperclass().getDeclaredField("id");
+            idField.setAccessible(true);
+            idField.set(alert, new BigInteger(id));
+        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
+            fail("Failed to set alert id using reflection.");
+        }
+    }
+
+    private void _setTriggerId(Trigger trigger, String id) {
+        try {
+            Field idField = Trigger.class.getSuperclass().getDeclaredField("id");
+            idField.setAccessible(true);
+            idField.set(trigger, new BigInteger(id));
+        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
+            fail("Failed to set trigger id using reflection.");
+        }
+    }
+
+    private void _setNotificationId(Notification notification, String id) {
+        try {
+            Field idField = Notification.class.getSuperclass().getDeclaredField("id");
+            idField.setAccessible(true);
+            idField.set(notification, new BigInteger(id));
+        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
+            fail("Failed to set notification id using reflection.");
+        }
+    }
+
+    private Map _createDatapoints(long size, Double value, long startTime, boolean greaterThan) {
+        Map result = new HashMap();
+
+        for (int i = 0; i < size; i++) {
+            double dataPointValue = TestUtils.random.nextInt(value.intValue()) + (greaterThan ?
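
As an aside on the helpers above: _setAlertId, _setTriggerId, and _setNotificationId differ only in the entity type and could share one reflective setter. A generic version, assuming (as those helpers do) that the id field sits on the entity's direct superclass and that a plain, unproxied entity instance is passed:

    import java.lang.reflect.Field;
    import java.math.BigInteger;

    // Generic replacement for the three _set*Id helpers: the JPA id lives on a
    // shared superclass, so one reflective setter covers Alert, Trigger, and
    // Notification alike. Throws AssertionError instead of calling fail() so the
    // helper is usable outside a JUnit class.
    final class TestEntityIds {

        static void setId(Object entity, String id) {
            try {
                Field idField = entity.getClass().getSuperclass().getDeclaredField("id");
                idField.setAccessible(true);
                idField.set(entity, new BigInteger(id));
            } catch (ReflectiveOperationException | SecurityException | IllegalArgumentException e) {
                throw new AssertionError("Failed to set id on " + entity.getClass().getSimpleName(), e);
            }
        }
    }
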
(value + 2) : -1); + + result.put(startTime++, String.valueOf(dataPointValue)); + } + return result; + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/NotificationsCacheTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/NotificationsCacheTest.java new file mode 100644 index 000000000..f202aed87 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/NotificationsCacheTest.java @@ -0,0 +1,60 @@ +package com.salesforce.dva.argus.service.alert; + +import com.google.common.collect.ImmutableMap; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Matchers; +import org.mockito.Mock; + +import java.math.BigInteger; +import java.util.Map; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertSame; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.verify; + +@RunWith(org.mockito.junit.MockitoJUnitRunner.class) +public class NotificationsCacheTest { + @Mock + private NotificationsCacheRefresherThread thread; + private NotificationsCache cache; + + @Before + public void setUp() { + cache = new NotificationsCache(thread); + } + + @Test + public void initCacheAndStartRefresherThread_test() { + verify(thread).runOnce(); + verify(thread).start(); + } + + @Test + public void setNotificationCooldownExpirationMap_test() { + Map> notificationCooldownExpirationMap = ImmutableMap.of(BigInteger.TEN, + ImmutableMap.of("TEST", Long.MIN_VALUE)); + cache.setNotificationCooldownExpirationMap(notificationCooldownExpirationMap); + assertSame(notificationCooldownExpirationMap, cache.getNotificationCooldownExpirationMap()); + } + + @Test + public void setNotificationActiveStatusMap_test() { + Map> notificationActiveStatusMap = ImmutableMap.of(BigInteger.TEN, + ImmutableMap.of("TEST", Boolean.TRUE)); + cache.setNotificationActiveStatusMap(notificationActiveStatusMap); + assertSame(notificationActiveStatusMap, cache.getNotificationActiveStatusMap()); + } + + @Test + public void setNotificationsCacheRefreshed_test() { + boolean refreshed = false; + cache.setNotificationsCacheRefreshed(refreshed); + assertEquals(refreshed, cache.isNotificationsCacheRefreshed()); + refreshed = true; + cache.setNotificationsCacheRefreshed(refreshed); + assertEquals(refreshed, cache.isNotificationsCacheRefreshed()); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifierTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifierTest.java new file mode 100644 index 000000000..53087173b --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/EmailNotifierTest.java @@ -0,0 +1,514 @@ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.common.collect.ImmutableList; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MailService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import 
com.salesforce.dva.argus.service.mail.EmailContext; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.util.TemplateReplacer; +import org.apache.commons.lang3.tuple.Pair; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentMatcher; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import java.security.SecureRandom; +import java.text.SimpleDateFormat; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TimeZone; +import java.util.TreeMap; + +import static com.salesforce.dva.argus.TestUtils.generateAlert; +import static com.salesforce.dva.argus.TestUtils.getHistory; +import static com.salesforce.dva.argus.TestUtils.getNotification; +import static junit.framework.TestCase.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class EmailNotifierTest { + private static final SecureRandom random = new SecureRandom(); + private static final String ALERT_NAME = "TestName"; + private static final String METRIC_NAME = "argus.jvm:cores.active:max"; + private static final String METRIC_SCOPE_NAME = "MetricScope"; + private static final Trigger.TriggerType TRIGGER_TYPE = Trigger.TriggerType.GREATER_THAN; + private static final String TRIGGER_NAME = "TestTrigger"; + private static final String TRIGGER_NAME2 = "TestTrigger2"; + private static final String TRIGGER_THRESHOLD = "1.0"; + private static final String TRIGGER_THRESHOLD2 = "2.0"; + private static final String TRIGGER_INERTIA_MILLIS = "0"; + private static final String TRIGGER_INERTIA_MILLIS2 = "1"; + private static final int TRIGGER_EVENT_VALUE = 5; + private static final long TRIGGER_FIRED_TIME_SECONDS = 1349333576; + private static String TRIGGER_FIRED_DATE_GMT = getDateFormat(TRIGGER_FIRED_TIME_SECONDS); + private static String TRIGGERED = "Triggered"; + private static String CLEARED = "Cleared"; + private static final String EMAIL_NOTIFICATION_NAME = "TEST EMAIL NOTIFICATION"; + private static final String EMAIL_NOTIFIER_NAME = "TEST_EMAIL_NOTIFIER"; + private static final String PAGER_DUTY_NOTIFICATION_NAME = "TEST PAGER DUTY NOTIFICATION"; + private static final String PAGER_DUTY_NOTIFIER_NAME = "TEST_PAGER_DUTY_NOTIFIER"; + private static final String IMAGE_ID = "img1"; + private static final byte[] IMAGE_BYTE_ARRAY = "TestString".getBytes(); + private static final String IMAGE_URL = "https://myhostname.abc.com/argusws/images/id/img1"; + private static final String TRACKING_ID = "1_" + TRIGGER_FIRED_TIME_SECONDS; + + private static final List SUBSCRIPTION_LIST = Arrays.asList("test-subscription"); + + @Mock + private MailService mailServiceMock; + + @Mock + private AnnotationService annotationServiceMock; + + @Mock + private AuditService auditServiceMock; + + @Mock + private MetricService metricServiceMock; + + @Mock + private PrincipalUser principalUserMock; + + private SystemConfiguration systemConfiguration; + private EmailNotifier emailNotifier; + private NotificationContext notificationContext; + + @Before 
+ public void setup() { + Properties properties = new Properties(); + systemConfiguration = new SystemConfiguration(properties); + emailNotifier = new EmailNotifier(metricServiceMock, annotationServiceMock, auditServiceMock, mailServiceMock, + systemConfiguration, null); + Alert alert = generateAlert(ALERT_NAME, principalUserMock, "-1h:"+METRIC_NAME); + alert.setEnabled(true); + + Trigger trigger = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME, TRIGGER_THRESHOLD, TRIGGER_INERTIA_MILLIS); + List triggerList = ImmutableList.of(trigger); + alert.setTriggers(triggerList); + + Notification notification = getNotification(EMAIL_NOTIFICATION_NAME, EMAIL_NOTIFIER_NAME, alert, SUBSCRIPTION_LIST); + alert.addNotification(notification); + + Metric metric = getMetric(); + History history = getHistory(); + + notificationContext = new NotificationContext(alert, trigger, notification, + TRIGGER_FIRED_TIME_SECONDS*1000, TRIGGER_EVENT_VALUE, metric, history, + Pair.of(IMAGE_ID, IMAGE_BYTE_ARRAY), IMAGE_URL, TRACKING_ID); + } + + @Test + public void testGetNameReturnsEmailNotifierClassName() { + assertEquals(emailNotifier.getName(), "com.salesforce.dva.argus.service.alert.notifier.EmailNotifier"); + } + + @Test + public void getEmailSubject_testDefaultValueWithOnly1Trigger1Notification() { + String subjectTemplate = "[Argus] Notification for Alert: ${alert.name}"; + String expectedSubject = TemplateReplacer.applyTemplateChanges(notificationContext, subjectTemplate); + String subject = emailNotifier.getEmailSubject(notificationContext); + + assertEquals(expectedSubject, subject); + + notificationContext.getNotification().setEmailSubject(" "); // whitespace should get "trimmed" and default subject should be returned + subject = emailNotifier.getEmailSubject(notificationContext); + + assertEquals(expectedSubject, subject); + } + + @Test + public void getEmailSubject_testDefaultValueWith2Triggers2Notifications() { + Alert alert = notificationContext.getAlert(); + Trigger trigger2 = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME2, TRIGGER_THRESHOLD2, TRIGGER_INERTIA_MILLIS); + alert.setTriggers(Arrays.asList(notificationContext.getTrigger(), trigger2)); + Notification notification2 = getNotification("name2", + EMAIL_NOTIFIER_NAME, + alert, + SUBSCRIPTION_LIST); + alert.setNotifications(Arrays.asList(notificationContext.getNotification(), notification2)); + + String subjectTemplate = "[Argus] Notification for Alert: ${alert.name} Notification: ${notification.name} Trigger:${trigger.name}"; + String expectedSubject = TemplateReplacer.applyTemplateChanges(notificationContext, subjectTemplate); + String subject = emailNotifier.getEmailSubject(notificationContext); + + assertEquals(expectedSubject, subject); + } + + @Test + public void getEmailSubject_testDefaultValueWith2Notifications() { + Alert alert = notificationContext.getAlert(); + Notification notification2 = getNotification("name2", + EMAIL_NOTIFIER_NAME, + alert, + SUBSCRIPTION_LIST); + alert.setNotifications(Arrays.asList(notificationContext.getNotification(), notification2)); + + String subjectTemplate = "[Argus] Notification for Alert: ${alert.name} Notification: ${notification.name}"; + String expectedSubject = TemplateReplacer.applyTemplateChanges(notificationContext, subjectTemplate); + String subject = emailNotifier.getEmailSubject(notificationContext); + + assertEquals(expectedSubject, subject); + } + + @Test + public void getEmailSubject_testDefaultValueWith2Triggers() { + Alert alert = notificationContext.getAlert(); + Trigger trigger2 = 
getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME2, TRIGGER_THRESHOLD2, TRIGGER_INERTIA_MILLIS);
+        alert.setTriggers(Arrays.asList(notificationContext.getTrigger(), trigger2));
+
+        String subjectTemplate = "[Argus] Notification for Alert: ${alert.name} Trigger:${trigger.name}";
+        String expectedSubject = TemplateReplacer.applyTemplateChanges(notificationContext, subjectTemplate);
+        String subject = emailNotifier.getEmailSubject(notificationContext);
+
+        assertEquals(expectedSubject, subject);
+    }
+
+    @Test
+    public void getEmailSubject_test() {
+        String expectedSubject = "TEST SUBJECT";
+        notificationContext.getNotification().setEmailSubject(expectedSubject);
+
+        String subject = emailNotifier.getEmailSubject(notificationContext);
+
+        assertEquals(expectedSubject, subject);
+    }
+
+    @Test
+    public void testEmailNotificationWhenTheStatusIsTriggered() {
+        String expectedNotificationSubject = getNotificationSubjectSingleNotification();
+        String expectedNotificationBody = getEmailBodyForSingleNotificationOnTriggered();
+
+        ArgumentMatcher emailContext = new ArgumentMatcher() {
+            @Override
+            public boolean matches(EmailContext emailContext) {
+                boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody);
+                boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject);
+                return isEmailBodyEqual && isEmailSubjectEqual;
+            }
+        };
+        when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(true);
+
+        boolean isEmailSent = emailNotifier.sendAdditionalNotification(notificationContext);
+
+        verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext));
+
+        assertTrue(isEmailSent);
+    }
+
+    @Test
+    public void testEmailNotificationWhenTheStatusIsTriggeredAndImageDetailsAreNull() {
+        Alert alert = generateAlert(ALERT_NAME, principalUserMock, "-1h:"+METRIC_NAME);
+        alert.setEnabled(true);
+
+        Trigger trigger = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME, TRIGGER_THRESHOLD, TRIGGER_INERTIA_MILLIS);
+        List triggerList = ImmutableList.of(trigger);
+        alert.setTriggers(triggerList);
+
+        Notification notification = getNotification(EMAIL_NOTIFICATION_NAME, EMAIL_NOTIFIER_NAME, alert, SUBSCRIPTION_LIST);
+        alert.addNotification(notification);
+
+        Metric metric = getMetric();
+        History history = getHistory();
+
+        notificationContext = new NotificationContext(alert, trigger, notification,
+                TRIGGER_FIRED_TIME_SECONDS*1000, TRIGGER_EVENT_VALUE, metric, history);
+
+        String expectedNotificationSubject = getNotificationSubjectSingleNotification();
+        String expectedNotificationBody = getEmailBodyForSingleNotificationOnTriggeredWhenImageDetailsAreNotPresent();
+
+        ArgumentMatcher emailContext = new ArgumentMatcher() {
+            @Override
+            public boolean matches(EmailContext emailContext) {
+                boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody);
+                boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject);
+                return isEmailBodyEqual && isEmailSubjectEqual;
+            }
+        };
+        when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(true);
+
+        boolean isEmailSent = emailNotifier.sendAdditionalNotification(notificationContext);
+
+        verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext));
+
+        assertTrue(isEmailSent);
+    }
+
+    @Test
+    public void testEmailNotificationWhenTheStatusIsCleared() {
+        String expectedNotificationSubject = getNotificationSubjectSingleNotification();
+        String expectedNotificationBody = getEmailBodyForSingleNotificationOnCleared();
+
+        ArgumentMatcher
emailContext = new ArgumentMatcher() { + @Override + public boolean matches(EmailContext emailContext) { + boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody); + boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject); + return isEmailBodyEqual && isEmailSubjectEqual; + } + }; + when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(true); + boolean isEmailSent = emailNotifier.clearAdditionalNotification(notificationContext); + + verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext)); + + assertTrue(isEmailSent); + } + + @Test + public void testEmailNotificationWhenTheNotificationIsNotSentOnClearedNotification() { + String expectedNotificationSubject = getNotificationSubjectSingleNotification(); + String expectedNotificationBody = getEmailBodyForSingleNotificationOnCleared(); + + ArgumentMatcher emailContext = new ArgumentMatcher() { + @Override + public boolean matches(EmailContext emailContext) { + boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody); + boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject); + return isEmailBodyEqual && isEmailSubjectEqual; + } + }; + when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(false); + boolean isEmailSent = emailNotifier.clearAdditionalNotification(notificationContext); + + verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext)); + + assertTrue(notificationContext.getHistory().getMessage().contains(getAuditMessageWhenEmailNotSent(CLEARED))); + assertFalse(isEmailSent); + } + + @Test + public void clearAdditionalNotification_testDisabledClearNotification() { + notificationContext.getNotification().setEnableClearNotification(false); + + boolean result = emailNotifier.clearAdditionalNotification(notificationContext); + + assertTrue(result); + verify(mailServiceMock, never()).sendMessage(any()); + } + + @Test + public void testEmailNotificationWhenTheNotificationIsNotSentOnTriggeredNotification() { + String expectedNotificationSubject = getNotificationSubjectSingleNotification(); + String expectedNotificationBody = getEmailBodyForSingleNotificationOnTriggered(); + + ArgumentMatcher emailContext = new ArgumentMatcher() { + @Override + public boolean matches(EmailContext emailContext) { + boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody); + boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject); + return isEmailBodyEqual && isEmailSubjectEqual; + } + }; + when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(false); + + boolean isEmailSent = emailNotifier.sendAdditionalNotification(notificationContext); + + verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext)); + + assertTrue(notificationContext.getHistory().getMessage().contains(getAuditMessageWhenEmailNotSent(TRIGGERED))); + assertFalse(isEmailSent); + } + + @Test(expected = IllegalArgumentException.class) + public void testEmailNotifierWithMailServiceAsNull() { + Properties properties = new Properties(); + systemConfiguration = new SystemConfiguration(properties); + emailNotifier = new EmailNotifier(metricServiceMock, annotationServiceMock, auditServiceMock, null, + systemConfiguration, null); + } + + @Test(expected = IllegalArgumentException.class) + public void testEmailNotifierWithConfigSetAsNull() { + Properties properties = new Properties(); + systemConfiguration = new 
SystemConfiguration(properties);
+        emailNotifier = new EmailNotifier(metricServiceMock, annotationServiceMock, auditServiceMock, mailServiceMock,
+                null, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testSendAdditionalNotificationWhenNotificationContextIsNull() {
+        boolean isSent = emailNotifier.sendAdditionalNotification(null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testClearAdditionalNotificationWhenNotificationContextIsNull() {
+        boolean isSent = emailNotifier.clearAdditionalNotification(null);
+    }
+
+    @Test
+    public void testSendingNotificationsWhenThereAreMultipleNotificationsForAnAlert() {
+        Alert alert = generateAlert(ALERT_NAME, principalUserMock, "-1h:" + METRIC_NAME);
+        alert.setEnabled(true);
+
+        Trigger trigger = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME, TRIGGER_THRESHOLD, TRIGGER_INERTIA_MILLIS);
+        List triggerList = ImmutableList.of(trigger);
+        alert.setTriggers(triggerList);
+
+        Notification emailNotification = getNotification(EMAIL_NOTIFICATION_NAME, EMAIL_NOTIFIER_NAME, alert, SUBSCRIPTION_LIST);
+        alert.addNotification(emailNotification);
+
+        Notification pagerDutyNotification = getNotification(PAGER_DUTY_NOTIFICATION_NAME, PAGER_DUTY_NOTIFIER_NAME, alert, SUBSCRIPTION_LIST);
+        alert.addNotification(pagerDutyNotification);
+
+        Metric metric = getMetric();
+        History history = getHistory();
+
+        notificationContext = new NotificationContext(alert, trigger, emailNotification,
+                TRIGGER_FIRED_TIME_SECONDS*1000, TRIGGER_EVENT_VALUE, metric, history, Pair.of(IMAGE_ID, IMAGE_BYTE_ARRAY), IMAGE_URL, TRACKING_ID);
+
+        String expectedNotificationSubject = getNotificationSubjectMultipleNotifications();
+        String expectedNotificationBody = getEmailBodyForMultipleNotificationsOnTriggered();
+
+        Pair imageData = Pair.of(IMAGE_ID, IMAGE_BYTE_ARRAY);
+        ArgumentMatcher emailContext = new ArgumentMatcher() {
+            @Override
+            public boolean matches(EmailContext emailContext) {
+                boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody);
+                boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject);
+                return isEmailBodyEqual && isEmailSubjectEqual;
+            }
+        };
+        when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(true);
+
+        boolean isEmailSent = emailNotifier.sendAdditionalNotification(notificationContext);
+
+        verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext));
+
+        assertTrue(isEmailSent);
+    }
+
+    @Test
+    public void testSendingNotificationsWhenThereAreMultipleTriggersForAnAlert() {
+        Alert alert = generateAlert(ALERT_NAME, principalUserMock, "-1h:" + METRIC_NAME);
+        alert.setEnabled(true);
+
+        Trigger trigger1 = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME, TRIGGER_THRESHOLD, TRIGGER_INERTIA_MILLIS);
+        Trigger trigger2 = getTrigger(alert, TRIGGER_TYPE, TRIGGER_NAME2, TRIGGER_THRESHOLD2, TRIGGER_INERTIA_MILLIS2);
+        List triggerList = ImmutableList.of(trigger1, trigger2);
+        alert.setTriggers(triggerList);
+
+        Notification emailNotification = getNotification(EMAIL_NOTIFICATION_NAME, EMAIL_NOTIFIER_NAME, alert, SUBSCRIPTION_LIST);
+        alert.addNotification(emailNotification);
+
+        Metric metric = getMetric();
+        History history = getHistory();
+
+        Trigger triggerThatTriggered = trigger1;
+        notificationContext = new NotificationContext(alert, triggerThatTriggered, emailNotification,
+                TRIGGER_FIRED_TIME_SECONDS*1000, TRIGGER_EVENT_VALUE, metric, history, Pair.of(IMAGE_ID, IMAGE_BYTE_ARRAY), IMAGE_URL, TRACKING_ID);
+
+        String
expectedNotificationSubject = getNotificationSubjectMultipleTriggers(triggerThatTriggered.getName()); + String expectedNotificationBody = getEmailBodyForMultipleTriggersOnTriggered(triggerThatTriggered.getName()); + + Pair imageData = Pair.of(IMAGE_ID, IMAGE_BYTE_ARRAY); + ArgumentMatcher emailContext = new ArgumentMatcher() { + @Override + public boolean matches(EmailContext emailContext) { + boolean isEmailBodyEqual = emailContext.getEmailBody().equals(expectedNotificationBody); + boolean isEmailSubjectEqual = emailContext.getSubject().equals(expectedNotificationSubject); + return isEmailBodyEqual && isEmailSubjectEqual; + } + }; + when(mailServiceMock.sendMessage(argThat(emailContext))).thenReturn(true); + + boolean isEmailSent = emailNotifier.sendAdditionalNotification(notificationContext); + + verify(mailServiceMock, times(1)).sendMessage(argThat(emailContext)); + + assertTrue(isEmailSent); + } + + + private String getEmailBodyForSingleNotificationOnTriggered() { + return "

Alert " + ALERT_NAME + " was " + TRIGGERED + " at " + TRIGGER_FIRED_DATE_GMT + "

Tracking ID: " + TRACKING_ID +"
Notification is on cooldown until: 01/01/1970 00:00:00 GMT

Snapshot of the evaluated metric data.

Evaluated metric expression: -3600000:0:" + METRIC_NAME + "

Click here for the current view of the metric data.

Triggered on Metric: MetricScope:argus.jvm:cores.active:max{source=unittest}
Trigger details: type=" + TRIGGER_TYPE + ", name=" + TRIGGER_NAME + ", threshold=" + TRIGGER_THRESHOLD + ", inertia="+ TRIGGER_INERTIA_MILLIS + "
Triggering event value: " + TRIGGER_EVENT_VALUE + "

Click here to view alert definition.

Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. If the data source has inherent lag or a large aggregation window is used during data collection, it is possible for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ensuring the time" + + " window used in alert expression is outside the range of the datasource lag.

You received this notification because you, or a distribution list you belong to is listed as a subscriber of the alert."; + } + + private String getEmailBodyForSingleNotificationOnTriggeredWhenImageDetailsAreNotPresent() { + return "

Alert " + ALERT_NAME + " was " + TRIGGERED + " at " + TRIGGER_FIRED_DATE_GMT + "

Notification is on cooldown until: 01/01/1970 00:00:00 GMT

Click here to view the evaluated metric data.

Evaluated metric expression: -3600000:0:" + METRIC_NAME + "

Click here for the current view of the metric data.

Triggered on Metric: MetricScope:argus.jvm:cores.active:max{source=unittest}
Trigger details: type=" + TRIGGER_TYPE + ", name=" + TRIGGER_NAME + ", threshold=" + TRIGGER_THRESHOLD + ", inertia="+ TRIGGER_INERTIA_MILLIS + "
Triggering event value: " + TRIGGER_EVENT_VALUE + "

Click here to view alert definition.

Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. If the data source has inherent lag or a large aggregation window is used during data collection, it is possible for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ensuring the time" + + " window used in alert expression is outside the range of the datasource lag.

You received this notification because you, or a distribution list you belong to is listed as a subscriber of the alert."; + } + + private String getEmailBodyForSingleNotificationOnCleared() { + return "

Alert " + ALERT_NAME + " was " + CLEARED + " at " + TRIGGER_FIRED_DATE_GMT + "

Tracking ID: " + TRACKING_ID +"

Snapshot of the evaluated metric data.

Evaluated metric expression: -3600000:0:" + METRIC_NAME + "

Click here for the current view of the metric data.

" + CLEARED + " on Metric: " + METRIC_SCOPE_NAME + ":" + METRIC_NAME + "{source=unittest}
Trigger details: type="+ TRIGGER_TYPE +", name=" + TRIGGER_NAME + ", threshold=" + TRIGGER_THRESHOLD + ", inertia=" + TRIGGER_INERTIA_MILLIS + "

Click here to view alert definition.

Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. If the data source has inherent lag or a large aggregation window is used during data collection, it is possible for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ensuring the time window used in alert expression is outside the range of the datasource lag.

You received this notification because you, or a distribution list you belong to is listed as a subscriber of the alert."; + } + + private String getEmailBodyForMultipleNotificationsOnTriggered() { + return "

Alert " + ALERT_NAME + " was " + TRIGGERED + " at " + TRIGGER_FIRED_DATE_GMT + "

Tracking ID: " + TRACKING_ID +"
Notification: " + EMAIL_NOTIFICATION_NAME + "
Notification is on cooldown until: 01/01/1970 00:00:00 GMT

Snapshot of the evaluated metric data.

Evaluated metric expression: -3600000:0:" + METRIC_NAME + "

Click here for the current view of the metric data.

Triggered on Metric: MetricScope:argus.jvm:cores.active:max{source=unittest}
Trigger details: type=" + TRIGGER_TYPE + ", name=" + TRIGGER_NAME + ", threshold=" + TRIGGER_THRESHOLD + ", inertia="+ TRIGGER_INERTIA_MILLIS + "
Triggering event value: " + TRIGGER_EVENT_VALUE + "

Click here to view alert definition.

Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. If the data source has inherent lag or a large aggregation window is used during data collection, it is possible for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ensuring the time" + + " window used in alert expression is outside the range of the datasource lag.

You received this notification because you, or a distribution list you belong to is listed as a subscriber of the alert."; + } + + private String getEmailBodyForMultipleTriggersOnTriggered(String triggerName) { + return "

Alert " + ALERT_NAME + " was " + TRIGGERED + " at " + TRIGGER_FIRED_DATE_GMT + "

Tracking ID: " + TRACKING_ID +"
Triggered by: " + triggerName + "
Notification is on cooldown until: 01/01/1970 00:00:00 GMT

Snapshot of the evaluated metric data.

Evaluated metric expression: -3600000:0:" + METRIC_NAME + "

Click here for the current view of the metric data.

" + TRIGGERED + " on Metric: " + METRIC_SCOPE_NAME + ":" + METRIC_NAME + "{source=unittest}
Trigger details: type=" + TRIGGER_TYPE + ", name=" + TRIGGER_NAME + ", threshold=" + TRIGGER_THRESHOLD + ", inertia=" + TRIGGER_INERTIA_MILLIS + "
Triggering event value: " + TRIGGER_EVENT_VALUE + "

Click here to view alert definition.

Disclaimer: This alert was evaluated using the time series data as it existed at the time of evaluation. If the data source has inherent lag or a large aggregation window is used during data collection, it is possible for the time series data to be updated such that the alert condition is no longer met. This may be avoided by ensuring the time window used in alert expression is outside the range of the datasource lag.

You received this notification because you, or a distribution list you belong to is listed as a subscriber of the alert."; + } + + private String getAuditMessageWhenEmailNotSent(String notificationStatus) { + return " Not able to send email for " + notificationStatus.toLowerCase() + " notification: `" + EMAIL_NOTIFICATION_NAME + + ".` to recipient " + SUBSCRIPTION_LIST; + } + + private String getNotificationSubjectSingleNotification() { + return "[Argus] Notification for Alert: " + ALERT_NAME ; + } + + private String getNotificationSubjectMultipleNotifications() { + return getNotificationSubjectSingleNotification() + " Notification: " + EMAIL_NOTIFICATION_NAME; + } + + private String getNotificationSubjectMultipleTriggers(String triggerName) { + return getNotificationSubjectSingleNotification() + " Trigger:" + triggerName; + } + + private Trigger getTrigger(Alert alert, Trigger.TriggerType triggerType, String triggerName, String triggerThreshold, String triggerInertiaMillis) { + return new Trigger(alert, triggerType, triggerName, Double.parseDouble(triggerThreshold), Long.parseLong(triggerInertiaMillis)); + } + + private static Metric getMetric() { + SecureRandom random = new SecureRandom(); + return createMetric(((int) (random.nextDouble() * 500)) + 1); + } + + private static Metric createMetric(int datapointCount) { + Metric result = new Metric(METRIC_SCOPE_NAME, METRIC_NAME); + Map datapoints = new TreeMap<>(); + + for (int i = 0; i < datapointCount; i++) { + datapoints.put(System.currentTimeMillis(), random.nextDouble() * 500); + } + + Map tags = new HashMap<>(); + tags.put("source", "unittest"); + result.setDatapoints(datapoints); + result.setTags(tags); + return result; + } + + private static String getDateFormat(long timeSeconds) { + SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss z"); + sdf.setTimeZone(TimeZone.getTimeZone("GMT")); + return sdf.format(timeSeconds*1000); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierIT.java new file mode 100644 index 000000000..0e5fd4f24 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierIT.java @@ -0,0 +1,83 @@ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.salesforce.dva.argus.AbstractTestIT; +import com.salesforce.dva.argus.IntegrationTest; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.junit.Before; +import org.junit.Test; +import org.junit.experimental.categories.Category; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Properties; + +import static org.junit.Assert.assertTrue; + +@Category(IntegrationTest.class) +public class GOCNotifierIT extends AbstractTestIT { + private GOCNotifier notifier; + + @Override + @Before + public void setUp() { + super.setUpZkTestServer(); + 
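
One pattern worth noting from EmailNotifierTest above: every test re-declares the same anonymous ArgumentMatcher comparing the EmailContext subject and body. Since Mockito 2's ArgumentMatcher is a functional interface, that boilerplate collapses to a small factory, for example:

    import org.mockito.ArgumentMatcher;

    import com.salesforce.dva.argus.service.mail.EmailContext;

    // Factory for the subject-and-body matcher that EmailNotifierTest repeats inline.
    final class EmailContextMatchers {

        static ArgumentMatcher<EmailContext> withSubjectAndBody(String expectedSubject, String expectedBody) {
            return ctx -> ctx.getSubject().equals(expectedSubject)
                    && ctx.getEmailBody().equals(expectedBody);
        }
    }

Each verification then reads verify(mailServiceMock).sendMessage(argThat(EmailContextMatchers.withSubjectAndBody(subject, body))) with no behavior change.
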
super.setupEmbeddedKafka(); + Properties p = new Properties(); + p.setProperty("notifier.property.goc.client.id", "{INSERT VALUE}"); + p.setProperty("notifier.property.goc.client.secret", "{INSERT VALUE}"); + p.setProperty("notifier.property.goc.username", "{INSERT VALUE}"); + p.setProperty("notifier.property.goc.password", "{INSERT VALUE}"); + p.setProperty("notifier.property.goc.endpoint", "https://login.salesforce.com"); + p.setProperty("system.property.goc.enabled", "true"); + system = getInstance(p); + system.start(); + } + + @Test + public void _sendAdditionalNotification_test() { + notifier = system.getNotifierFactory().getGOCNotifier(); + + UserService userService = system.getServiceFactory().getUserService(); + PrincipalUser user = userService.findAdminUser(); + Alert a = new Alert(user, user, createRandomName(), "-1h:argus.jvm:cores.active:max", "* * * * *"); + a.setEnabled(true); + + Trigger t = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "TEST TRIGGER IGNORE", 1, 0); + List triggerList = new LinkedList<>(); + triggerList.add(t); + a.setTriggers(triggerList); + + List notificationArgList = new ArrayList(); + notificationArgList.add("TEST SUBSCRIPTION"); + Notification n = new Notification("TEST NOTIF IGNORE", a, "TEST GOC NOTIFIER", notificationArgList, 5000L); + n.setSRActionable(false); + n.setSeverityLevel(5); + + n.setCustomText("INTEGRATION CUSTOM TEXT"); + n.setEventName("INTEGRATION TEST EVENT NAME"); + n.setElementName("TEST ELEMENT NAME IGNORE"); + n.setProductTag("a1aB0000000QA0QIAW"); + n.setArticleNumber("TEST ARTICLE NUMBER"); + + a.addNotification(n); + + Metric m = createMetric(); + + History h = new History("TEST HISTORY MESSAGE", SystemConfiguration.getHostname(), new BigInteger("100002"), History.JobStatus.STARTED, 10, System.currentTimeMillis() - 86400000); + + NotificationContext context = new NotificationContext(a, t, n, System.currentTimeMillis(), 5, m, h); + boolean result = notifier._sendAdditionalNotification(context, NotificationStatus.TRIGGERED); + assertTrue(result); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierTest.java new file mode 100644 index 000000000..cac5cc351 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GOCNotifierTest.java @@ -0,0 +1,414 @@ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.google.common.collect.ImmutableList; +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Audit; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AnnotationService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.commons.lang3.tuple.Pair; +import org.apache.http.HttpEntity; +import org.apache.http.StatusLine; +import 
org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.slf4j.LoggerFactory; + +import javax.persistence.EntityManager; +import java.math.BigInteger; +import java.net.SocketTimeoutException; +import java.util.Properties; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.powermock.api.mockito.PowerMockito.mock; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.verifyStatic; +import static org.powermock.api.mockito.PowerMockito.when; +import static org.powermock.api.mockito.PowerMockito.whenNew; + +@RunWith(PowerMockRunner.class) +@PrepareForTest({GOCNotifier.class, GusTransport.class, EntityUtils.class}) +public class GOCNotifierTest { + /* Constants */ + private static final String SYSTEM_CONFIG_GOC_ENABLED = "system.property.goc.enabled"; + private static final String GOC_NOTIFIER_GOC_USER = "notifier.property.goc.username"; + private static final String GOC_NOTIFIER_GOC_PWD = "notifier.property.goc.password"; + private static final String GOC_NOTIFIER_GOC_CLIENT_ID = "notifier.property.goc.client.id"; + private static final String GOC_NOTIFIER_GOC_CLIENT_SECRET = "notifier.property.goc.client.secret"; + private static final String GOC_NOTIFIER_GOC_ENDPOINT = "notifier.property.goc.endpoint"; + private static final String GOC_NOTIFIER_PROXY_HOST = "notifier.property.proxy.host"; + private static final String GOC_NOTIFIER_PROXY_PORT = "notifier.property.proxy.port"; + + private static final String TEST_INSTANCE_URL = "https://test_instance_url.com"; + private static final String TEST_TOKEN = "test_token"; + + private static final int MAX_ATTEMPTS_GOC_POST = 3; + + /* Test mocks */ + private MetricService metricService; + private AnnotationService annotationService; + private AuditService auditService; + private Provider emf; + private MonitorService monitorService; + private CloseableHttpClient httpClient; + private Audit auditResult; + private GusTransport gusTransport; + private CloseableHttpResponse httpResponse; + private StatusLine httpResponseStatusLine; + private HttpEntity httpResponseEntity; + + /* Class being tested */ + private GOCNotifier notifier; + + /* Test data */ + private SystemConfiguration config; + private Properties properties; + private NotificationContext context; + private Alert alert; + private Trigger trigger; + private Notification notification; + private Metric metric; + private History history; + + private static ch.qos.logback.classic.Logger apacheLogger; + private static ch.qos.logback.classic.Logger myClassLogger; + + @BeforeClass + static public void setUpClass() { + myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("com.salesforce.dva.argus.service.alert.GOCNotifierTest"); + myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF); + apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + } + 
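
The retry tests below all lean on one Mockito feature: chaining thenReturn gives a different answer per invocation, and the last stubbed value repeats thereafter. A standalone illustration of the semantics the 401-then-201 cases depend on:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.apache.http.StatusLine;

    // Chained stubbing returns a different value on each successive call, which is
    // how the retry tests simulate "first attempt fails with 401, retry gets 201".
    final class RetryStubbingExample {
        public static void main(String[] args) {
            StatusLine statusLine = mock(StatusLine.class);
            when(statusLine.getStatusCode()).thenReturn(401).thenReturn(201);

            System.out.println(statusLine.getStatusCode()); // first call: 401
            System.out.println(statusLine.getStatusCode()); // second call: 201
            System.out.println(statusLine.getStatusCode()); // further calls repeat the last value: 201
        }
    }
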
+ @Before + public void setup() { + // create mocks + metricService = mock(MetricService.class); + annotationService = mock(AnnotationService.class); + auditService = mock(AuditService.class); + emf = mock(Provider.class); + monitorService = mock(MonitorService.class); + httpClient = mock(CloseableHttpClient.class); + auditResult = mock(Audit.class); + gusTransport = mock(GusTransport.class); + httpResponse = mock(CloseableHttpResponse.class); + httpResponseStatusLine = mock(StatusLine.class); + httpResponseEntity = mock(HttpEntity.class); + + mockStatic(EntityUtils.class); + + // set up test SystemConfiguration properties + properties = new Properties(); + properties.setProperty(SYSTEM_CONFIG_GOC_ENABLED, "true"); + properties.setProperty(GOC_NOTIFIER_GOC_USER, "test_goc_user"); + properties.setProperty(GOC_NOTIFIER_GOC_PWD, "test_goc_pw"); + properties.setProperty(GOC_NOTIFIER_GOC_CLIENT_ID, "test_goc_client_id"); + properties.setProperty(GOC_NOTIFIER_GOC_CLIENT_SECRET, "test_goc_client_secret"); + properties.setProperty(GOC_NOTIFIER_GOC_ENDPOINT, "https://test_goc_ep.com"); + properties.setProperty(GOC_NOTIFIER_PROXY_HOST, "test_proxy_host"); + properties.setProperty(GOC_NOTIFIER_PROXY_PORT, "9090"); + config = new SystemConfiguration(properties); + + // set up test data + alert = new Alert(new PrincipalUser(null, "test_creator", "test_creator@salesforce.com"), + new PrincipalUser(null, "test_owner", "test_owner@salesforce.com"), + "test_alert_name", + "-1h:test:metric:avg", + "test_alert_cron_entry"); + trigger = new Trigger(alert, Trigger.TriggerType.EQUAL, "test_trigger_name", 3.14, 1000); + alert.setTriggers(ImmutableList.of(trigger)); + notification = new Notification("test_notification_name", + alert, + "test_notifier_name", + ImmutableList.of("test_subscription"), + 3000); + alert.addNotification(notification); + metric = new Metric("test_scope", "test_metric_name"); + history = new History("test_message", "test_host_name", BigInteger.valueOf(456), History.JobStatus.STARTED); + context = new NotificationContext(alert, trigger, notification, System.currentTimeMillis(), 3.14, metric, history); + } + + @Test + public void sendAdditionalNotification_testPostGOCNotificationRespCode201() throws Exception { + boolean result = sendAdditionalNotification_testTemplate(201); + assertTrue(result); + } + + @Test + public void sendAdditionalNotification_testPostGOCNotificationRespCode204() throws Exception { + boolean result = sendAdditionalNotification_testTemplate(204); + assertTrue(result); + } + + @Test + public void sendAdditionalNotification_testPostGOCNotificationRespCode401RetryAndPass() throws Exception { + int[] postNotificationResponseCode = {401, 201}; + sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode[0]) + .thenReturn(postNotificationResponseCode[1]); + + // create object under test + notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + // test + boolean result = notifier.sendAdditionalNotification(context); + assertTrue(result); + + // verify mocks + sendOrClearAdditionalNotification_verifyMocksTemplate(2, 0, 1, 1, true); + } + + @Test + public void sendAdditionalNotification_testPostGOCNotificationRespCode401RetryMaxTimes() throws Exception { + int postNotificationResponseCode = 401; + sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate(); + 
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_ATTEMPTS_GOC_POST, 0, 2, MAX_ATTEMPTS_GOC_POST, false);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testPostGOCNotificationRespCode500RetryAndPass() throws Exception {
+        int[] postNotificationResponseCode = {500, 201};
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode[0])
+                .thenReturn(postNotificationResponseCode[1]);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertTrue(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(2, 0, 0, 1, true);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testPostGOCNotificationRespCode500RetryMaxTimes() throws Exception {
+        int postNotificationResponseCode = 500;
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_ATTEMPTS_GOC_POST, 0, 0, MAX_ATTEMPTS_GOC_POST, false);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testSocketTimeoutExceptionRetryAndPass() throws Exception {
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpClient.execute(any())).thenThrow(new SocketTimeoutException())
+                .thenReturn(httpResponse);
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(201);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertTrue(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(2, 1, 0, 0, true);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testSocketTimeoutExceptionRetryMaxTimes() throws Exception {
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpClient.execute(any())).thenThrow(new SocketTimeoutException());
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_ATTEMPTS_GOC_POST, 3, 0, 0, false);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testRespCode400AuthHeaderFailRetryMaxTimes() throws Exception {
+        int postNotificationResponseCode = 400;
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
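+        // The INVALID_AUTH_HEADER error body stubbed below is what the
+        // notifier parses to conclude its cached token is stale; it then
+        // re-fetches credentials via getEndpointInfo(true), counted as
+        // refreshCacheTries in the verification template.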
+        when(EntityUtils.toString(any())).thenReturn("[{\"message\":\"INVALID_HEADER_TYPE\", \"errorCode\":\"INVALID_AUTH_HEADER\"}]");
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_ATTEMPTS_GOC_POST, 0, 2, MAX_ATTEMPTS_GOC_POST, false);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testRespCode400UnknownFailureNoRetries() throws Exception {
+        boolean result = sendAdditionalNotification_testNoRetriesTemplate(400);
+        assertFalse(result);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testUnknownFailureNoRetries() throws Exception {
+        boolean result = sendAdditionalNotification_testNoRetriesTemplate(404);
+        assertFalse(result);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testPostGOCNotificationRespCode201() throws Exception {
+        boolean result = clearAdditionalNotification_testTemplate(201);
+        assertTrue(result);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testPostGOCNotificationRespCode204() throws Exception {
+        boolean result = clearAdditionalNotification_testTemplate(204);
+        assertTrue(result);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testPostGOCNotificationRespCode401RetryAndPass() throws Exception {
+        int[] postNotificationResponseCode = {401, 201};
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode[0])
+                .thenReturn(postNotificationResponseCode[1]);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.clearAdditionalNotification(context);
+        assertTrue(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(2, 0, 1, 1, true);
+    }
+
+    private boolean sendAdditionalNotification_testTemplate(int postNotificationResponseCode) throws Exception {
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate();
+
+        return result;
+    }
+
+    private boolean sendAdditionalNotification_testNoRetriesTemplate(int postNotificationResponseCode) throws Exception {
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+        when(EntityUtils.toString(any())).thenReturn("bad response");
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 0, 1, false);
+        return result;
+    }
+
+    private boolean clearAdditionalNotification_testTemplate(int postNotificationResponseCode) throws Exception {
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GOCNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.clearAdditionalNotification(context);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate();
+
+        return result;
+    }
+
+    private void sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate() throws Exception {
+        sendOrClearAdditionalNotification_mockBehaviorTemplate(new GusTransport.EndpointInfo(TEST_INSTANCE_URL, TEST_TOKEN));
+    }
+
+    private void sendOrClearAdditionalNotification_mockBehaviorTemplate(GusTransport.EndpointInfo ei) throws Exception {
+        // define mock behavior
+        when(auditService.createAudit(any())).thenReturn(auditResult);
+        whenNew(GusTransport.class).withAnyArguments().thenReturn(gusTransport);
+        when(gusTransport.getEndpointInfo(anyBoolean())).thenReturn(ei);
+        when(gusTransport.getHttpClient()).thenReturn(httpClient);
+        when(httpClient.execute(any())).thenReturn(httpResponse);
+        when(httpResponse.getStatusLine()).thenReturn(httpResponseStatusLine);
+        when(EntityUtils.toString(any())).thenReturn("default");
+    }
+
+    private void sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate() throws Exception {
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 0, 0, true);
+    }
+
+    private void sendOrClearAdditionalNotification_verifyMocksTemplate(int tries, int exceptionsThrown, int refreshCacheTries, int getResponseBodyAsStringTimes, boolean success) throws Exception {
+        verify(gusTransport, times(tries - refreshCacheTries)).getEndpointInfo(false);
+        verify(gusTransport, times(refreshCacheTries)).getEndpointInfo(true);
+        verify(httpClient, times(tries)).execute(any());
+        verify(httpResponse, times(tries - exceptionsThrown)).getStatusLine();
+        verify(httpResponseStatusLine, times(tries - exceptionsThrown)).getStatusCode();
+        verify(httpResponse, times(tries - exceptionsThrown)).close();
+
+        verifyStatic(EntityUtils.class, times(getResponseBodyAsStringTimes));
+        EntityUtils.toString(any());
+
+        verify(monitorService).modifyCounter(MonitorService.Counter.GOC_NOTIFICATIONS_RETRIES, tries - 1, null);
+        verify(monitorService).modifyCounter(MonitorService.Counter.GOC_NOTIFICATIONS_FAILED, success ? 0 : 1, null);
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierIT.java
new file mode 100644
index 000000000..06282e36f
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierIT.java
@@ -0,0 +1,75 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import com.salesforce.dva.argus.AbstractTestIT;
+import com.salesforce.dva.argus.IntegrationTest;
+import com.salesforce.dva.argus.entity.Alert;
+import com.salesforce.dva.argus.entity.History;
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.entity.Notification;
+import com.salesforce.dva.argus.entity.PrincipalUser;
+import com.salesforce.dva.argus.entity.Trigger;
+import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus;
+import com.salesforce.dva.argus.service.UserService;
+import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import static org.junit.Assert.assertTrue;
+
+@Category(IntegrationTest.class)
+public class GusNotifierIT extends AbstractTestIT {
+    private GusNotifier notifier;
+
+    @Override
+    @Before
+    public void setUp() {
+        super.setUpZkTestServer();
+        super.setupEmbeddedKafka();
+        Properties p = new Properties();
+        p.setProperty("notifier.property.alert.gus_client_id", "{INSERT VALUE}");
+        p.setProperty("notifier.property.alert.gus_client_secret", "{INSERT VALUE}");
+        p.setProperty("notifier.property.alert.gus_user", "{INSERT VALUE}");
+        p.setProperty("notifier.property.alert.gus_pwd", "{INSERT VALUE}");
+        p.setProperty("notifier.property.alert.gus_endpoint", "https://myhost.com/services/oauth2/token");
+        p.setProperty("notifier.property.alert.gus_post_endpoint", "https://myhost.com/services/data/v35.0/chatter/feed-elements?feedElementType=FeedItem");
+        p.setProperty("system.property.gus.enabled", "true");
+        system = getInstance(p);
+        system.start();
+    }
+
+    @Test
+    public void sendGusNotification_test() {
+        notifier = system.getNotifierFactory().getGusNotifier();
+
+        UserService userService = system.getServiceFactory().getUserService();
+        PrincipalUser user = userService.findAdminUser();
+        Alert a = new Alert(user, user, createRandomName(), "-1h:argus.jvm:cores.active:max", "* * * * *");
+        a.setEnabled(true);
+
+        Trigger t = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "TEST TRIGGER IGNORE", 1, 0);
+        List<Trigger> triggerList = new LinkedList<>();
+        triggerList.add(t);
+        a.setTriggers(triggerList);
+
+        List<String> notificationArgList = new ArrayList<>();
+        notificationArgList.add("0F9B0000000IZlDKAW");
+        Notification n = new Notification("TEST NOTIF IGNORE", a, "TEST GUS NOTIFIER", notificationArgList, 5000L);
+        a.addNotification(n);
+
+        Metric m = createMetric();
+
+        History h = new History("TEST HISTORY MESSAGE", SystemConfiguration.getHostname(), new BigInteger("100002"), History.JobStatus.STARTED, 10, System.currentTimeMillis() - 86400000);
+
+        NotificationContext context = new NotificationContext(a, t, n, System.currentTimeMillis(), 5, m, h);
+        boolean result = notifier.sendGusNotification(context, NotificationStatus.TRIGGERED);
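+        // NOTE: this integration test posts a real Chatter feed item and only
+        // passes once the "{INSERT VALUE}" placeholders above are replaced
+        // with working GUS OAuth credentials.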
+        assertTrue(result);
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierTest.java
new file mode 100644
index 000000000..27d880156
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusNotifierTest.java
@@ -0,0 +1,341 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import com.google.common.collect.ImmutableList;
+import com.google.inject.Provider;
+import com.salesforce.dva.argus.entity.Alert;
+import com.salesforce.dva.argus.entity.Audit;
+import com.salesforce.dva.argus.entity.History;
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.entity.Notification;
+import com.salesforce.dva.argus.entity.PrincipalUser;
+import com.salesforce.dva.argus.entity.Trigger;
+import com.salesforce.dva.argus.service.AnnotationService;
+import com.salesforce.dva.argus.service.AuditService;
+import com.salesforce.dva.argus.service.MetricService;
+import com.salesforce.dva.argus.service.MonitorService;
+import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext;
+import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.http.HttpEntity;
+import org.apache.http.StatusLine;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.util.EntityUtils;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.slf4j.LoggerFactory;
+
+import javax.persistence.EntityManager;
+import java.math.BigInteger;
+import java.util.Properties;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.powermock.api.mockito.PowerMockito.mock;
+import static org.powermock.api.mockito.PowerMockito.mockStatic;
+import static org.powermock.api.mockito.PowerMockito.verifyStatic;
+import static org.powermock.api.mockito.PowerMockito.when;
+import static org.powermock.api.mockito.PowerMockito.whenNew;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({GusNotifier.class, GusTransport.class, EntityUtils.class, ImageDataRetriever.class})
+public class GusNotifierTest {
+    /* Constants */
+    private static final String SYSTEM_CONFIG_GUS_ENABLED = "system.property.gus.enabled";
+    private static final String GUS_NOTIFIER_GUS_USER = "notifier.property.alert.gus_user";
+    private static final String GUS_NOTIFIER_GUS_PWD = "notifier.property.alert.gus_pwd";
+    private static final String GUS_NOTIFIER_GUS_CLIENT_ID = "notifier.property.alert.gus_client_id";
+    private static final String GUS_NOTIFIER_GUS_CLIENT_SECRET = "notifier.property.alert.gus_client_secret";
+    private static final String GUS_NOTIFIER_GUS_ENDPOINT = "notifier.property.alert.gus_endpoint";
+    private static final String GUS_NOTIFIER_GUS_POST_ENDPOINT = "notifier.property.alert.gus_post_endpoint";
+    private static final String GUS_NOTIFIER_PROXY_HOST = "notifier.property.proxy.host";
+    private static final String GUS_NOTIFIER_PROXY_PORT = "notifier.property.proxy.port";
+
+    private static final String TEST_INSTANCE_URL = "https://test_instance_url.com";
+    private static final String TEST_TOKEN = "test_token";
+
+    private static final int MAX_ATTEMPTS_GUS_POST = 3;
+
+    /* Test mocks */
+    private MetricService metricService;
+    private AnnotationService annotationService;
+    private AuditService auditService;
+    private Provider<EntityManager> emf;
+    private MonitorService monitorService;
+    private CloseableHttpClient httpClient;
+    private Audit auditResult;
+    private GusTransport gusTransport;
+    private CloseableHttpResponse httpResponse;
+    private StatusLine httpResponseStatusLine;
+    private HttpEntity httpResponseEntity;
+
+    /* Class being tested */
+    private GusNotifier notifier;
+
+    /* Test data */
+    private SystemConfiguration config;
+    private Properties properties;
+    private NotificationContext context;
+    private Alert alert;
+    private Trigger trigger;
+    private Notification notification;
+    private Metric metric;
+    private History history;
+
+    private static ch.qos.logback.classic.Logger apacheLogger;
+    private static ch.qos.logback.classic.Logger myClassLogger;
+
+    @BeforeClass
+    static public void setUpClass() {
+        myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("com.salesforce.dva.argus.service.alert.GusNotifierTest");
+        myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF);
+        apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache");
+        apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF);
+    }
+
+    @Before
+    public void setup() {
+        // create mocks
+        metricService = mock(MetricService.class);
+        annotationService = mock(AnnotationService.class);
+        auditService = mock(AuditService.class);
+        emf = mock(Provider.class);
+        monitorService = mock(MonitorService.class);
+        httpClient = mock(CloseableHttpClient.class);
+        auditResult = mock(Audit.class);
+        gusTransport = mock(GusTransport.class);
+        httpResponse = mock(CloseableHttpResponse.class);
+        httpResponseStatusLine = mock(StatusLine.class);
+        httpResponseEntity = mock(HttpEntity.class);
+
+        mockStatic(EntityUtils.class);
+
+        // set up test SystemConfiguration properties
+        properties = new Properties();
+        properties.setProperty(SYSTEM_CONFIG_GUS_ENABLED, "true");
+        properties.setProperty(GUS_NOTIFIER_GUS_USER, "test_gus_user");
+        properties.setProperty(GUS_NOTIFIER_GUS_PWD, "test_gus_pw");
+        properties.setProperty(GUS_NOTIFIER_GUS_CLIENT_ID, "test_gus_client_id");
+        properties.setProperty(GUS_NOTIFIER_GUS_CLIENT_SECRET, "test_gus_client_secret");
+        properties.setProperty(GUS_NOTIFIER_GUS_ENDPOINT, "https://test_gus_ep.com");
+        properties.setProperty(GUS_NOTIFIER_GUS_POST_ENDPOINT, "https://test_gus_post_ep.com");
+        properties.setProperty(GUS_NOTIFIER_PROXY_HOST, "test_proxy_host");
+        properties.setProperty(GUS_NOTIFIER_PROXY_PORT, "9090");
+        config = new SystemConfiguration(properties);
+
+        // set up test data
+        alert = new Alert(new PrincipalUser(null, "test_creator", "test_creator@salesforce.com"),
+                new PrincipalUser(null, "test_owner", "test_owner@salesforce.com"),
+                "test_alert_name",
+                "-1h:test:metric:avg",
+                "test_alert_cron_entry");
+        trigger = new Trigger(alert, Trigger.TriggerType.EQUAL, "test_trigger_name", 3.14, 1000);
+        alert.setTriggers(ImmutableList.of(trigger));
+        notification = new Notification("test_notification_name",
+                alert,
+                "test_notifier_name",
+                ImmutableList.of("test_subscription"),
+                3000);
+        alert.addNotification(notification);
+        metric = new Metric("test_scope", "test_metric_name");
+        history = new History("test_message", "test_host_name", BigInteger.valueOf(456), History.JobStatus.STARTED);
+        context = new NotificationContext(alert, trigger, notification, System.currentTimeMillis(), 3.14, metric, history);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testPostGusNotificationRespCode201() throws Exception {
+        boolean result = sendAdditionalNotification_testHappyCaseTemplate(201);
+        assertTrue(result);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testPostGusNotificationRespCode204() throws Exception {
+        boolean result = sendAdditionalNotification_testHappyCaseTemplate(204);
+        assertTrue(result);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testGusPostFailThenRetryAndPass() throws Exception {
+        int[] postNotificationResponseCode = {405, 201};
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode[0])
+                .thenReturn(postNotificationResponseCode[1]);
+        when(EntityUtils.toString(any())).thenReturn("[{\"message\": \"BLAH\", \"errorCode\": \"BLAH2\"}]");
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertTrue(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(2, 0, 1, true);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testGusPostFailRetryMaxTimes() throws Exception {
+        int postNotificationResponseCode = 404;
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+        when(EntityUtils.toString(any())).thenReturn("[{\"message\": \"BLAH\", \"errorCode\": \"BLAH2\"}]");
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(MAX_ATTEMPTS_GUS_POST, 0, MAX_ATTEMPTS_GUS_POST, false);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testAuthHeaderFailThenRetryAndPass() throws Exception {
+        int[] postNotificationResponseCode = {404, 201};
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode[0])
+                .thenReturn(postNotificationResponseCode[1]);
+        when(EntityUtils.toString(any())).thenReturn("[{\"message\":\"INVALID_HEADER_TYPE\",\"errorCode\":\"INVALID_AUTH_HEADER\"}]");
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertTrue(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(2, 1, 1, true);
+    }
+
+    @Test
+    public void sendAdditionalNotification_testAuthHeaderFailRetryMaxTimes() throws Exception {
+        int postNotificationResponseCode = 404;
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+        when(EntityUtils.toString(any())).thenReturn("[{\"message\":\"INVALID_HEADER_TYPE\",\"errorCode\":\"INVALID_AUTH_HEADER\"}]");
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+        assertFalse(result);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(MAX_ATTEMPTS_GUS_POST, 2, MAX_ATTEMPTS_GUS_POST, false);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testPostGusNotificationRespCode201() throws Exception {
+        boolean result = clearAdditionalNotification_testHappyCaseTemplate(201);
+        assertTrue(result);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testPostGusNotificationRespCode204() throws Exception {
+        boolean result = clearAdditionalNotification_testHappyCaseTemplate(204);
+        assertTrue(result);
+    }
+
+    @Test
+    public void clearAdditionalNotification_testDisableClearNotification() throws Exception {
+        context.getNotification().setEnableClearNotification(false);
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.clearAdditionalNotification(context);
+
+        // verify
+        assertTrue(result);
+        verify(gusTransport, never()).getEndpointInfo(false);
+        verify(gusTransport, never()).getEndpointInfo(true);
+        verify(httpClient, never()).execute(any());
+        verify(httpResponse, never()).getStatusLine();
+        verify(httpResponseStatusLine, never()).getStatusCode();
+        verify(httpResponse, never()).close();
+    }
+
+    private boolean sendAdditionalNotification_testHappyCaseTemplate(int postNotificationResponseCode) throws Exception {
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.sendAdditionalNotification(context);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate();
+
+        return result;
+    }
+
+    private boolean clearAdditionalNotification_testHappyCaseTemplate(int postNotificationResponseCode) throws Exception {
+        // define mock behavior
+        sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(postNotificationResponseCode);
+
+        // create object under test
+        notifier = new GusNotifier(metricService, annotationService, auditService, config, emf, monitorService);
+
+        // test
+        boolean result = notifier.clearAdditionalNotification(context);
+
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate();
+
+        return result;
+    }
+
+    private void sendOrClearAdditionalNotification_mockBehaviorHappyCaseTemplate() throws Exception {
+        // define mock behavior
+        when(auditService.createAudit(any())).thenReturn(auditResult);
+        whenNew(GusTransport.class).withAnyArguments().thenReturn(gusTransport);
+        when(gusTransport.getEndpointInfo(anyBoolean())).thenReturn(new GusTransport.EndpointInfo(TEST_INSTANCE_URL, TEST_TOKEN));
+        when(gusTransport.getHttpClient()).thenReturn(httpClient);
+        when(httpClient.execute(any())).thenReturn(httpResponse);
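+        // The response status code is intentionally left unstubbed by this
+        // shared template; each test chains its own getStatusCode()
+        // expectation on top of it to drive the pass, retry, or fail path.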
+        when(httpResponse.getStatusLine()).thenReturn(httpResponseStatusLine);
+    }
+
+    private void sendOrClearAdditionalNotification_verifyMocksHappyCaseTemplate() throws Exception {
+        // verify mocks
+        sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(1, 0, 0, true);
+    }
+
+    private void sendOrClearAdditionalNotification_verifyGusPostMethodMockTemplate(int tries, int refreshCacheTries, int getResponseBodyAsStringTimes, boolean success) throws Exception {
+        verify(gusTransport, times(tries - refreshCacheTries)).getEndpointInfo(false);
+        verify(gusTransport, times(refreshCacheTries)).getEndpointInfo(true);
+        verify(httpClient, times(tries)).execute(any());
+        verify(httpResponse, times(tries)).getStatusLine();
+        verify(httpResponseStatusLine, times(tries)).getStatusCode();
+        verify(httpResponse, times(tries)).close();
+
+        verifyStatic(EntityUtils.class, times(getResponseBodyAsStringTimes));
+        EntityUtils.toString(any());
+
+        verify(monitorService).modifyCounter(MonitorService.Counter.GUS_NOTIFICATIONS_RETRIES, tries - 1, null);
+        verify(monitorService).modifyCounter(MonitorService.Counter.GUS_NOTIFICATIONS_FAILED, success ? 0 : 1, null);
+    }
+
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportIT.java
new file mode 100644
index 000000000..a23188bc8
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportIT.java
@@ -0,0 +1,45 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Optional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+
+public class GusTransportIT {
+    private String AUTH_ENDPOINT = "https://myhost.com/services/oauth2/token";
+    private String AUTH_CLIENT_ID = "{INSERT VALUE}";
+    private String AUTH_CLIENT_SECRET = "{INSERT VALUE}";
+    private String AUTH_USERNAME = "{INSERT VALUE}";
+    private String AUTH_PASSWORD = "{INSERT VALUE}";
+    private GusTransport.EndpointInfo DEFAULT_EP = new GusTransport.EndpointInfo("default EP", "NO_TOKEN");
+    private GusTransport gusTransport;
+
+    @Before
+    public void setUp() {
+        gusTransport = new GusTransport(Optional.empty(),
+                Optional.empty(),
+                Optional.empty(),
+                Optional.empty(),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                DEFAULT_EP,
+                1,
+                1);
+    }
+
+    @Test
+    public void getEndpointInfo_test() throws Exception {
+        GusTransport.EndpointInfo ei = gusTransport.getEndpointInfo();
+        assertNotSame(DEFAULT_EP, ei);
+        assertEquals("https://myhost.com", ei.getEndPoint());
+        assertTrue(ei.getToken().length() > 0);
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportTest.java
new file mode 100644
index 000000000..70dbf3f7c
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/GusTransportTest.java
@@ -0,0 +1,594 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import com.salesforce.dva.argus.service.alert.notifier.GusTransport.EndpointInfo;
+import org.apache.commons.lang.StringUtils;
+import org.apache.http.HttpEntity;
+import org.apache.http.StatusLine;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.util.EntityUtils;
+
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+import org.slf4j.LoggerFactory;
+
+import java.util.Optional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.powermock.api.mockito.PowerMockito.mock;
+import static org.powermock.api.mockito.PowerMockito.mockStatic;
+import static org.powermock.api.mockito.PowerMockito.when;
+
+/**
+ * Ignore the NoSuchAlgorithmException that gets logged for all test cases in this class. The failure occurs
+ * because SSLContext is loaded from PowerMock's classloader.
+ * http://mathieuhicauber-java.blogspot.com/2013/07/powermock-and-ssl-context.html
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({HttpClients.class, EntityUtils.class, HttpClientBuilder.class})
+public class GusTransportTest {
+    private static final String PROXY_HOST = "test_proxy_host";
+    private static final int PROXY_PORT = 9090;
+    private static final String PROXY_USERNAME = "";
+    private static final String PROXY_PASSWORD = "";
+    private static final String AUTH_ENDPOINT = "https://test_auth_ep.com";
+    private static final String AUTH_CLIENT_ID = "test_auth_client_id";
+    private static final String AUTH_CLIENT_SECRET = "test_auth_client_secret";
+    private static final String AUTH_USERNAME = "test_auth_username";
+    private static final String AUTH_PASSWORD = "test_auth_password";
+    private static final String DEFAULT_ENDPOINT = "https://test_default_ep.com";
+    private static final String TEST_INSTANCE_URL = "https://test_instance_url.com";
+    private static final String TEST_TOKEN = "test_token";
+    private static final int CONNECTION_POOL_SIZE = 5;
+    private static final int CONNECTION_POOL_MAX_PER_ROUTE = 5;
+
+    // mocks
+    private HttpClientBuilder httpClientBuilder;
+    private CloseableHttpClient httpClient;
+    private CloseableHttpResponse httpResponse;
+    private StatusLine httpResponseStatusLine;
+    private HttpEntity httpResponseEntity;
+
+    private GusTransport gusTransport;
+
+    private static ch.qos.logback.classic.Logger apacheLogger;
+    private static ch.qos.logback.classic.Logger myClassLogger;
+
+    @BeforeClass
+    static public void setUpClass() {
+        myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("com.salesforce.dva.argus.service.alert.GusTransportTest");
+        myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF);
+        apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache");
+        apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF);
+    }
+
+    @Before
+    public void setUp() {
+        httpClientBuilder = mock(HttpClientBuilder.class);
+        httpClient = mock(CloseableHttpClient.class);
+        httpResponse = mock(CloseableHttpResponse.class);
+        httpResponseStatusLine = mock(StatusLine.class);
+        httpResponseEntity = mock(HttpEntity.class);
+
+        mockStatic(HttpClients.class);
+        mockStatic(EntityUtils.class);
+    }
+
+    @Test
+    public void constructor_test() throws Exception {
+        gusTransport = createGusTransportHappyCase();
+
+        verify(httpClientBuilder).setRoutePlanner(any());
+    }
+
+    @Test
+    public void constructor_testProxyHostStringAndProxyPortString() throws Exception {
+        gusTransport = createGusTransportHappyCase(PROXY_HOST, Integer.toString(PROXY_PORT));
+
+        verify(httpClientBuilder).setRoutePlanner(any());
+    }
+
+    @Test
+    public void constructor_testEmptyProxyHostString() throws Exception {
+        gusTransport = createGusTransportHappyCase(" ", Integer.toString(PROXY_PORT));
+
+        verify(httpClientBuilder, never()).setRoutePlanner(any());
+    }
+
+    @Test
+    public void constructor_testEmptyProxyHost() throws Exception {
+        gusTransport = createGusTransportHappyCase(Optional.empty(), Optional.of(PROXY_PORT), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+
+        verify(httpClientBuilder, never()).setRoutePlanner(any());
+    }
+
+    @Test
+    public void constructor_testEmptyProxyPortString() throws Exception {
+        gusTransport = createGusTransportHappyCase(PROXY_HOST, " ");
+
+        verify(httpClientBuilder, never()).setRoutePlanner(any());
+    }
+
+    @Test
+    public void constructor_testEmptyProxyPort() throws Exception {
+        gusTransport = createGusTransportHappyCase(Optional.of(PROXY_HOST), Optional.empty(), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+
+        verify(httpClientBuilder, never()).setRoutePlanner(any());
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testProxyPortStringNonNumeric() throws Exception {
+        gusTransport = new GusTransport(PROXY_HOST,
+                "ABC",
+                PROXY_USERNAME,
+                PROXY_PASSWORD,
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testProxyHostBlank() throws Exception {
+        gusTransport = createGusTransport(Optional.of(" "), Optional.of(PROXY_PORT), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testProxyPortLessThan0() throws Exception {
+        gusTransport = createGusTransport(Optional.of(PROXY_HOST), Optional.of(-1), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthEndpointBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                "",
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthEndpointNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                null,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthClientIdBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                " ",
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthClientIdNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                null,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthClientSecretBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                " ",
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthClientSecretNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                null,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthUsernameBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                "",
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthUsernameNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                null,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthPasswordBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                "",
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testAuthPasswordNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                null,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
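+    /*
+     * Each constructor test in this class flips exactly one argument to an
+     * invalid value (blank, null, or out of range) while keeping every other
+     * argument valid, so a thrown IllegalArgumentException can only be
+     * attributed to that argument. The guarded preconditions presumably look
+     * something like this (an illustrative sketch, not the actual
+     * GusTransport source):
+     *
+     *   requireArgument(StringUtils.isNotBlank(authEndpoint), "authEndpoint may not be blank");
+     *   requireArgument(connectionPoolSize > 0, "connectionPoolSize must be > 0");
+     */
+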
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testDefaultEndpointNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                null,
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testDefaultEndpointEndpointBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo("", GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testDefaultEndpointEndpointNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(null, GusTransport.NO_TOKEN),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testDefaultEndpointTokenBlank() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, " "),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testDefaultEndpointTokenNull() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, null),
+                CONNECTION_POOL_SIZE,
+                CONNECTION_POOL_MAX_PER_ROUTE);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testInvalidConnectionPoolSize() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                0,
+                1);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void constructor_testInvalidConnectionPoolMaxPerRoute() throws Exception {
+        gusTransport = new GusTransport(Optional.of(PROXY_HOST),
+                Optional.of(PROXY_PORT),
+                Optional.of(PROXY_USERNAME),
+                Optional.of(PROXY_PASSWORD),
+                AUTH_ENDPOINT,
+                AUTH_CLIENT_ID,
+                AUTH_CLIENT_SECRET,
+                AUTH_USERNAME,
+                AUTH_PASSWORD,
+                new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN),
+                1,
+                -1);
+    }
+
+    @Test
+    public void getEndpointInfo_testFalseRefresh() throws Exception {
+        int times = 10;
+        boolean refresh = false;
+
+        gusTransport = createGusTransportHappyCase();
+        EndpointInfo ei = gusTransport.getEndpointInfo();
+
+        assertEquals(TEST_INSTANCE_URL, ei.getEndPoint());
+        assertEquals(TEST_TOKEN, ei.getToken());
+
+        // call again and should not refresh
+        for (int i = 0; i < times; i++) {
+            EndpointInfo ei2 = gusTransport.getEndpointInfo(refresh);
+            assertSame(ei, ei2);
+        }
+
+        verify(httpClient, times(1)).execute(any());
+        verify(httpResponse, times(1)).getStatusLine();
+        verify(httpResponseStatusLine, times(1)).getStatusCode();
+        verify(httpResponse, times(1)).getEntity();
+    }
+
+    @Test
+    public void getEndpointInfo_testTrueRefresh() throws Exception {
+        int times = 10;
+        boolean refresh = true;
+
+        gusTransport = createGusTransportHappyCase();
+        EndpointInfo ei = gusTransport.getEndpointInfo();
+
+        assertEquals(TEST_INSTANCE_URL, ei.getEndPoint());
+        assertEquals(TEST_TOKEN, ei.getToken());
+
+        // call again and should refresh each time
+        for (int i = 1; i < times; i++) {
+            EndpointInfo ei2 = gusTransport.getEndpointInfo(refresh);
+            assertEquals(ei, ei2);
+        }
+
+        verify(httpClient, times(times)).execute(any());
+        verify(httpResponse, times(times)).getStatusLine();
+        verify(httpResponseStatusLine, times(times)).getStatusCode();
+        verify(httpResponse, times(times)).getEntity();
+    }
+
+    @Test
+    public void getEndpointInfo_testFailToInitEndpointOnConstructionWithBadResponse() throws Exception {
+        mockCacheInitExpectations();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(401); // get token response code
+
+        gusTransport = createGusTransport();
+        EndpointInfo ei = gusTransport.getEndpointInfo();
+
+        assertEquals(DEFAULT_ENDPOINT, ei.getEndPoint());
+        assertEquals(GusTransport.NO_TOKEN, ei.getToken());
+
+        verify(httpClient, times(1)).execute(any());
+        verify(httpResponse, times(1)).getStatusLine();
+        verify(httpResponseStatusLine, times(1)).getStatusCode();
+        verify(httpResponse, times(1)).getEntity();
+    }
+
+    @Test
+    public void getEndpointInfo_testFailToRefreshEndpointWithBadResponse() throws Exception {
+        boolean refresh = true;
+        mockCacheInitExpectations();
+        when(httpResponseStatusLine.getStatusCode()).thenReturn(200) // first time, return OK
+                .thenReturn(401); // 2nd time, return bad response
+        when(EntityUtils.toString(httpResponseEntity))
+                .thenReturn("{\"instance_url\": \"" + TEST_INSTANCE_URL + "\", \"access_token\": \"" + TEST_TOKEN + "\"}")
+                .thenReturn("bad response");
+
+        gusTransport = createGusTransport();
+        EndpointInfo ei = gusTransport.getEndpointInfo();
+
+        assertEquals(TEST_INSTANCE_URL, ei.getEndPoint());
+        assertEquals(TEST_TOKEN, ei.getToken());
+
+        EndpointInfo ei2 = gusTransport.getEndpointInfo(refresh);
+        assertSame(ei, ei2);
+
+        verify(httpClient, times(2)).execute(any());
+        verify(httpResponse, times(2)).getStatusLine();
+        verify(httpResponseStatusLine, times(2)).getStatusCode();
+        verify(httpResponse, times(2)).getEntity();
+    }
+
+    private GusTransport createGusTransportHappyCase() throws Exception {
+        return createGusTransportHappyCase(Optional.of(PROXY_HOST), Optional.of(PROXY_PORT), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+    }
+
+    private GusTransport createGusTransport() throws Exception {
+        return createGusTransport(Optional.of(PROXY_HOST), Optional.of(PROXY_PORT), Optional.of(PROXY_USERNAME), Optional.of(PROXY_PASSWORD));
+    }
+
+    private void mockCacheInitExpectations() throws Exception {
+        // define mock behavior where cache init is successful
+        when(httpClient.execute(any())).thenReturn(httpResponse);
+        when(httpResponse.getStatusLine()).thenReturn(httpResponseStatusLine);
+        when(httpResponse.getEntity()).thenReturn(httpResponseEntity);
"{\"instance_url\": \"" + TEST_INSTANCE_URL +"\", \"access_token\": \"" + TEST_TOKEN + "\"}"); + } + + private void mockCacheInitExpectationsHappyCase() throws Exception { + mockCacheInitExpectations(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(200); + } + + private GusTransport createGusTransportHappyCase(String proxyHostString, String proxyPortString) throws Exception { + mockCacheInitExpectationsHappyCase(); + mockHttpClientExpectations(Optional.ofNullable(proxyHostString), + StringUtils.isNumeric(proxyPortString) ? Optional.of(Integer.parseInt(proxyPortString)) : Optional.empty()); + return new GusTransport(proxyHostString, + proxyPortString, + PROXY_USERNAME, + PROXY_PASSWORD, + AUTH_ENDPOINT, + AUTH_CLIENT_ID, + AUTH_CLIENT_SECRET, + AUTH_USERNAME, + AUTH_PASSWORD, + new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN), + CONNECTION_POOL_SIZE, + CONNECTION_POOL_MAX_PER_ROUTE); + } + + private GusTransport createGusTransportHappyCase(Optional proxyHost, Optional proxyPort, Optional proxyUsername, Optional proxyPassword) throws Exception { + mockCacheInitExpectationsHappyCase(); + return createGusTransport(proxyHost, proxyPort, proxyUsername, proxyPassword); + } + + private void mockHttpClientExpectations(Optional proxyHost, Optional proxyPort) throws Exception { + // define mock behavior + when(HttpClients.custom()).thenReturn(httpClientBuilder); + when(httpClientBuilder.setDefaultRequestConfig(any())).thenReturn(httpClientBuilder); + when(httpClientBuilder.setConnectionManager(any())).thenReturn(httpClientBuilder); + when(httpClientBuilder.setSSLContext(any())).thenReturn(httpClientBuilder); + when(httpClientBuilder.setSSLHostnameVerifier(any())).thenReturn(httpClientBuilder); + if (proxyHost.isPresent() && proxyPort.isPresent()) { + when(httpClientBuilder.setRoutePlanner(any())).thenReturn(httpClientBuilder); + } + when(httpClientBuilder.build()).thenReturn(httpClient); + } + + private GusTransport createGusTransport(Optional proxyHost, Optional proxyPort, Optional proxyUsername, Optional proxyPassword) throws Exception { + mockHttpClientExpectations(proxyHost, proxyPort); + + // create new GusTransport + return new GusTransport(proxyHost, + proxyPort, + proxyUsername, + proxyPassword, + AUTH_ENDPOINT, + AUTH_CLIENT_ID, + AUTH_CLIENT_SECRET, + AUTH_USERNAME, + AUTH_PASSWORD, + new EndpointInfo(DEFAULT_ENDPOINT, GusTransport.NO_TOKEN), + CONNECTION_POOL_SIZE, + CONNECTION_POOL_MAX_PER_ROUTE); + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierIT.java new file mode 100644 index 000000000..1df5d959a --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierIT.java @@ -0,0 +1,87 @@ +package com.salesforce.dva.argus.service.alert.notifier; + +import com.salesforce.dva.argus.AbstractTestIT; +import com.salesforce.dva.argus.IntegrationTest; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext; 
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+
+import static org.junit.Assert.assertTrue;
+
+@Category(IntegrationTest.class)
+public class PagerDutyNotifierIT extends AbstractTestIT {
+    private static final String TOKEN = "{INSERT TOKEN HERE}";
+    private static final String ROUTING_KEY = "{INSERT KEY HERE}"; // the integration key/routing key from the PagerDuty config
+    private PagerDutyNotifier notifier;
+
+    @Override
+    @Before
+    public void setUp() {
+        super.setUpZkTestServer();
+        super.setupEmbeddedKafka();
+        Properties p = new Properties();
+        p.setProperty("notifier.property.pagerduty.token", TOKEN);
+        p.setProperty("notifier.property.pagerduty.endpoint", "https://events.pagerduty.com");
+        p.setProperty("system.property.pagerduty.enabled", "true");
+        //p.setProperty("notifier.property.pagerduty.proxy.host", "myhostname.abc.com");
+        //p.setProperty("notifier.property.pagerduty.proxy.port", "8080");
+        system = getInstance(p);
+        system.start();
+    }
+
+    @Test
+    public void sendPagerDutyNotification_test() throws Exception {
+        notifier = system.getNotifierFactory().getPagerDutyNotifier();
+
+        UserService userService = system.getServiceFactory().getUserService();
+        PrincipalUser user = userService.findAdminUser();
+        Alert a = new Alert(user, user, createRandomName(), "-1h:argus.jvm:cores.active:max", "* * * * *");
+        a.setEnabled(true);
+
+        Trigger t = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "TEST TRIGGER IGNORE", 1, 0);
+        List<Trigger> triggerList = new LinkedList<>();
+        triggerList.add(t);
+        a.setTriggers(triggerList);
+
+        List<String> notificationArgList = new ArrayList<>();
+        notificationArgList.add(ROUTING_KEY); // DVA Argus Alerting Subsystem service routing key
+        Notification n = new Notification("TEST NOTIF IGNORE", a, "TEST PAGERDUTY NOTIFIER", notificationArgList, 5000L);
+        FieldUtils.writeField(n, "id", BigInteger.valueOf(12345L), true);
+        n.setSeverityLevel(4);
+        a.addNotification(n);
+
+        Metric m = createMetric();
+
+        History h = new History("TEST HISTORY MESSAGE", SystemConfiguration.getHostname(), new BigInteger("100002"), History.JobStatus.STARTED, 10, System.currentTimeMillis() - 86400000);
+
+        NotificationContext context = new NotificationContext(a, t, n, System.currentTimeMillis(), 5, m, h);
+        boolean result = notifier.sendPagerDutyNotification(context, NotificationStatus.TRIGGERED);
+        assertTrue(result);
+
+        Thread.sleep(10000L);
+        n.setName("TEST NOTIF IGNORE DUPLICATE");
+        result = notifier.sendPagerDutyNotification(context, NotificationStatus.TRIGGERED);
+        assertTrue(result);
+
+        Thread.sleep(120000L);
+        result = notifier.sendPagerDutyNotification(context, NotificationStatus.CLEARED);
+        assertTrue(result);
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierTest.java
new file mode 100644
index 000000000..a63846ee3
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/PagerDutyNotifierTest.java
@@ -0,0 +1,298 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import com.google.common.collect.ImmutableList;
+import com.google.inject.Provider;
+import com.salesforce.dva.argus.entity.Alert;
+import com.salesforce.dva.argus.entity.Audit;
+import com.salesforce.dva.argus.entity.History;
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.entity.Notification;
+import com.salesforce.dva.argus.entity.PrincipalUser;
+import com.salesforce.dva.argus.entity.Trigger;
+import com.salesforce.dva.argus.service.AnnotationService;
+import com.salesforce.dva.argus.service.ArgusTransport;
+import com.salesforce.dva.argus.service.AuditService;
+import com.salesforce.dva.argus.service.MetricService;
+import com.salesforce.dva.argus.service.MonitorService;
+import com.salesforce.dva.argus.service.alert.DefaultAlertService.NotificationContext;
+import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.http.HttpEntity;
+import org.apache.http.StatusLine;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.util.EntityUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import javax.persistence.EntityManager;
+import java.io.InterruptedIOException;
+import java.math.BigInteger;
+import java.util.Properties;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.powermock.api.mockito.PowerMockito.mock;
+import static org.powermock.api.mockito.PowerMockito.mockStatic;
+import static org.powermock.api.mockito.PowerMockito.verifyStatic;
+import static org.powermock.api.mockito.PowerMockito.when;
+import static org.powermock.api.mockito.PowerMockito.whenNew;
+
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({PagerDutyNotifier.class, ArgusTransport.class, EntityUtils.class, ImageDataRetriever.class})
+public class PagerDutyNotifierTest {
+    /* Constants */
+    private static final String SYSTEM_CONFIG_PAGERDUTY_ENABLED = "system.property.pagerduty.enabled";
+    private static final String PAGERDUTY_NOTIFIER_ENDPOINT = "notifier.property.pagerduty.endpoint";
+    private static final String PAGERDUTY_NOTIFIER_TOKEN = "notifier.property.pagerduty.token";
+    private static final String PAGERDUTY_NOTIFIER_PROXY_HOST = "notifier.property.pagerduty.proxy.host";
+    private static final String PAGERDUTY_NOTIFIER_PROXY_PORT = "notifier.property.pagerduty.proxy.port";
+    private static final String PAGERDUTY_NOTIFIER_MAX_POST_ATTEMPTS = "notifier.property.pagerduty.maxPostAttempts";
+    private static final String PAGERDUTY_NOTIFIER_CONNECTION_POOL_MAX_SIZE = "notifier.property.pagerduty.connectionpool.maxsize";
+    private static final String PAGERDUTY_NOTIFIER_CONNECTION_POOL_MAX_PER_ROUTE = "notifier.property.pagerduty.connectionpool.maxperroute";
+    private static final int MAX_POST_ATTEMPTS = 3;
+
+    /* Test mocks */
+    private MetricService metricService;
+    private AnnotationService annotationService;
+    private AuditService auditService;
+    private Provider<EntityManager> emf;
+    private MonitorService monitorService;
+    private CloseableHttpClient httpClient;
+    private Audit auditResult;
ArgusTransport argusTransport; + private CloseableHttpResponse httpResponse; + private StatusLine httpResponseStatusLine; + private HttpEntity httpResponseEntity; + + /* Class being tested */ + private PagerDutyNotifier notifier; + + /* Test data */ + private SystemConfiguration config; + private Properties properties; + private NotificationContext context; + private Alert alert; + private Trigger trigger; + private Notification notification; + private Metric metric; + private History history; + + @Before + public void setup() throws Exception { + // create mocks + metricService = mock(MetricService.class); + annotationService = mock(AnnotationService.class); + auditService = mock(AuditService.class); + emf = mock(Provider.class); + monitorService = mock(MonitorService.class); + httpClient = mock(CloseableHttpClient.class); + auditResult = mock(Audit.class); + argusTransport = mock(ArgusTransport.class); + httpResponse = mock(CloseableHttpResponse.class); + httpResponseStatusLine = mock(StatusLine.class); + httpResponseEntity = mock(HttpEntity.class); + + mockStatic(EntityUtils.class); + + // set up test SystemConfiguration properties + properties = new Properties(); + properties.setProperty(SYSTEM_CONFIG_PAGERDUTY_ENABLED, "true"); + properties.setProperty(PAGERDUTY_NOTIFIER_ENDPOINT, "https://test_pd_ep.com"); + properties.setProperty(PAGERDUTY_NOTIFIER_TOKEN, "test_token"); + properties.setProperty(PAGERDUTY_NOTIFIER_MAX_POST_ATTEMPTS, Integer.toString(MAX_POST_ATTEMPTS)); + properties.setProperty(PAGERDUTY_NOTIFIER_CONNECTION_POOL_MAX_SIZE, "10"); + properties.setProperty(PAGERDUTY_NOTIFIER_CONNECTION_POOL_MAX_PER_ROUTE, "5"); + properties.setProperty(PAGERDUTY_NOTIFIER_PROXY_HOST, "test_proxy_host"); + properties.setProperty(PAGERDUTY_NOTIFIER_PROXY_PORT, "9090"); + config = new SystemConfiguration(properties); + + // set up test data + alert = new Alert(new PrincipalUser(null, "test_creator", "test_creator@salesforce.com"), + new PrincipalUser(null, "test_owner", "test_owner@salesforce.com"), + "test_alert_name", + "-1h:test:metric:avg", + "test_alert_cron_entry"); + trigger = new Trigger(alert, Trigger.TriggerType.EQUAL, "test_trigger_name", 3.14, 1000); + alert.setTriggers(ImmutableList.of(trigger)); + notification = new Notification("test_notification_name", + alert, + "test_notifier_name", + ImmutableList.of("test_subscription"), + 3000); + FieldUtils.writeField(notification, "id", BigInteger.valueOf(12345L), true); + alert.addNotification(notification); + metric = new Metric("test_scope", "test_metric_name"); + history = new History("test_message", "test_host_name", BigInteger.valueOf(456), History.JobStatus.STARTED); + context = new NotificationContext(alert, trigger, notification, System.currentTimeMillis(), 3.14, metric, history); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode202() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(202); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertTrue(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 0, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode400() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + 
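// A 400 response is treated as a permanent failure: the verify template below checks exactly one POST attempt and one read of the response body.
+ 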
when(httpResponseStatusLine.getStatusCode()).thenReturn(400); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertFalse(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 1, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode429RetryMaxTimesAndFail() throws Exception { + sendAdditionalNotification_testPostPagerDutyNotificationResponseCodeXXXRetryMaxTimesAndFail(429); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode429RetryAndPass() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(429).thenReturn(202); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService, + properties.getProperty(PAGERDUTY_NOTIFIER_ENDPOINT), + properties.getProperty(PAGERDUTY_NOTIFIER_TOKEN), + 1L); + + boolean result = notifier.sendAdditionalNotification(context); + assertTrue(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(2, 0, 0, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode4XX() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(402); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertFalse(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 1, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode500RetryMaxTimesAndFail() throws Exception { + sendAdditionalNotification_testPostPagerDutyNotificationResponseCodeXXXRetryMaxTimesAndFail(500); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode500RetryAndPass() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(500).thenReturn(202); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertTrue(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(2, 0, 0, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationResponseCode502RetryMaxTimesAndFail() throws Exception { + sendAdditionalNotification_testPostPagerDutyNotificationResponseCodeXXXRetryMaxTimesAndFail(502); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationThrowsInterruptedIOException() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpClient.execute(any())).thenThrow(new InterruptedIOException("TEST")); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertFalse(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_POST_ATTEMPTS, MAX_POST_ATTEMPTS, 0, result); + } + + @Test + public void sendAdditionalNotification_testPostPagerDutyNotificationThrowsException() throws Exception { + 
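// Non-I/O runtime exceptions are not retried: the verify template below checks a single POST attempt, one recorded exception, and no read of the response body.
+ 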
sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpClient.execute(any())).thenThrow(new RuntimeException("TEST")); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.sendAdditionalNotification(context); + assertFalse(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(1, 1, 0, result); + } + + @Test + public void clearAdditionalNotification_testPostPagerDutyNotificationResponseCode202() throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(202); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService); + + boolean result = notifier.clearAdditionalNotification(context); + assertTrue(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(1, 0, 0, result); + } + + private void sendAdditionalNotification_testPostPagerDutyNotificationResponseCodeXXXRetryMaxTimesAndFail(int statusCode) throws Exception { + sendOrClearAdditionalNotification_mockBehaviorTemplate(); + when(httpResponseStatusLine.getStatusCode()).thenReturn(statusCode); + + notifier = new PagerDutyNotifier(metricService, annotationService, auditService, config, emf, monitorService, + properties.getProperty(PAGERDUTY_NOTIFIER_ENDPOINT), + properties.getProperty(PAGERDUTY_NOTIFIER_TOKEN), + 1L); + + boolean result = notifier.sendAdditionalNotification(context); + assertFalse(result); + + sendOrClearAdditionalNotification_verifyMocksTemplate(MAX_POST_ATTEMPTS, 0, 0, result); + } + + private void sendOrClearAdditionalNotification_mockBehaviorTemplate() throws Exception { + whenNew(ArgusTransport.class).withAnyArguments().thenReturn(argusTransport); + when(argusTransport.getHttpClient()).thenReturn(httpClient); + when(httpClient.execute(any())).thenReturn(httpResponse); + when(httpResponse.getStatusLine()).thenReturn(httpResponseStatusLine); + when(EntityUtils.toString(any())).thenReturn("default"); + } + + private void sendOrClearAdditionalNotification_verifyMocksTemplate(int tries, int exceptionsThrown, int getResponseBodyAsStringTimes, boolean success) throws Exception { + verify(argusTransport, times(1)).getHttpClient(); + verify(httpClient, times(tries)).execute(any()); + verify(httpResponse, times(tries - exceptionsThrown)).getStatusLine(); + verify(httpResponseStatusLine, times(tries - exceptionsThrown)).getStatusCode(); + verify(httpResponse, times(tries - exceptionsThrown)).close(); + + verifyStatic(EntityUtils.class, times(getResponseBodyAsStringTimes)); + EntityUtils.toString(any()); + + verify(monitorService).modifyCounter(MonitorService.Counter.PAGERDUTY_NOTIFICATIONS_RETRIES, tries - 1, null); + verify(monitorService).modifyCounter(MonitorService.Counter.PAGERDUTY_NOTIFICATIONS_FAILED, success ? 
0 : 1, null);
+    }
+
+}
\ No newline at end of file
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarderTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarderTest.java
new file mode 100644
index 000000000..edadf3f0d
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/notifier/RefocusForwarderTest.java
@@ -0,0 +1,542 @@
+package com.salesforce.dva.argus.service.alert.notifier;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.*;
+
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.stream.IntStream;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.persistence.EntityManager;
+
+import com.salesforce.dva.argus.TestUtils;
+
+
+import com.salesforce.dva.argus.service.RefocusService;
+import com.salesforce.dva.argus.service.HistoryService;
+import com.salesforce.dva.argus.entity.History;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import com.salesforce.dva.argus.system.SystemMain;
+import org.junit.*;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+
+import org.apache.http.impl.client.*;
+import org.apache.http.HttpStatus;
+
+import java.util.function.Supplier;
+import java.util.Iterator;
+
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.inject.Provider;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+
+
+@RunWith(org.mockito.junit.MockitoJUnitRunner.class)
+public class RefocusForwarderTest {
+
+    @Mock private Provider<EntityManager> _emProviderMock;
+    @Mock private HistoryService _historyServiceMock;
+    @Mock private ObjectMapper _mapper;
+
+    private SystemMain system;
+    private SystemConfiguration _configuration;
+    private RefocusForwarder refocusForwarder;
+
+    private final Logger LOGGER = LoggerFactory.getLogger(RefocusForwarderTest.class);
+    private static final boolean mapToErrorLogging = false; // call LOGGER.error for all logs when true.
+
+
+    // @Before
+    public void refocus_setup() {
+
+        _configuration = system.getConfiguration();
+
+        _configuration.setProperty("system.property.refocus.enabled", "true");
+        _configuration.setProperty(RefocusProperty.REFOCUS_MAX_REQUESTS_PER_MINUTE.getName(), "100000"); // any value above 6000 will result in no sleeping in the unit tests.
+        _configuration.setProperty(RefocusProperty.REFOCUS_SEND_INTERVAL_MS.getName(), "0"); // no waiting
+        _configuration.setProperty(RefocusProperty.REFOCUS_MAX_BULK_ITEMS.getName(), "50"); // 50 samples per bulk request
+        _configuration.setProperty(RefocusProperty.REFOCUS_CONNECTION_REFRESH_MAX_TIMES.getName(), "3"); // ensure it is at least 3
+
+        refocusForwarder = new RefocusForwarder(_configuration, _emProviderMock);
+
+        CloseableHttpClient mockClient = mock(CloseableHttpClient.class);
+        refocusForwarder = spy(refocusForwarder);
+        when(refocusForwarder.getHttpClient()).thenReturn(mockClient);
+    }
+
+
+    @BeforeClass
+    static public void setUpClass() {
+    }
+
+    @AfterClass
+    static public void tearDownClass() {
+    }
+
+    @Before
+    public void setup() {
+
+        system = TestUtils.getInstanceWithInMemProps(); // getInstance();
+        system.start();     // 4s initially, 0.7s each subsequent run.
+        refocus_setup();    // 50ms initially, 0-2ms each subsequent run.
+    }
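+
+    // Test harness note: each test installs a Supplier<RefocusForwarder.RefocusResponse> stub via setStubSender, so no real HTTP traffic is generated; StubResult instances script the HTTP status (or exception) returned for each send attempt.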
+
+    @After
+    public void tearDown() {
+        if (system != null) {
+            system.getServiceFactory().getManagementService().cleanupRecords();
+            system.stop();
+        }
+    }
+
+
+    // -----------------------------------------------------------------------------------
+    // Support for Generating sequences of stubbed responses.
+    // -----------------------------------------------------------------------------------
+
+    static class StubResult {
+        int sleep_ms;
+        int resp_code;
+        JsonObject gson;
+
+        public StubResult(int sleep_ms, int resp_code, JsonObject gson)
+        {
+            this.sleep_ms = sleep_ms;
+            this.resp_code = resp_code;
+            this.gson = gson;
+        }
+
+        public void action() throws RuntimeException
+        {
+        }
+    }
+
+
+    static class StubIOExceptionResult extends StubResult {
+
+        public StubIOExceptionResult(int sleep_ms, int resp_code, JsonObject gson)
+        {
+            super(sleep_ms, resp_code, gson);
+        }
+
+        @Override
+        public void action() throws RuntimeException
+        {
+            throw new RuntimeException(new IOException());
+        }
+    }
+
+    static class StubInterruptedIOExceptionResult extends StubResult {
+
+        public StubInterruptedIOExceptionResult(int sleep_ms, int resp_code, JsonObject gson)
+        {
+            super(sleep_ms, resp_code, gson);
+        }
+
+        @Override
+        public void action() throws RuntimeException
+        {
+            throw new RuntimeException(new InterruptedIOException());
+        }
+    }
+
+
+    static JsonObject makeUpcertResponse(String status, int jobId) {
+        JsonObject response = new JsonObject();
+        response.addProperty("status", status);
+        response.addProperty("jobId", jobId);
+        return response;
+    }
+
+    static StubResult make200(int sleep_ms, int txId)
+    {
+        return new StubResult(sleep_ms, HttpStatus.SC_OK, makeUpcertResponse("OK", txId));
+    }
+
+    // OK result
+    static StubResult make201(int sleep_ms, int txId)
+    {
+        return new StubResult(sleep_ms, HttpStatus.SC_CREATED, makeUpcertResponse("OK", txId));
+    }
+
+    static StubResult make204(int sleep_ms, int txId)
+    {
+        return new StubResult(sleep_ms, HttpStatus.SC_NO_CONTENT, makeUpcertResponse("OK", txId));
+    }
+
+    static StubResult makeTimedOut(int sleep_ms) // suggest timeouts >= 10000
+    {
+        return new StubResult(sleep_ms, HttpStatus.SC_REQUEST_TIMEOUT, new JsonObject());
+    }
+
+    static StubResult makeTooManyRequests(int sleep_ms)
+    {
+        return new StubResult(sleep_ms, 429, new JsonObject());
+    }
+
+    static StubResult makeWithResponseCode(int sleep_ms, int respCode)
+    {
+        return new StubResult(sleep_ms, respCode, new JsonObject());
+    }
+
+    static StubResult makeWithIOException(int sleep_ms, int respCode)
+    {
+        return new StubIOExceptionResult(sleep_ms, respCode, new JsonObject());
+    }
+
+    static StubResult makeWithInterruptedIOException(int sleep_ms, int respCode)
+    {
+        return new StubInterruptedIOExceptionResult(sleep_ms, respCode, new JsonObject());
+    }
+
+    static class ResultStubSupplier implements Supplier<RefocusForwarder.RefocusResponse> {
+
+        protected JsonObject cloneJson(JsonObject r)
+        {
+            String jsonText = r.toString();
+            JsonObject obj = (new JsonParser()).parse(jsonText).getAsJsonObject();
+            return obj;
+        }
+
+        public RefocusForwarder.RefocusResponse get() throws RuntimeException
+        {
+            return null;
+        }
+    }
+
+    static class ResultListSupplier extends ResultStubSupplier {
+
+        ArrayList<StubResult> results;
+        Iterator<StubResult> iterator;
+        RefocusForwarder forwarder;
+        StubResult defaultResult;
+
+        public ResultListSupplier( ArrayList<StubResult> resultsToReturn, StubResult defaultResult, RefocusForwarder forwarder)
+        {
+            super();
+            this.results = resultsToReturn;
+            this.iterator = resultsToReturn.iterator();
+            this.forwarder = forwarder;
+            this.defaultResult = defaultResult;
+        }
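+
+        // Returns the next scripted result, falling back to defaultResult once the list is exhausted.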
+        @Override
+        public RefocusForwarder.RefocusResponse get() throws RuntimeException
+        {
+            StubResult r = this.iterator.hasNext() ? this.iterator.next() : defaultResult;
+            // SleepMs(r.sleep_ms);
+            r.action();
+            RefocusForwarder.RefocusResponse refocus_response = forwarder.makeResponse(r.resp_code, cloneJson(r.gson));
+            return refocus_response;
+        }
+    }
+
+    static class ResultCycleStubSupplier extends ResultStubSupplier {
+
+        StubResult[] resultCycle;
+        Iterator<StubResult> iterator;
+        RefocusForwarder forwarder;
+        int pos = 0;
+
+        public ResultCycleStubSupplier( StubResult[] resultCycle, RefocusForwarder forwarder)
+        {
+            this.resultCycle = resultCycle;
+            this.pos = 0;
+            this.forwarder = forwarder;
+            assert(this.resultCycle.length > 0);
+        }
+
+        @Override
+        public RefocusForwarder.RefocusResponse get() throws RuntimeException
+        {
+            StubResult r = this.resultCycle[ this.pos % this.resultCycle.length ];
+            this.pos = (this.pos + 1) % this.resultCycle.length;
+            // SleepMs(r.sleep_ms);
+            r.action();
+            RefocusForwarder.RefocusResponse refocus_response = forwarder.makeResponse(r.resp_code, cloneJson(r.gson));
+            return refocus_response;
+        }
+    }
+
+    void enqueueSamples(int numSamples, History history)
+    {
+        IntStream.range(0,numSamples).forEach( x -> {
+
+            String value = String.format("%d", x);
+            String sa = String.format("a.b%d|c", x);
+            try
+            {
+                refocusForwarder.sendRefocusNotification(sa, value, "myuser", "mytoken", history);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        });
+    }
+
+
+    boolean waitUntilQueuedNotificationsAreProcessed(RefocusService service)
+    {
+        boolean rv = true;
+        try
+        {
+            while( refocusForwarder.getNotificationsDelivered() + refocusForwarder.getNotificationsDiscarded() < refocusForwarder.getNotificationsEnqueued())
+            {
+                int forwarded = service.forwardNotifications();
+            }
+        }
+        catch( InterruptedException | RuntimeException e)
+        {
+            rv = false;
+        }
+        return rv;
+    }
+
+    void logForwarderStats()
+    {
+        int maxQueueLength = refocusForwarder.getMaxQueueLength();
+        _info(MessageFormat.format("MaxQueueLength was {0} samples", maxQueueLength));
+    }
+
+    void verifyProcessed( long expect_delivered, long expect_discarded, long enqueued)
+    {
+        long delivered = refocusForwarder.getNotificationsDelivered();
+        long discarded = refocusForwarder.getNotificationsDiscarded();
+        long processed = refocusForwarder.getNotificationsProcessed();
+        assertEquals(expect_delivered, delivered);
+        assertEquals(expect_discarded, discarded);
+        assertEquals(processed, delivered+discarded);
+        assertEquals(processed, enqueued);
+    }
+
+    // -----------------------------------------------------------------------------------
+    // Tests
+    // -----------------------------------------------------------------------------------
+
+
+    @Test
+    public void testRefocusForwarderSendSamples() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(new ArrayList<StubResult>(),
+                make200(10, 1),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+        int num_notifications = 100;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(num_notifications, 0, num_notifications);
+    }
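+
+    // With every attempt answered 429, the retry budget is exhausted and each sample is eventually discarded, as the next test verifies.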
+    @Test
+    public void testRefocusForwarderThrottled() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeTooManyRequests(120),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+
+    @Test
+    public void testRefocusForwarderTimedOut() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeTimedOut(10),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+    @Test
+    public void testRefocusForwarderInterruptedIOException() {
+
+        RefocusForwarder.Duration d = new RefocusForwarder.Duration();
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeWithInterruptedIOException(10, -1),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+    @Test
+    public void testRefocusForwarderIOException() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeWithIOException(10, -1),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+    @Test
+    public void testRefocusNoAuth() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeWithResponseCode(10, HttpStatus.SC_UNAUTHORIZED),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+
+        int num_notifications = 5;
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+    @Test
+    public void testRefocusServiceUnavail() {
+
+        ResultListSupplier responseSupplier = new ResultListSupplier(
+                new ArrayList<StubResult>(),
+                makeWithResponseCode(10, HttpStatus.SC_SERVICE_UNAVAILABLE),
+                refocusForwarder);
+
+        refocusForwarder.setStubSender(responseSupplier);
+
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
+        verifyProcessed(0, num_notifications, num_notifications);
+    }
+
+    @Test
+    public void testIntermittentThrottle() {
+
+        // Assumes retries >= 3
+        StubResult [] cycle = {
+                makeTooManyRequests(10),
+                makeTooManyRequests(10),
+                make200(10, 1)
+        };
+        ResultCycleStubSupplier responseSupplier = new ResultCycleStubSupplier( cycle, refocusForwarder);
+        refocusForwarder.setStubSender(responseSupplier);
+
+        int num_notifications = 5;
+
+        History history = mock(History.class);
+        enqueueSamples(num_notifications, history);
+        waitUntilQueuedNotificationsAreProcessed(refocusForwarder);
+
+        logForwarderStats();
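+        // Two 429s followed by a 200 appear to stay within the retry budget (REFOCUS_CONNECTION_REFRESH_MAX_TIMES is set to 3 above), so every sample should be delivered.
+        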
verifyProcessed(num_notifications, 0, num_notifications); + } + + @Test + public void testIntermittentInterruptedIOException() { + + // Assumes retries >= 3 + StubResult [] cycle = { + makeWithInterruptedIOException(10, -1), + makeWithInterruptedIOException(10, -1), + make200(10, 1) + }; + ResultCycleStubSupplier responseSupplier = new ResultCycleStubSupplier( cycle, refocusForwarder); + refocusForwarder.setStubSender(responseSupplier); + + int num_notifications = 5; + + History history = mock(History.class); + enqueueSamples(num_notifications, history); + waitUntilQueuedNotificationsAreProcessed(refocusForwarder); + + logForwarderStats(); + verifyProcessed(num_notifications, 0, num_notifications); + } + + + // ================================================================================= + // Logging wrappers + // Note: these exist because I haven't found a mechanism for setting the log level programmatically. + // ================================================================================= + + + // Syntactic sugar + private void _info(String msg) + { + if (mapToErrorLogging) { + LOGGER.error(msg); + } + else { + LOGGER.info(msg); + } + } + + private void _error(String msg) + { + LOGGER.error(msg); + } + + private void _warn(String msg) + { + if (mapToErrorLogging) { + LOGGER.error(msg); + } + else { + LOGGER.warn(msg); + } + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrieverTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrieverTest.java new file mode 100644 index 000000000..b6ad30b85 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/alert/retriever/ImageDataRetrieverTest.java @@ -0,0 +1,171 @@ +package com.salesforce.dva.argus.service.alert.retriever; + +import com.google.common.collect.ImmutableList; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.History; +import com.salesforce.dva.argus.entity.ImageProperties; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.service.AlertService.Notifier.NotificationStatus; +import com.salesforce.dva.argus.service.ImageService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import com.salesforce.dva.argus.system.SystemMain; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentMatcher; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; +import org.apache.commons.lang3.tuple.Pair; + +import java.util.Arrays; +import java.util.List; + +import static com.salesforce.dva.argus.TestUtils.generateAlert; +import static com.salesforce.dva.argus.TestUtils.getHistory; +import static com.salesforce.dva.argus.TestUtils.getMetric; +import static com.salesforce.dva.argus.TestUtils.getNotification; +import static com.salesforce.dva.argus.TestUtils.getTrigger; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class 
ImageDataRetrieverTest {
+
+    private static final String IMAGE_URL_PREFIX = "http://localhost:8080/argus/#/images/";
+    private static final String IMAGE_ID = "img1";
+
+    @Mock
+    private ImageService imageServiceMock;
+
+    @Mock
+    private PrincipalUser principalUserMock;
+
+    static private SystemMain system;
+
+    @BeforeClass
+    static public void setUpClass() {
+        system = TestUtils.getInstance();
+        system.start();
+    }
+
+    @AfterClass
+    static public void tearDownClass() {
+        if (system != null) {
+            system.getServiceFactory().getManagementService().cleanupRecords();
+            system.stop();
+        }
+    }
+
+    private ImageDataRetriever imageDataRetriever;
+
+    @Before
+    public void setup() {
+        imageDataRetriever = new ImageDataRetriever(imageServiceMock, system.getConfiguration());
+    }
+
+    @Test
+    public void testGetImageUrl() {
+        String imageURL = imageDataRetriever.getImageURL(Pair.of(IMAGE_ID, "Test String".getBytes()));
+        assertEquals(imageURL, IMAGE_URL_PREFIX + IMAGE_ID);
+    }
+
+    @Test
+    public void testGetImageUrlWhenImageDetailsIsNull() {
+        String imageURL = imageDataRetriever.getImageURL(null);
+        assertEquals(imageURL, null);
+    }
+
+    @Test
+    public void testGetImageUrlWhenImageIDIsNull() {
+        String imageURL = imageDataRetriever.getImageURL(Pair.of(null, null));
+        assertEquals(imageURL, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testRetrievingAnnotatedImageWhenNotificationContextIsNull() {
+        Pair<String, byte[]> imageDetails = imageDataRetriever.getAnnotatedImage(null);
+
+    }
+
+    @Test
+    public void testRetrievingAnnotatedImageWhenTriggerTypeIsGreaterThan() {
+        Double triggerThreshold = 1D;
+        Long triggerFiredTime = 1563692460000L;
+        Long inertia = 1800*1000L;
+        ImageDataRetrievalContext context = getImageDataRetrievalContext(Trigger.TriggerType.GREATER_THAN, triggerFiredTime, inertia, triggerThreshold, NotificationStatus.TRIGGERED);
+
+        ArgumentMatcher<ImageProperties> imageProperties = new ArgumentMatcher<ImageProperties>() {
+            @Override
+            public boolean matches(ImageProperties imageProperties) {
+                return imageProperties.getShadeXAxisArea().size() == 1 && imageProperties.getShadeYAxisArea().size() == 1;
+            }
+        };
+
+        ArgumentMatcher<List<Metric>> metricList = new ArgumentMatcher<List<Metric>>() {
+            @Override
+            public boolean matches(List<Metric> metricList) {
+                return metricList.size() == 1;
+            }
+        };
+        when(imageServiceMock.generateAndStoreImage(argThat(metricList), argThat(imageProperties),
+                eq(true))).thenReturn(Pair.of("1", "TestString".getBytes()));
+        Pair<String, byte[]> imageDetails = imageDataRetriever.getAnnotatedImage(context);
+
+        verify(imageServiceMock, times(1)).generateAndStoreImage(argThat(metricList),
+                argThat(imageProperties), eq(true));
+    }
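+
+    // When the trigger has CLEARED, no threshold region should be shaded on the annotated image: both the X- and Y-axis shade areas are expected to be null.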
+    @Test
+    public void testRetrievingAnnotatedImageWhenTriggerStatusIsCleared() {
+        Double triggerThreshold = 1D;
+        Long triggerFiredTime = 1563692460000L;
+        Long inertia = 1800*1000L;
+        ImageDataRetrievalContext context = getImageDataRetrievalContext(Trigger.TriggerType.GREATER_THAN, triggerFiredTime, inertia, triggerThreshold, NotificationStatus.CLEARED);
+
+        ArgumentMatcher<ImageProperties> imageProperties = new ArgumentMatcher<ImageProperties>() {
+            @Override
+            public boolean matches(ImageProperties imageProperties) {
+                return imageProperties.getShadeYAxisArea() == null && imageProperties.getShadeXAxisArea() == null;
+            }
+        };
+        when(imageServiceMock.generateAndStoreImage(eq(ImmutableList.of(context.getTriggeredMetric())), argThat(imageProperties),
+                eq(true))).thenReturn(Pair.of("1", "TestString".getBytes()));
+        Pair<String, byte[]> imageDetails = imageDataRetriever.getAnnotatedImage(context);
+
+        verify(imageServiceMock, times(1)).generateAndStoreImage(
+                eq(ImmutableList.of(context.getTriggeredMetric())), argThat(imageProperties), eq(true));
+    }
+
+    private ImageDataRetrievalContext getImageDataRetrievalContext(Trigger.TriggerType triggerType,
+                                                                   long triggerFiredTime,
+                                                                   long inertiaMillis,
+                                                                   double triggerThreshold, NotificationStatus notificationStatus) {
+        Alert alert = generateAlert("TestName", principalUserMock, "-1h:argus.jvm:cores.active:max");
+        alert.setEnabled(true);
+
+        Trigger trigger = getTrigger(alert, triggerType, "TestTrigger", String.valueOf(triggerThreshold),
+                String.valueOf(inertiaMillis));
+        List<Trigger> triggerList = ImmutableList.of(trigger);
+        alert.setTriggers(triggerList);
+
+        Notification notification = getNotification("TEST EMAIL NOTIFICATION",
+                "TEST_EMAIL_NOTIFIER", alert, Arrays.asList("test-subscription"));
+        alert.addNotification(notification);
+
+        Metric metric = getMetric();
+        History history = getHistory();
+
+        return new ImageDataRetrievalContext(alert, trigger, triggerFiredTime, metric, notificationStatus);
+    }
+
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationServiceTest.java
new file mode 100644
index 000000000..af07b99c3
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DefaultAnnotationServiceTest.java
@@ -0,0 +1,111 @@
+package com.salesforce.dva.argus.service.annotation;
+
+import com.google.common.collect.ImmutableMap;
+import com.salesforce.dva.argus.entity.Annotation;
+import com.salesforce.dva.argus.entity.PrincipalUser;
+import com.salesforce.dva.argus.service.MonitorService;
+import com.salesforce.dva.argus.service.MonitorService.Counter;
+import com.salesforce.dva.argus.service.TSDBService;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+import org.apache.commons.lang.RandomStringUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.junit.MockitoJUnitRunner;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static junit.framework.TestCase.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+
+@RunWith(MockitoJUnitRunner.class)
+public class DefaultAnnotationServiceTest {
+    @Mock
+    private TSDBService tsdbService;
+    @Mock
+    private MonitorService monitorService;
+    private SystemConfiguration config;
+    private DefaultAnnotationService defaultAnnotationService;
+
+    /* TEST DATA */
+    Annotation a;
+    Annotation tooLargeAnnotation;
+    PrincipalUser pu;
+
+    @Before
+    public void setUp() {
+        config = new SystemConfiguration(new Properties());
+        defaultAnnotationService = new DefaultAnnotationService(tsdbService, monitorService, config);
+
+        a = new Annotation("source",
+                "id",
+                "type",
+                "scope",
+                "metric",
+                System.currentTimeMillis());
+        tooLargeAnnotation = new Annotation("source2",
+                "id2",
+                "type2",
+                "scope2",
+                "metric2",
+                System.currentTimeMillis());
+        // set up annotation with size larger than max size allowed
+        final int TAG_SIZE = 100;
+        final int NUM_TAGS = DefaultAnnotationService.MAX_ANNOTATION_SIZE_BYTES / TAG_SIZE / 2;
+        for (int i = 0; i < NUM_TAGS; i++) {
+            tooLargeAnnotation.setTag(RandomStringUtils.random(TAG_SIZE), RandomStringUtils.random(TAG_SIZE));
+        }
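+        // NUM_TAGS tags of two random 100-character strings already put the annotation at roughly MAX_ANNOTATION_SIZE_BYTES; the fields below push it past the limit.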
+        final Map<String, String> fields = new HashMap<>();
+        for (int i = 0; i < 10; i++) {
+            fields.put(RandomStringUtils.random(10), RandomStringUtils.random(10));
+        }
+        tooLargeAnnotation.setFields(fields);
+
+        pu = new PrincipalUser(null, "username", "email");
+    }
+
+    @Test
+    public void updateAnnotations_testAnnotationSizeLessThanMax() {
+        // test
+        defaultAnnotationService.updateAnnotations(ImmutableMap.of(a, pu));
+
+        // verify
+        verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 1, null);
+        ArgumentCaptor<List> annotationListCaptor = ArgumentCaptor.forClass(List.class);
+        verify(tsdbService).putAnnotations(annotationListCaptor.capture());
+        assertEquals(1, annotationListCaptor.getValue().size());
+        assertTrue(annotationListCaptor.getValue().contains(a));
+    }
+
+    @Test
+    public void updateAnnotations_testListContainingOneAnnotationSizeGreaterThanMax() {
+        // test
+        defaultAnnotationService.updateAnnotations(ImmutableMap.of(a, pu, tooLargeAnnotation, pu));
+
+        // verify
+        verify(monitorService).modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, ImmutableMap.of("source", tooLargeAnnotation.getSource()));
+        verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 1, null);
+        ArgumentCaptor<List> annotationListCaptor = ArgumentCaptor.forClass(List.class);
+        verify(tsdbService).putAnnotations(annotationListCaptor.capture());
+        assertEquals(1, annotationListCaptor.getValue().size());
+    }
+
+    @Test
+    public void updateAnnotations_testOnlyOneAnnotationSizeGreaterThanMax() {
+        // test
+        defaultAnnotationService.updateAnnotations(ImmutableMap.of(tooLargeAnnotation, pu));
+
+        // verify
+        verify(monitorService).modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, ImmutableMap.of("source", tooLargeAnnotation.getSource()));
+        verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 0, null);
+        verify(tsdbService, never()).putAnnotations(any());
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DualAnnotationServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DualAnnotationServiceTest.java
new file mode 100644
index 000000000..3e2dd75d3
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/DualAnnotationServiceTest.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright (c) 2016, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+package com.salesforce.dva.argus.service.annotation;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+
+import java.util.List;
+import java.util.Properties;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.salesforce.dva.argus.service.MonitorService;
+import com.salesforce.dva.argus.service.schema.ElasticSearchUtils;
+import com.salesforce.dva.argus.service.tsdb.AnnotationQuery;
+import com.salesforce.dva.argus.service.tsdb.DefaultTSDBService;
+import com.salesforce.dva.argus.system.SystemConfiguration;
+
+public class DualAnnotationServiceTest {
+    private static SystemConfiguration systemConfig;
+    private static DualAnnotationService dualAnnotationService;
+
+    @BeforeClass
+    public static void setUpClass() {
+        Properties config = new Properties();
+        config.put("service.property.tsdb.connection.count", "2");
+        config.put("service.property.tsdb.endpoint.read", "http://tsdbread.mycompany.com:4466");
+        config.put("service.property.tsdb.endpoint.write", "http://tsdbwrite.mycompany.com:4477");
+        systemConfig = new SystemConfiguration(config);
+        MonitorService mockedMonitor = mock(MonitorService.class);
+        ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class);
+        DefaultTSDBService tsdbService = new DefaultTSDBService(systemConfig, mockedMonitor);
+        ElasticSearchAnnotationService esAnnotationService = new ElasticSearchAnnotationService(systemConfig, mockedMonitor, mockedElasticSearchUtils);
+        dualAnnotationService = new DualAnnotationService(systemConfig, esAnnotationService, tsdbService);
+    }
+
+    @Test
+    public void testConvertTimestampToMillis(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359073L, 1557809599073L);
+        dualAnnotationService.convertTimestampToMillis(annotationQuery);
+        assertEquals(1557809359073L, annotationQuery.getStartTimestamp().longValue());
+        assertEquals(1557809599073L, annotationQuery.getEndTimestamp().longValue());
+
+        annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359L, 1557809599L);
+        dualAnnotationService.convertTimestampToMillis(annotationQuery);
+        assertEquals(1557809359000L, annotationQuery.getStartTimestamp().longValue());
+        assertEquals(1557809599000L, annotationQuery.getEndTimestamp().longValue());
+
+        annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359123L, 1557809599L);
+        dualAnnotationService.convertTimestampToMillis(annotationQuery);
+        assertEquals(1557809359123L, annotationQuery.getStartTimestamp().longValue());
+        assertEquals(1557809599000L, annotationQuery.getEndTimestamp().longValue());
+
+        annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359L, 1557809599456L);
+        dualAnnotationService.convertTimestampToMillis(annotationQuery);
+        assertEquals(1557809359000L, 
annotationQuery.getStartTimestamp().longValue());
+        assertEquals(1557809599456L, annotationQuery.getEndTimestamp().longValue());
+    }
+
+    @Test
+    public void testSplitQuery(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1559153223000L, 1559153226000L);
+        List<AnnotationQuery> queries = dualAnnotationService.splitQuery(annotationQuery);
+        assertEquals(2, queries.size());
+        AnnotationQuery tsdbQuery = queries.get(0);
+        AnnotationQuery esQuery = queries.get(1);
+        assertEquals("scope1", tsdbQuery.getScope());
+        assertEquals("metric1", tsdbQuery.getMetric());
+        assertEquals("unittest", tsdbQuery.getType());
+        assertEquals(1559153223000L, tsdbQuery.getStartTimestamp().longValue());
+        assertEquals(1559153225000L, tsdbQuery.getEndTimestamp().longValue());
+
+        assertEquals("scope1", esQuery.getScope());
+        assertEquals("metric1", esQuery.getMetric());
+        assertEquals("unittest", esQuery.getType());
+        assertEquals(1559153225000L, esQuery.getStartTimestamp().longValue());
+        assertEquals(1559153226000L, esQuery.getEndTimestamp().longValue());
+    }
+
+    @Test
+    public void testQueryBeforeEpochCutOffTimestamp(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1559596094000L, 1559596095000L);
+        assertFalse(dualAnnotationService.isQueryHavingEpochCutOff(annotationQuery));
+    }
+
+    @Test
+    public void testQueryAfterEpochCutOffTimestamp(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1559594094000L, 1559594095000L);
+        assertFalse(dualAnnotationService.isQueryHavingEpochCutOff(annotationQuery));
+    }
+
+    @Test
+    public void testQueryAcrossEpochCutOffTimestamp(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1559153223000L, 1559153226000L);
+        assertTrue(dualAnnotationService.isQueryHavingEpochCutOff(annotationQuery));
+    }
+}
\ No newline at end of file
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationServiceTest.java
new file mode 100644
index 000000000..76790f113
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/annotation/ElasticSearchAnnotationServiceTest.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright (c) 2016, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + + +package com.salesforce.dva.argus.service.annotation; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.reflect.Whitebox; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.verifyStatic; + +@PowerMockIgnore("*.ssl.*") +@RunWith(PowerMockRunner.class) +@PrepareForTest(ElasticSearchUtils.class) +public class ElasticSearchAnnotationServiceTest { + + private RestClient restClient; + private String createSucessReply = String.join("\n", + "{" + + " \"took\": 55," + + " \"errors\": false," + + " \"items\": [" + + " {" + + " \"create\": {" + + " \"_index\": \"argus-annotation\"," + + " \"_id\": \"cdfd12850d42746257f8217899647c8b\"," + + " \"_version\": 1," + + " \"result\": \"created\"," + + " \"_shards\": {" + + " \"total\": 2," + + " \"successful\": 1," + + " \"failed\": 0" + + " }," + + " \"created\": true," + + " \"status\": 201" + + " }" + + " }" + + " ]" + + "}"); + + private String getReply = String.join("\n", + "{", + " \"took\": 9,", + " \"timed_out\": false,", + " \"_shards\": {", + " \"total\": 6,", + " \"successful\": 6,", + " \"skipped\": 0,", + " \"failed\": 0", + " },", + " \"hits\": {", + " \"total\": 1,", + " \"max_score\": 0.0,", + " \"hits\": [", + " {", + " \"_index\": \"argus-annotation-2019-05\",", + " \"_id\": 
\"44618b179e858ef9fb7b49997ebdba0c\",", + " \"_score\": 0.0,", + " \"_source\": {", + " \"metric\": \"metric1\",", + " \"scope\": \"scope1\",", + " \"source\": \"unittest\",", + " \"id\": \"16ab4b56311\",", + " \"type\": \"unittest\",", + " \"fields\": \"{}\",", + " \"tags\": \"{}\",", + " \"sid\": \"f9c22bcbd813474ec99f7011ae50b080\",", + " \"ts\": \"1557809559073\"", + " }", + " }", + " ]", + " }", + "}"); + + private String annotationQueryMustTermRange = String.join("\n", + "{", + " \"query\": {", + " \"bool\": {", + " \"must\": [", + " {", + " \"term\": {", + " \"sid.raw\": \"f9c22bcbd813474ec99f7011ae50b080\"", + " }", + " },", + " {", + " \"range\": {", + " \"ts\": {", + " \"gte\": \"1557809359073\",", + " \"lte\": \"1557809599073\"", + " }", + " }", + " }", + " ]", + " }", + " },", + " \"from\": 0,", + " \"size\": 10000", + "}"); + + private static SystemConfiguration systemConfig; + private static ElasticSearchAnnotationService esAnnotationService; + private static ObjectMapper mapper = new ObjectMapper(); + + private String convertToPrettyJson(String jsonString) { + JsonParser parser = new JsonParser(); + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + + JsonElement el = parser.parse(jsonString); + return gson.toJson(el); + } + + @BeforeClass + public static void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + mockStatic(ElasticSearchUtils.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + esAnnotationService = new ElasticSearchAnnotationService(systemConfig, mockedMonitor, mockedElasticSearchUtils); + } + + @Before + public void setUp() { + TestUtils.setStaticField(ElasticSearchAnnotationService.class, "ANNOTATION_INDEX_MAX_RESULT_WINDOW", 10000); + } + + @Test + public void testConvertTimestampToMillis(){ + AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359073L, 1557809599073L); + esAnnotationService.convertTimestampToMillis(annotationQuery); + assertEquals(1557809359073L, annotationQuery.getStartTimestamp().longValue()); + assertEquals(1557809599073L, annotationQuery.getEndTimestamp().longValue()); + + annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359L, 1557809599L); + esAnnotationService.convertTimestampToMillis(annotationQuery); + assertEquals(1557809359000L, annotationQuery.getStartTimestamp().longValue()); + assertEquals(1557809599000L, annotationQuery.getEndTimestamp().longValue()); + + annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359123L, 1557809599L); + esAnnotationService.convertTimestampToMillis(annotationQuery); + assertEquals(1557809359123L, annotationQuery.getStartTimestamp().longValue()); + assertEquals(1557809599000L, annotationQuery.getEndTimestamp().longValue()); + + annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359L, 1557809599456L); + esAnnotationService.convertTimestampToMillis(annotationQuery); + assertEquals(1557809359000L, annotationQuery.getStartTimestamp().longValue()); + assertEquals(1557809599456L, annotationQuery.getEndTimestamp().longValue()); + } + + @Test + public void testAnnotationRecordListMapper() throws IOException { + mapper = ElasticSearchAnnotationService.getAnnotationObjectMapper(new AnnotationRecordList.IndexSerializer()); + + Annotation record1 = new Annotation("unittest", "id123", "unittest", "scope1", 
"metric1", 1557801635504L); + AnnotationRecordList recordList = new AnnotationRecordList(Arrays.asList(record1), AnnotationRecordList.HashAlgorithm.fromString("MD5")); + + String serialized = mapper.writeValueAsString(recordList); + String[] lines = serialized.split("\\r?\\n"); + + String expectedIndexName = "argus-annotation-2019-05"; + JsonNode root = mapper.readTree(lines[0]); + String actualIndexName = root.get("index").get("_index").asText(); + assertEquals(expectedIndexName, actualIndexName); + + String expectedSerializedAnnotation = + "{\"metric\":\"metric1\",\"scope\":\"scope1\",\"source\":\"unittest\",\"id\":\"id123\",\"type\":\"unittest\",\"fields\":\"{}\",\"tags\":\"{}\",\"sid\":\"f9c22bcbd813474ec99f7011ae50b080\",\"ts\":\"1557801635504\"}"; + assertEquals(expectedSerializedAnnotation, lines[1]); + } + + @Test + public void testPutAnnotationsUsingAnnotationIndex() throws IOException { + ElasticSearchAnnotationService spyService = _initializeSpyService(esAnnotationService, createSucessReply, true); + List annotations = new ArrayList<>(); + + Annotation record1 = new Annotation("unittest", "id456", "unittest", "scope1", "metric1", 1557800720441L); + annotations.add(record1); + + spyService.putAnnotations(annotations); + + ArgumentCaptor requestCaptorUrl = ArgumentCaptor.forClass(String.class); + ArgumentCaptor requestCaptorBody = ArgumentCaptor.forClass(String.class); + + verifyStatic(ElasticSearchUtils.class, times(1)); + ElasticSearchUtils.performESRequest(eq(restClient), requestCaptorUrl.capture(), requestCaptorBody.capture()); + + + String expectedURL = "_bulk"; + assertEquals(expectedURL, requestCaptorUrl.getValue()); + + String[] lines = requestCaptorBody.getValue().split("\\r?\\n"); + + String expectedIndexName = "argus-annotation-2019-05"; + JsonNode root = mapper.readTree(lines[0]); + String actualIndexName = root.get("index").get("_index").asText(); + assertEquals(expectedIndexName, actualIndexName); + + String expectedSerializedAnnotation = + "{\"metric\":\"metric1\",\"scope\":\"scope1\",\"source\":\"unittest\",\"id\":\"id456\",\"type\":\"unittest\",\"fields\":\"{}\",\"tags\":\"{}\",\"sid\":\"f9c22bcbd813474ec99f7011ae50b080\",\"ts\":\"1557800720441\"}"; + assertEquals(expectedSerializedAnnotation, lines[1]); + } + + @Test + public void testGetAnnotations() throws IOException { + AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359073L, 1557809599073L); + List queries = new ArrayList<>(); + queries.add(annotationQuery); + ElasticSearchAnnotationService spyService = _initializeSpyService(esAnnotationService, getReply, false); + + List annotations = spyService.getAnnotations(queries); + Annotation expectedAnnotation = new Annotation("unittest", "16ab4b56311", "unittest", "scope1", "metric1", 1557809559073L); + assertEquals(expectedAnnotation, annotations.get(0)); + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(Request.class); + verify(restClient, times(1)).performRequest(requestCaptor.capture()); + Request capturedRequest = requestCaptor.getValue(); + + String queryJson = convertToPrettyJson(EntityUtils.toString(capturedRequest.getEntity())); + assertEquals(annotationQueryMustTermRange, queryJson); + String expectedURL = "/argus-annotation-*/_search"; + assertEquals(expectedURL, capturedRequest.getEndpoint()); + } + + @Test (expected = RuntimeException.class) + public void testGetAnnotationsExceedingLimit(){ + AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 
+    @Test (expected = RuntimeException.class)
+    public void testGetAnnotationsExceedingLimit(){
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", null, "unittest", 1557809359073L, 1557809599073L);
+        List<AnnotationQuery> queries = new ArrayList<>();
+        queries.add(annotationQuery);
+
+        ElasticSearchAnnotationService spyService = null;
+        try {
+            spyService = _initializeSpyService(esAnnotationService, getReply, false);
+        } catch (IOException e) {
+            fail();
+        }
+        Whitebox.setInternalState(spyService, "ANNOTATION_INDEX_MAX_RESULT_WINDOW", 1);
+        spyService.getAnnotations(queries);
+    }
+
+    @Test
+    public void testHashedSearchIdentifierAnnotationAndAnnotationQueryMatch(){
+        Map<String, String> annotationQueryTags = new HashMap<>();
+        annotationQueryTags.put("device","device1");
+        annotationQueryTags.put("podName","pod1");
+        AnnotationQuery annotationQuery = new AnnotationQuery("scope1", "metric1", annotationQueryTags, "transactionId", 1557809359073L, 1557809599073L);
+
+        Annotation annotation = new Annotation("source", "16ab4b56311", "transactionId", "scope1", "metric1", 1557809559073L);
+        annotation.setTags(annotationQueryTags);
+        assertEquals(AnnotationRecordList.getHashedSearchIdentifier(annotation), ElasticSearchAnnotationService.getHashedSearchIdentifier(annotationQuery));
+
+        // change order of tags in annotation query
+        Map<String, String> annotationQueryTags2 = new HashMap<>();
+        annotationQueryTags2.put("podName","pod1");
+        annotationQueryTags2.put("device","device1");
+        AnnotationQuery annotationQuery2 = new AnnotationQuery("scope1", "metric1", annotationQueryTags2, "transactionId", 1557809359073L, 1557809599073L);
+        assertEquals(AnnotationRecordList.getHashedSearchIdentifier(annotation), ElasticSearchAnnotationService.getHashedSearchIdentifier(annotationQuery2));
+    }
+
+    private ElasticSearchAnnotationService _initializeSpyService(ElasticSearchAnnotationService service,
+                                                                 String firstReply, boolean isPut) throws IOException {
+
+        restClient = mock(RestClient.class);
+        service.setESRestClient(restClient);
+        mockStatic(ElasticSearchUtils.class);
+        if (isPut) {
+            when(ElasticSearchUtils.performESRequest(eq(restClient), any(), any())).thenReturn(mapper.readValue(firstReply, ElasticSearchUtils.PutResponse.class));
+        } else {
+            when(ElasticSearchUtils.extractResponse(any())).thenReturn(firstReply);
+            mapper = ElasticSearchAnnotationService.getAnnotationObjectMapper(new AnnotationRecordList.IndexSerializer());
+            AnnotationRecordList ret = mapper.readValue(firstReply, new TypeReference<AnnotationRecordList>() {});
+            when(ElasticSearchUtils.toEntity(any(), any(),any())).thenReturn(ret);
+        }
+
+        ElasticSearchAnnotationService spyService = spy(service);
+
+        return spyService;
+    }
+}
\ No newline at end of file
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/auth/NoAuthTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/auth/NoAuthTest.java
new file mode 100644
index 000000000..4af909c12
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/auth/NoAuthTest.java
@@ -0,0 +1,36 @@
+package com.salesforce.dva.argus.service.auth;
+
+import com.salesforce.dva.argus.TestUtils;
+import com.salesforce.dva.argus.entity.PrincipalUser;
+import com.salesforce.dva.argus.service.MonitorService;
+import com.salesforce.dva.argus.service.UserService;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class NoAuthTest {
+
+    @Test
+    public void testNoAuthDoesNotThrow() {
+
+        String userName = "user";
+
+        PrincipalUser principalUser1 = new PrincipalUser(null, userName, userName);
+
+        UserService mockUserService = mock(UserService.class);
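+        // NoAuthService performs no real authentication; it should resolve any username/password pair through the user service without throwing.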
+ when(mockUserService.findUserByUsername(any())).thenReturn(principalUser1); + when(mockUserService.updateUser(any())).thenReturn(principalUser1); + + MonitorService mockMonitorService = mock(MonitorService.class); + + NoAuthService authService = new NoAuthService(TestUtils.getConfiguration(), mockUserService, mockMonitorService); + + PrincipalUser principalUser2 = authService.getUser(userName, userName); + + assertEquals(principalUser2.getUserName(), userName); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/cache/RedisCacheServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/cache/RedisCacheServiceTest.java new file mode 100644 index 000000000..6aae37ff7 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/cache/RedisCacheServiceTest.java @@ -0,0 +1,347 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.salesforce.dva.argus.service.cache; + +import com.salesforce.dva.argus.system.SystemException; +import org.junit.Test; +import java.util.ArrayList; +import java.util.List; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.HashSet; +import java.util.Properties; +import java.io.IOException; +import com.google.common.collect.Lists; +import java.util.Enumeration; + + + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.any; + + +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.service.cache.CacheRedisClient; +import com.salesforce.dva.argus.service.cache.RedisCacheService; + +import redis.clients.jedis.JedisCluster; + + +public class RedisCacheServiceTest { + + static SystemConfiguration config = TestUtils.getConfiguration(); + + @BeforeClass + static public void setUpClass() { + } + + @AfterClass + static public void tearDownClass() { + } + + + @Test + public void testGetKey() { + String keyName = "blah"; + String expectedValue = "valueblah"; + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + when(mockJedisClient.get(keyName)).thenReturn(expectedValue); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + String actualValue = redisCacheService.get(keyName); + assertEquals(expectedValue, actualValue); + actualValue = redisCacheService.get("nonexistant"); + assertEquals(null, actualValue); + } + + @Test + public void testGetKeySet() { + String keyName1 = "blah1"; + String keyName2 = "blah2"; + String expectedValue = "valueblah"; + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + when(mockJedisClient.get(keyName1)).thenReturn(expectedValue); + when(mockJedisClient.get(keyName2)).thenReturn(expectedValue); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Set keySet = new HashSet<>(Arrays.asList(keyName1, keyName2)); + Map actualMap = redisCacheService.get(keySet); + Map expectedMap = new HashMap<>(); + expectedMap.put(keyName1, expectedValue); + expectedMap.put(keyName2, expectedValue); + assertEquals(expectedMap, actualMap); + + keySet = new HashSet<>(Arrays.asList("non1", "non2")); + Map expectedMap2 = new HashMap<>(); + expectedMap2.put("non1", null); + expectedMap2.put("non2", null); + actualMap = redisCacheService.get(keySet); + assertEquals(expectedMap2, actualMap); + } + + @Test + public void testPutKey() { + String keyName = "blah"; + String expectedValue = "valueblah"; + JedisCluster mockJedisClient = mock(JedisCluster.class); + 
CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + redisCacheService.put(keyName, expectedValue, 1); + verify(mockJedisClient, times(1)).set(keyName, expectedValue); + verify(mockJedisClient, times(1)).expire(keyName, 1); + + } + + @Test + public void testPutKeySet() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Map entries = new HashMap<>(); + entries.put("non1", "blah"); + entries.put("non2", "blah"); + + redisCacheService.put(entries, 10); + verify(mockJedisClient, times(2)).set(anyString(), anyString()); + verify(mockJedisClient, times(2)).expire(anyString(), anyInt()); + + } + + @Test + public void testExpire() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.expire("key1", 10); + verify(mockJedisClient, times(1)).expire("key1", 10); + + } + + @Test + public void testExpireKeySet() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Set keys = new HashSet<>(Arrays.asList("key1", "key2")); + redisCacheService.expire(keys, 10); + verify(mockJedisClient, times(2)).expire(anyString(), anyInt()); + + } + + @Test + public void testExist() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.exist("key1"); + verify(mockJedisClient, times(1)).exists("key1"); + + } + + @Test + public void testExistKeySet() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Set keys = new HashSet<>(Arrays.asList("key1", "key2")); + redisCacheService.exist(keys); + verify(mockJedisClient, times(2)).exists(anyString()); + } + + @Test + public void testDelete() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.delete("key1"); + verify(mockJedisClient, times(1)).del("key1"); + } + + @Test + public void testDeleteKeySet() { + JedisCluster mockJedisClient 
= mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Set<String> keys = new HashSet<>(Arrays.asList("key1", "key2")); + redisCacheService.delete(keys); + verify(mockJedisClient, times(2)).del(anyString()); + } + + @Test + public void testAppend() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.append("key1", "val1"); + verify(mockJedisClient, times(1)).rpush("key1", "val1"); + } + + @Test + public void testGetRange() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.getRange("key1", 2, 10); + verify(mockJedisClient, times(1)).lrange("key1", 2, 10); + } + + @Test + public void testAppendWithTtl() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.append("key1", "val1", 10); + verify(mockJedisClient, times(1)).rpush("key1", "val1"); + verify(mockJedisClient, times(1)).expire("key1", 10); + } + + @Test + public void testGetRangeSet() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Set<String> keys = new HashSet<>(Arrays.asList("key1", "key2")); + redisCacheService.getRange(keys, 2, 10); + verify(mockJedisClient, times(1)).lrange("key1", 2, 10); + verify(mockJedisClient, times(1)).lrange("key2", 2, 10); + } + + @Test + public void testGetServiceProperties() { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + Properties props = redisCacheService.getServiceProperties();
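+ // The documented Redis config keys should appear in the service properties; gather the names and assert each is present below.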
+ Enumeration<?> enumeration = props.propertyNames(); + List<String> namesList = Lists.newArrayList(); + while (enumeration.hasMoreElements()) { + namesList.add((String) enumeration.nextElement()); + } + String name1 = "service.property.cache.redis.cache.expiry.in.sec"; + assertTrue("propname [" + name1 + "] not found in " + namesList, + namesList.contains(name1)); + + name1 = "service.property.cache.redis.cluster"; + assertTrue("propname [" + name1 + "] not found in " + namesList, + namesList.contains(name1)); + + name1 = "service.property.cache.redis.server.max.connections"; + assertTrue("propname [" + name1 + "] not found in " + namesList, + namesList.contains(name1)); + + } + + @Test + public void testDispose() throws IOException { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.dispose(); + verify(mockJedisClient, times(1)).close(); + } + + @Test + public void testGetByPattern() throws IOException { + JedisCluster mockJedisClient = mock(JedisCluster.class); + CacheRedisClient mockedCachedRedisClient = mock(CacheRedisClient.class); + when(mockedCachedRedisClient.getJedisClusterClient()).thenReturn(mockJedisClient); + + RedisCacheService redisCacheService = new RedisCacheService(config, mockedCachedRedisClient); + + redisCacheService.getByPattern("abc"); + verify(mockJedisClient, times(1)).getClusterNodes(); + } + + + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/collect/DefaultCollectionServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/collect/DefaultCollectionServiceTest.java new file mode 100644 index 000000000..a4538f238 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/collect/DefaultCollectionServiceTest.java @@ -0,0 +1,265 @@ +package com.salesforce.dva.argus.service.collect; + +import com.fasterxml.jackson.databind.type.CollectionType; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Histogram; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.service.AnnotationStorageService; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.MQService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.MonitorService.Counter; +import com.salesforce.dva.argus.service.NamespaceService; +import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.WardenService; +import org.apache.commons.lang.RandomStringUtils; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.TestUtils; + + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static com.salesforce.dva.argus.service.MQService.MQQueue.ANNOTATION; +import static org.junit.Assert.assertEquals; +import static
org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.Mockito.mock; +import org.slf4j.LoggerFactory; + + +import java.util.Properties; + + +@RunWith(org.mockito.junit.MockitoJUnitRunner.class) +public class DefaultCollectionServiceTest { + + @Mock MQService mqService; + @Mock TSDBService tsdbService; + @Mock AuditService auditService; + @Mock AnnotationStorageService annotationStorageService; + @Mock SchemaService schemaService; + @Mock WardenService wardenService; + @Mock MonitorService monitorService; + @Mock NamespaceService namespaceService; + DefaultCollectionService collectionService; + PrincipalUser user; + static private SystemConfiguration systemConfig; + + @Before + public void setup() { + ch.qos.logback.classic.Logger apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + ch.qos.logback.classic.Logger myClassLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("com.salesforce.dva.argus.service.collect.DefaultCollectionService"); + myClassLogger.setLevel(ch.qos.logback.classic.Level.OFF); + + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + collectionService = new DefaultCollectionService(mqService, + tsdbService, + auditService, + annotationStorageService, + systemConfig, + schemaService, + wardenService, + monitorService, + namespaceService); + user = mock(PrincipalUser.class); + when(wardenService.isWardenServiceEnabled()).thenReturn(true); + } + + Metric createMetricWithIncreasingResolution(long minResolution, int dpCount) { + Metric metric = TestUtils.createMetric(); + metric.clearDatapoints(); + long time = System.currentTimeMillis(); + Map dps = new HashMap<>(); + for (int i = dpCount; i > 0; i--) { + time -= i * minResolution; + dps.put(time, 1.0); + } + metric.setDatapoints(dps); + return metric; + } + + @Test + public void testParseMetricData() { + long bigRes = 10000; + Metric metricBiggerRes = createMetricWithIncreasingResolution(bigRes, 6); + + collectionService.submitMetric(user, metricBiggerRes); + verify(wardenService).updatePolicyCounter(user, WardenService.PolicyCounter.MINIMUM_RESOLUTION_MS, bigRes); + + long smallRes = 1000; + Metric metricSmallerRes = createMetricWithIncreasingResolution(smallRes, 6); + metricSmallerRes.setTag("source", "unittest2"); + + collectionService.submitMetrics(user, Arrays.asList(metricBiggerRes, metricSmallerRes)); + verify(wardenService).updatePolicyCounter(user, WardenService.PolicyCounter.MINIMUM_RESOLUTION_MS, smallRes); + } + + @Test + public void testSubmitMetric() { + when(namespaceService.isPermitted(any(), any())).thenReturn(true); + int metricCount = 60; + List metrics = new ArrayList<>(metricCount); + for (int i = 0; i < metricCount; i++) { + metrics.add(createMetricWithIncreasingResolution(1000, 2)); + } + collectionService.submitMetrics(user, metrics); + verify(monitorService).modifyCounter(MonitorService.Counter.DATAPOINT_WRITES, metricCount * 2, null); + verify(wardenService).modifyPolicyCounter(user, WardenService.PolicyCounter.METRICS_PER_HOUR, metricCount); + verify(wardenService).modifyPolicyCounter(user, WardenService.PolicyCounter.DATAPOINTS_PER_HOUR, metricCount * 2); + } + + @Test + public void 
testSubmitAnnotation() { + Annotation annotation = TestUtils.createAnnotation(); + collectionService.submitAnnotation(user, annotation); + verify(monitorService).modifyCounter(MonitorService.Counter.ANNOTATION_WRITES, 1, null); + } + + @Test + public void testSubmitHistogram() { + Histogram histogram = TestUtils.createHistogram(3); + collectionService.submitHistogram(user, histogram); + verify(monitorService).modifyCounter(MonitorService.Counter.HISTOGRAM_WRITES, 1, null); + } + + @Test + public void testSubmitHistogramBucketsExceeded() { + Histogram histogram = TestUtils.createHistogram(101); + collectionService.submitHistogram(user, histogram); + verify(monitorService).modifyCounter(MonitorService.Counter.HISTOGRAM_DROPPED, 1, null); + } + + @Test + public void testSubmitHistogramBucketsEmpty() { + Histogram histogram = TestUtils.createHistogram(0); + collectionService.submitHistogram(user, histogram); + verify(monitorService).modifyCounter(MonitorService.Counter.HISTOGRAM_DROPPED, 1, null); + } + + @Test + public void testSubmitHistogramBucketsWrongBounds() { + Histogram histogram = TestUtils.createHistogramWrongBounds(2); + collectionService.submitHistogram(user, histogram); + verify(monitorService).modifyCounter(MonitorService.Counter.HISTOGRAM_DROPPED, 1, null); + } + + @Test + public void testCommitMetrics() { + List messages = Arrays.asList( + new ArrayList<>(Arrays.asList(TestUtils.createMetric())), + new ArrayList<>(Arrays.asList(TestUtils.createMetric())) + ); + when(mqService.dequeue(eq(MQService.MQQueue.METRIC.getQueueName()), any(CollectionType.class), anyInt(), anyInt())).thenReturn(messages); + assertEquals(2, collectionService.commitMetrics(2, 60000).size()); + } + + @Test + public void testCommitMetricSchema() { + List messages = Arrays.asList( + new ArrayList<>(Arrays.asList(TestUtils.createMetric())), + new ArrayList<>(Arrays.asList(TestUtils.createMetric())) + ); + when(mqService.dequeue(eq(MQService.MQQueue.METRIC.getQueueName()), any(CollectionType.class), anyInt(), anyInt())).thenReturn(messages); + assertEquals(2, collectionService.commitMetricSchema(2, 60000)); + } + + @Test + public void testCommitAnnotations() { + List messages = Arrays.asList(TestUtils.createAnnotation(), TestUtils.createAnnotation()); + when(mqService.dequeue(eq(MQService.MQQueue.ANNOTATION.getQueueName()), eq(Annotation.class), anyInt(), anyInt())).thenReturn(messages); + assertEquals(2, collectionService.commitAnnotations(2, 60000)); + } + + @Test + public void testCommitHistograms() { + List messages = Arrays.asList(TestUtils.createHistogram(4), TestUtils.createHistogram(5)); + when(mqService.dequeue(eq(MQService.MQQueue.HISTOGRAM.getQueueName()), eq(Histogram.class), anyInt(), anyInt())).thenReturn(messages); + assertEquals(2, collectionService.commitHistograms(2, 60000)); + } + + @Test + public void submitAnnotations_testAnnotationSizeLessThanMax() { + Annotation a = TestUtils.createAnnotation(); + + // test + collectionService.submitAnnotations(user, ImmutableList.of(a)); + + // verify + verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 1, null); + ArgumentCaptor<List<Annotation>> annotationListCaptor = ArgumentCaptor.forClass(List.class); + verify(mqService).enqueue(eq(ANNOTATION.getQueueName()), annotationListCaptor.capture()); + assertEquals(1, annotationListCaptor.getValue().size()); + assertTrue(annotationListCaptor.getValue().contains(a)); + } + + @Test + public void submitAnnotations_testListContainingOneAnnotationSizeGreaterThanMax() { + Annotation a = TestUtils.createAnnotation();
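+ // The annotation built below exceeds MAX_ANNOTATION_SIZE_BYTES, so the service should drop it and enqueue only the normal one.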
+ Annotation tooLargeAnnotation = createAnnotationWithSizeTooLarge(); + + // test + collectionService.submitAnnotations(user, ImmutableList.of(a, tooLargeAnnotation)); + + // verify + verify(monitorService).modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, ImmutableMap.of("source", tooLargeAnnotation.getSource())); + verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 1, null); + ArgumentCaptor<List<Annotation>> annotationListCaptor = ArgumentCaptor.forClass(List.class); + verify(mqService).enqueue(eq(ANNOTATION.getQueueName()), annotationListCaptor.capture()); + assertEquals(1, annotationListCaptor.getValue().size()); + } + + @Test + public void updateAnnotations_testOnlyOneAnnotationSizeGreaterThanMax() { + Annotation tooLargeAnnotation = createAnnotationWithSizeTooLarge(); + + // test + collectionService.submitAnnotations(user, ImmutableList.of(tooLargeAnnotation)); + + // verify + verify(monitorService).modifyCounter(Counter.ANNOTATION_DROPS_MAXSIZEEXCEEDED, 1, ImmutableMap.of("source", tooLargeAnnotation.getSource())); + verify(monitorService).modifyCounter(Counter.ANNOTATION_WRITES, 0, null); + verify(mqService, never()).enqueue(any(), (List) any()); + } + + private Annotation createAnnotationWithSizeTooLarge() { + Annotation tooLargeAnnotation = new Annotation("source2", + "id2", + "type2", + "scope2", + "metric2", + System.currentTimeMillis()); + // set up annotation with size larger than max size allowed + final int TAG_SIZE = 100; + final int NUM_TAGS = DefaultCollectionService.MAX_ANNOTATION_SIZE_BYTES / TAG_SIZE / 2; + for (int i = 0; i < NUM_TAGS; i++) { + tooLargeAnnotation.setTag(RandomStringUtils.random(TAG_SIZE), RandomStringUtils.random(TAG_SIZE)); + } + final Map<String, String> fields = new HashMap<>(); + for (int i = 0; i < 10; i++) { + fields.put(RandomStringUtils.random(10), RandomStringUtils.random(10)); + } + tooLargeAnnotation.setFields(fields); + return tooLargeAnnotation; + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/DefaultImageServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/DefaultImageServiceTest.java new file mode 100644 index 000000000..ca0e624c8 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/DefaultImageServiceTest.java @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.image; + +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mock; + +import java.util.Properties; + +import static org.mockito.Mockito.mock; + +public class DefaultImageServiceTest { + + @Mock + private ElasticSearchImageService elasticSearchImageService; + private DefaultImageService defaultImageService; + + @Before + public void setUp() { + Properties config = new Properties(); + SystemConfiguration systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + elasticSearchImageService = new ElasticSearchImageService(systemConfig, mockedMonitor, mockedElasticSearchUtils); + defaultImageService = new DefaultImageService(elasticSearchImageService,systemConfig); + } + + @Test(expected=IllegalArgumentException.class) + public void testNullOREmptyImageId() { + defaultImageService.getImageById(null); + defaultImageService.getImageById(""); + } + + @Test(expected=IllegalArgumentException.class) + public void testStoreNullImageBytes() { + defaultImageService.storeImage(null,false); + defaultImageService.storeImage("".getBytes(),false); + + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/ElasticSearchImageServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/ElasticSearchImageServiceTest.java new file mode 100644 index 000000000..b6d38460c --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/image/ElasticSearchImageServiceTest.java @@ -0,0 +1,295 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.image; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import com.salesforce.dva.argus.util.ImageUtils; +import org.apache.http.entity.BasicHttpEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.ArgumentCaptor; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Properties; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class ElasticSearchImageServiceTest { + + private String createSucessReply = String.join("\n", + "{" + + " \"took\": 55," + + " \"errors\": false," + + " \"items\": [" + + " {" + + " \"create\": {" + + " \"_index\": \"argus-image\"," + + " \"_id\": \"cdfd12850d42746257f8217899647c8b\"," + + " \"_version\": 1," + + " \"result\": \"created\"," + + " \"_shards\": {" + + " \"total\": 2," + + " \"successful\": 1," + + " \"failed\": 0" + + " }," + + " \"created\": true," + + " \"status\": 201" + + " }" + + " }" + + " ]" + + "}"); + private String getReply = String.join("\n", + "{", + " \"took\": 9,", + " \"timed_out\": false,", + " \"_shards\": {", + " \"total\": 6,", + " \"successful\": 6,", + " \"skipped\": 0,", + " \"failed\": 0", + " },", + " \"hits\": {", + " \"total\": 1,", + " \"max_score\": 1.0,", + " \"hits\": [", + " {", + " \"_index\": \"argus-image-2019-06-11\",", + " \"_id\": \"0FD3DBEC9730101BFF92ACC820BEFC34\",", + " \"_score\": 1.0,", + " \"_source\": {", + " \"imageblob\": \"VGVzdCBzdHJpbmc=\",", + " \"mts\": \"1560798017039\"", + " }", + " }", + " ]", + " }", + "}"); + + private String imageIdQuery = String.join("\n", + "{", + " \"query\": {", + " \"ids\": {", + " \"values\": [", + " \"0FD3DBEC9730101BFF92ACC820BEFC34\"", + " ]", + " }", + " }", + "}"); + + private RestClient restClient; + private static SystemConfiguration systemConfig; + private static ElasticSearchImageService elasticSearchImageService; + private static ObjectMapper mapper = new ObjectMapper(); + + private String 
convertToPrettyJson(String jsonString) { + JsonParser parser = new JsonParser(); + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + + JsonElement el = parser.parse(jsonString); + return gson.toJson(el); + } + + @BeforeClass + public static void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + elasticSearchImageService = new ElasticSearchImageService(systemConfig, mockedMonitor, mockedElasticSearchUtils); + } + + @Test + public void testImageRecordMapper() throws IOException { + ImageRecord.IndexSerializer imageIndexSerializer = new ImageRecord.IndexSerializer(); + mapper = ElasticSearchImageService.getImageObjectMapper(imageIndexSerializer); + + String byteString = "Test string"; + String imageId = ImageUtils.convertBytesToMd5Hash(byteString.getBytes()); + ImageRecord imageRecord = new ImageRecord(imageId, byteString.getBytes()); + String serialized = mapper.writeValueAsString(imageRecord); + String[] lines = serialized.split("\\r?\\n"); + + + String expectedIndexName = imageIndexSerializer.getImageIndex(System.currentTimeMillis()); + String expectedDocumentId = ImageUtils.convertBytesToMd5Hash(imageRecord.getImageBytes()); + String expectedImageBlob = ImageUtils.encodeBytesToBase64(imageRecord.getImageBytes()); + JsonNode line1 = mapper.readTree(lines[0]); + String actualIndexName = line1.get("index").get("_index").asText(); + String actualDocumentId = line1.get("index").get("_id").asText(); + + assertEquals(expectedIndexName, actualIndexName); + assertEquals(expectedDocumentId, actualDocumentId); + + JsonNode line2 = mapper.readTree(lines[1]); + String actualImageBlob = line2.get("imageblob").asText(); + + assertEquals(expectedImageBlob, actualImageBlob); + + } + + @Test + public void testPutImageIndex() throws IOException { + ElasticSearchImageService spyService = _initializeSpyService(elasticSearchImageService, createSucessReply, createSucessReply); + + String byteString = "Test string"; + String imageId = ImageUtils.convertBytesToMd5Hash(byteString.getBytes()); + spyService.putImage(imageId, byteString.getBytes(), true); + ArgumentCaptor<Request> requestCaptor = ArgumentCaptor.forClass(Request.class); + + verify(restClient, times(1)).performRequest(requestCaptor.capture()); + + Request capturedRequest = requestCaptor.getValue(); + String indexJson = EntityUtils.toString(capturedRequest.getEntity()); + String expectedURL = "_bulk"; + assertEquals(expectedURL, capturedRequest.getEndpoint()); + + String[] lines = indexJson.split("\\r?\\n"); + + ImageRecord.IndexSerializer imageIndexSerializer = new ImageRecord.IndexSerializer(); + String id = ImageUtils.convertBytesToMd5Hash(byteString.getBytes()); + ImageRecord imageRecord = new ImageRecord(id, byteString.getBytes()); + + String expectedIndexName = imageIndexSerializer.getImageIndex(System.currentTimeMillis()); + String expectedDocumentId = ImageUtils.convertBytesToMd5Hash(imageRecord.getImageBytes()); + String expectedImageBlob = ImageUtils.encodeBytesToBase64(imageRecord.getImageBytes()); + JsonNode line1 = mapper.readTree(lines[0]); + String actualIndexName = line1.get("index").get("_index").asText(); + String actualDocumentId = line1.get("index").get("_id").asText(); + + assertEquals(expectedIndexName, actualIndexName); + assertEquals(expectedDocumentId, actualDocumentId); + + JsonNode line2 = mapper.readTree(lines[1]);
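+ // The second line of the bulk payload is the document itself; verify the base64-encoded image blob survived serialization.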
+ String actualImageBlob = line2.get("imageblob").asText(); + + assertEquals(expectedImageBlob, actualImageBlob); + + } + + @Test + public void testGetImage() throws IOException { + + ElasticSearchImageService spyService = _initializeSpyService(elasticSearchImageService, getReply, getReply); + + + byte[] expectedBytes = "Test string".getBytes(); + byte[] actualBytes = spyService.getImage("0FD3DBEC9730101BFF92ACC820BEFC34"); + assertArrayEquals(expectedBytes, actualBytes); + ArgumentCaptor<Request> requestCaptor = ArgumentCaptor.forClass(Request.class); + verify(restClient, times(1)).performRequest(requestCaptor.capture()); + Request capturedRequest = requestCaptor.getValue(); + + String queryJson = convertToPrettyJson(EntityUtils.toString(capturedRequest.getEntity())); + assertEquals(imageIdQuery, queryJson); + String expectedURL = "/argus-image-*/_search"; + assertEquals(expectedURL, capturedRequest.getEndpoint()); + } + + @Test + public void testDoExtractResponse() throws Exception { + final String message = "this is a test"; + BasicHttpEntity entity = new BasicHttpEntity(); + try (ByteArrayInputStream bis = new ByteArrayInputStream(message.getBytes())) { + entity.setContent(bis); + } + catch (IOException e) { + throw e; + } + + String responseMessage = ElasticSearchUtils.doExtractResponse(200, entity); + assertEquals("expect the entity to be equal after extraction", message, responseMessage); + } + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testDoExtractResponse400() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Status code: 400"); + ElasticSearchUtils.doExtractResponse(400, null); + } + + @Test + public void testDoExtractResponse500() { + expectedException.expect(SystemException.class); + expectedException.expectMessage("Status code: 500"); + ElasticSearchUtils.doExtractResponse(500, null); + } + + private ElasticSearchImageService _initializeSpyService(ElasticSearchImageService service, + String firstReply, String secondReply) { + + restClient = mock(RestClient.class); + service.setESRestClient(restClient); + ElasticSearchImageService spyService = spy(service); + + doAnswer(new Answer<Object>() { + private int count = 0; + @Override + public Object answer(InvocationOnMock invocation) { + count++; + if (count == 1) { + return firstReply; + } + + return secondReply; + } + }).when(spyService).extractResponse(any()); + + return spyService; + } + +} diff --git
a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsServiceTest.java new file mode 100644 index 000000000..bbc13b56f --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/ElasticSearchConsumerOffsetMetricsServiceTest.java @@ -0,0 +1,476 @@ +package com.salesforce.dva.argus.service.metric; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonElement; +import com.google.gson.JsonParser; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.schema.ConsumerOffsetRecordList; +import com.salesforce.dva.argus.service.schema.ElasticSearchUtils; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.reflect.Whitebox; + +import java.io.IOException; +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.powermock.api.mockito.PowerMockito.mockStatic; +import static org.powermock.api.mockito.PowerMockito.verifyStatic; + +@PowerMockIgnore("*.ssl.*") +@RunWith(PowerMockRunner.class) +@PrepareForTest(ElasticSearchUtils.class) +public class ElasticSearchConsumerOffsetMetricsServiceTest { + + private static SystemConfiguration systemConfig; + private static ElasticSearchConsumerOffsetMetricsService esConsumerOffsetMetricsService; + private RestClient restClient; + private static ObjectMapper mapper = new ObjectMapper(); + + private String successReply = String.join("\n", + "{" + + " \"took\": 55," + + " \"errors\": false," + + " \"items\": [" + + " {" + + " \"create\": {" + + " \"_index\": \"argus-akc-consumer-offset-index\"," + + " \"_id\": \"cdfd12850d42746257f8217899647c8b\"," + + " \"_version\": 1," + + " \"result\": \"created\"," + + " \"_shards\": {" + + " \"total\": 2," + + " \"successful\": 1," + + " \"failed\": 0" + + " }," + + " \"created\": true," + + " \"status\": 201" + + " }" + + " }" + + " ]" + + "}"); + + private String getReply = "\n" + + "{\n" + + " \"took\" : 26,\n" + + " \"timed_out\" : false,\n" + + " \"_shards\" : {\n" + + " \"total\" : 5,\n" + + " 
\"successful\" : 5,\n" + + " \"skipped\" : 0,\n" + + " \"failed\" : 0\n" + + " },\n" + + " \"hits\" : {\n" + + " \"total\" : {\n" + + " \"value\" : 53,\n" + + " \"relation\" : \"eq\"\n" + + " },\n" + + " \"max_score\" : null,\n" + + " \"hits\" : [ ]\n" + + " },\n" + + " \"aggregations\" : {\n" + + " \"max_topic_offset_per_unit_time_greater_than\" : {\n" + + " \"doc_count_error_upper_bound\" : 0,\n" + + " \"sum_other_doc_count\" : 0,\n" + + " \"buckets\" : [\n" + + " {\n" + + " \"key\" : \"mytopic2.name\",\n" + + " \"doc_count\" : 28,\n" + + " \"max_offset_per_unit_time_greater_than\" : {\n" + + " \"buckets\" : [\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-19T00:00:00.000Z\",\n" + + " \"key\" : 1560902400000,\n" + + " \"doc_count\" : 7,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 19676.0\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-21T00:00:00.000Z\",\n" + + " \"key\" : 1561075200000,\n" + + " \"doc_count\" : 10,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 21635.0\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-23T00:00:00.000Z\",\n" + + " \"key\" : 1561248000000,\n" + + " \"doc_count\" : 11,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 22659.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key\" : \"mytopic1.name\",\n" + + " \"doc_count\" : 25,\n" + + " \"max_offset_per_unit_time_greater_than\" : {\n" + + " \"buckets\" : [\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-19T00:00:00.000Z\",\n" + + " \"key\" : 1560902400000,\n" + + " \"doc_count\" : 8,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 21455.0\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-21T00:00:00.000Z\",\n" + + " \"key\" : 1561075200000,\n" + + " \"doc_count\" : 7,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 21981.0\n" + + " }\n" + + " },\n" + + " {\n" + + " \"key_as_string\" : \"2019-06-23T00:00:00.000Z\",\n" + + " \"key\" : 1561248000000,\n" + + " \"doc_count\" : 10,\n" + + " \"max_offset_greater_than\" : {\n" + + " \"value\" : 22461.0\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " }\n" + + "}\n"; + + private String queryMustTermRange = "{\n" + + " \"aggs\": {\n" + + " \"max_topic_offset_per_unit_time_greater_than\": {\n" + + " \"terms\": {\n" + + " \"field\": \"topic.raw\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"max_offset_per_unit_time_greater_than\": {\n" + + " \"date_histogram\": {\n" + + " \"field\": \"ts\",\n" + + " \"interval\": \"5m\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"max_offset_greater_than\": {\n" + + " \"max\": {\n" + + " \"field\": \"value\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"query\": {\n" + + " \"bool\": {\n" + + " \"must\": [\n" + + " {\n" + + " \"range\": {\n" + + " \"ts\": {\n" + + " \"gte\": \"1557809359000\",\n" + + " \"lte\": \"1557809599000\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"filter\": [\n" + + " {\n" + + " \"regexp\": {\n" + + " \"topic.raw\": \"topic\"\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " \"from\": 0,\n" + + " \"size\": 10000\n" + + "}"; + + private Map defaultTags = new HashMap<>(); + + private String convertToPrettyJson(String jsonString) { + JsonParser parser = new JsonParser(); + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + + JsonElement el = parser.parse(jsonString); + return gson.toJson(el); + } + + @BeforeClass + public 
static void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + mockStatic(ElasticSearchUtils.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + esConsumerOffsetMetricsService = new ElasticSearchConsumerOffsetMetricsService(systemConfig, mockedMonitor, mockedElasticSearchUtils); + } + + @Before + public void setUp() { + TestUtils.setStaticField(ElasticSearchConsumerOffsetMetricsService.class, "INDEX_MAX_RESULT_WINDOW", 10000); + defaultTags.put("key1", "val1"); + defaultTags.put("key2", "val2"); + } + + @Test + public void testConstructQuery() throws IOException { + Map tags = new HashMap<>(); + tags.put("service", "scope*"); + tags.put("groupId", "groupId*"); + tags.put("topic", "*"); + esConsumerOffsetMetricsService = _initializeSpyService(esConsumerOffsetMetricsService, getReply, false); + MetricQuery mQ = new MetricQuery("test", "test", tags, 0L, 1L); + mQ.setDownsamplingPeriod((long)(2 * 60 * 1000)); + mQ.setDownsampler(MetricQuery.Aggregator.MAX); + String actualOutput = esConsumerOffsetMetricsService.constructQuery(mQ, 0, 0); + String expectedOutput = "{\n" + + " \"aggs\": {\n" + + " \"max_topic_offset_per_unit_time_greater_than\": {\n" + + " \"terms\": {\n" + + " \"field\": \"topic.raw\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"max_offset_per_unit_time_greater_than\": {\n" + + " \"date_histogram\": {\n" + + " \"field\": \"ts\",\n" + + " \"interval\": \"2m\"\n" + + " },\n" + + " \"aggs\": {\n" + + " \"max_offset_greater_than\": {\n" + + " \"max\": {\n" + + " \"field\": \"value\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " },\n" + + " \"query\": {\n" + + " \"bool\": {\n" + + " \"must\": [\n" + + " {\n" + + " \"range\": {\n" + + " \"ts\": {\n" + + " \"gte\": \"0\",\n" + + " \"lte\": \"1000\"\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"filter\": [\n" + + " {\n" + + " \"regexp\": {\n" + + " \"topic.raw\": \".*\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"regexp\": {\n" + + " \"tags.raw\": \".*([\\\"]groupId[\\\"]:[\\\"]groupId.*[\\\"]).*\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"regexp\": {\n" + + " \"tags.raw\": \".*([\\\"]service[\\\"]:[\\\"]scope.*[\\\"]).*\"\n" + + " }\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " \"from\": 0,\n" + + " \"size\": 0\n" + + "}"; + + assertEquals(convertToPrettyJson(expectedOutput), convertToPrettyJson(actualOutput)); + } + + @Ignore + @Test + public void testConsumerOffsetSchemaRecordListMapper() throws IOException { + mapper = ElasticSearchConsumerOffsetMetricsService.getMetricObjectMapper(new ConsumerOffsetRecordList.IndexSerializer(), new ConsumerOffsetRecordList.Deserializer()); + + Long cTime = System.currentTimeMillis(); + ConsumerOffsetMetric record1 = new ConsumerOffsetMetric("metric", "topic", cTime, 0.0, defaultTags); + ConsumerOffsetRecordList recordList = new ConsumerOffsetRecordList(Arrays.asList(record1), ElasticSearchUtils.HashAlgorithm.fromString("MD5")); + + String serialized = mapper.writeValueAsString(recordList); + + String[] lines = serialized.split("\\r?\\n"); + + String expectedIndexName = getExpectedIndexName(); + JsonNode root = mapper.readTree(lines[0]); + String actualIndexName = root.get("index").get("_index").asText(); + assertEquals(expectedIndexName, actualIndexName); + + String expectedSerializedMetric = + "{\"metric\":\"metric\",\"topic\":\"topic\",\"value\":\"0.0\",\"ts\":\"" + cTime + 
"\",\"tags\":\"{\\\"key1\\\":\\\"val1\\\",\\\"key2\\\":\\\"val2\\\"}\"}"; + assertEquals(expectedSerializedMetric, lines[1]); + } + + private String getExpectedIndexName() { + String month = String.valueOf(Calendar.getInstance().get(Calendar.MONTH) + 1); + String year = String.valueOf(Calendar.getInstance().get(Calendar.YEAR)); + String day = String.valueOf(Calendar.getInstance().get(Calendar.DAY_OF_MONTH)); + if (month.length() == 1) { + month = "0" + month; + } + + if (day.length() == 1) { + day = "0" + day; + } + + return MessageFormat.format("argus-akc-consumer-offset-{0}-{1}-{2}", year, month, day); + + } + + @Test + public void testPutMetricsUsingOffsetIndex() throws IOException { + Long currentTime = System.currentTimeMillis(); + ElasticSearchConsumerOffsetMetricsService spyService = _initializeSpyService(esConsumerOffsetMetricsService, successReply, true); + List metrics = new ArrayList<>(); + + Metric record1 = new Metric("scope", "metric"); + record1.setTag("topic", "topicV"); + record1.addDatapoint(currentTime, 0.0); + metrics.add(record1); + + Metric record2 = new Metric("scope2", "metric2"); + record2.setTag("topic", "topicV"); + record2.setTag("groupId", "groupId"); + record2.addDatapoint(currentTime, 1.0); + metrics.add(record2); + + spyService.putMetrics(metrics); + + ArgumentCaptor requestCaptorUrl = ArgumentCaptor.forClass(String.class); + ArgumentCaptor requestCaptorBody = ArgumentCaptor.forClass(String.class); + + verifyStatic(ElasticSearchUtils.class, times(1)); + ElasticSearchUtils.performESRequest(eq(restClient), requestCaptorUrl.capture(), requestCaptorBody.capture()); + + String expectedURL = "_bulk"; + assertEquals(expectedURL, requestCaptorUrl.getValue()); + + String[] lines = requestCaptorBody.getValue().split("\\r?\\n"); + + String expectedIndexName = getExpectedIndexName(); + JsonNode root = mapper.readTree(lines[0]); + String actualIndexName = root.get("index").get("_index").asText(); + assertEquals(expectedIndexName, actualIndexName); + + String expectedSerializedMetric1 = + "{\"metric\":\"metric\",\"topic\":\"topicV\",\"value\":\"0.0\",\"ts\":\"" + currentTime + "\",\"tags\":\"{\\\"service\\\":\\\"scope\\\"}\"}"; + String expectedSerializedMetric2 = + "{\"metric\":\"metric2\",\"topic\":\"topicV\",\"value\":\"1.0\",\"ts\":\"" + currentTime + "\",\"tags\":\"{\\\"groupId\\\":\\\"groupId\\\",\\\"service\\\":\\\"scope2\\\"}\"}"; + List expectedOutput = new ArrayList<>(Arrays.asList(expectedSerializedMetric1, expectedSerializedMetric2)); + Collections.sort(expectedOutput); + List actualOutput = new ArrayList<>(Arrays.asList(lines[1], lines[3])); + Collections.sort(actualOutput); + assertEquals(Arrays.asList(expectedOutput), Arrays.asList(actualOutput)); + } + + @Test + public void testGetMetrics() throws IOException { + MetricQuery metricQuery = new MetricQuery("scope1", "metric1", null, 1557809359L, 1557809599L); + metricQuery.setTag("topic", "topic"); + metricQuery.setDownsampler(MetricQuery.Aggregator.MAX); + metricQuery.setDownsamplingPeriod(5 * 60 * 1000L); + List queries = new ArrayList<>(); + queries.add(metricQuery); + + ElasticSearchConsumerOffsetMetricsService spyService = _initializeSpyService(esConsumerOffsetMetricsService, getReply, false); + + Map> metricsResult = spyService.getMetrics(queries); + String expectedMetric = + "[namespace=>null, scope=>ajna.consumer, metric=>metric.consumer.lag, tags=>{topic=mytopic1.name}, datapoints=>{1560902400000=21455.0, 1561075200000=21981.0, 1561248000000=22461.0}, " + + "namespace=>null, scope=>ajna.consumer, 
metric=>metric.consumer.lag, tags=>{topic=mytopic2.name}, datapoints=>{1560902400000=19676.0, 1561075200000=21635.0, 1561248000000=22659.0}]"; + + assertEquals(expectedMetric, metricsResult.get(metricQuery).toString()); + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(Request.class); + verify(restClient, times(1)).performRequest(requestCaptor.capture()); + Request capturedRequest = requestCaptor.getValue(); + + String queryJson = convertToPrettyJson(EntityUtils.toString(capturedRequest.getEntity())); + assertEquals(queryMustTermRange, queryJson); + String expectedURL = "/argus-akc-consumer-offset-*/_search"; + assertEquals(expectedURL, capturedRequest.getEndpoint()); + } + + @Test (expected = RuntimeException.class) + public void testGetMetricsExceedingLimit(){ + MetricQuery metricQuery = new MetricQuery("scope1", "metric1", null, 1557809359L, 1557809599L); + metricQuery.setTag("topic", "topic"); + metricQuery.setDownsampler(MetricQuery.Aggregator.MAX); + metricQuery.setDownsamplingPeriod(5 * 60 * 1000L); + List queries = new ArrayList<>(); + queries.add(metricQuery); + + ElasticSearchConsumerOffsetMetricsService spyService = null; + try { + spyService = _initializeSpyService(esConsumerOffsetMetricsService, getReply, false); + } catch (IOException e) { + fail(); + } + Whitebox.setInternalState(spyService, "INDEX_MAX_RESULT_WINDOW", 1); + spyService.getMetrics(queries); + } + + private ElasticSearchConsumerOffsetMetricsService _initializeSpyService(ElasticSearchConsumerOffsetMetricsService service, + String reply, boolean isPut) throws IOException { + + restClient = mock(RestClient.class); + service.setESRestClient(restClient); + mockStatic(ElasticSearchUtils.class); + if (isPut) { + when(ElasticSearchUtils.performESRequest(eq(restClient), any(), any())).thenReturn(mapper.readValue(reply, ElasticSearchUtils.PutResponse.class)); + } else { + + when(ElasticSearchUtils.extractResponse(any())).thenReturn(reply); + mapper = ElasticSearchConsumerOffsetMetricsService.getMetricObjectMapper(new ConsumerOffsetRecordList.IndexSerializer(), new ConsumerOffsetRecordList.Deserializer()); + when(ElasticSearchUtils.toEntity(any(), any(),any())).thenCallRealMethod(); + } + + when(ElasticSearchUtils.convertTimestampToMillis(any())).thenCallRealMethod(); + ElasticSearchConsumerOffsetMetricsService spyService = spy(service); + + return spyService; + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/MetricServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/MetricServiceIT.java index 42900b874..4c5966ebd 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/MetricServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/MetricServiceIT.java @@ -28,10 +28,10 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service.metric; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.IntegrationTest; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.service.MetricService; @@ -49,7 +49,7 @@ import static org.junit.Assert.*; @Category(IntegrationTest.class) -public class MetricServiceIT extends AbstractTest { +public class MetricServiceIT extends AbstractTestIT { @Test public void testGetMetricsRelativeTo() throws InterruptedException { @@ -72,11 +72,11 @@ public void testGetMetricsRelativeTo() throws InterruptedException { tsdbService.putMetrics(Arrays.asList(new Metric[] { m })); Thread.sleep(5 * 1000); - List metrics = metricService.getMetrics("-10000s" + MessageFormat.format(":{0}:{1}:avg", m.getScope(), m.getMetric()), currentTime); + List metrics = metricService.getMetrics("-10000s" + MessageFormat.format(":{0}:{1}:avg", m.getScope(), m.getMetric()), currentTime).getMetricsList(); assertTrue(metrics.size() == 0 || metrics.get(0).getDatapoints().size() == 0); - + metrics = metricService.getMetrics("-10000s" + MessageFormat.format(":{0}:{1}:avg", m.getScope(), m.getMetric()), - (currentTime - 10000000)); + (currentTime - 10000000)).getMetricsList(); assertTrue(_datapointsBetween(metrics.get(0).getDatapoints(), currentTime - 20000000, System.currentTimeMillis() - 10000000)); } finally { metricService.dispose(); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AboveTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AboveTransformTest.java index 14893c4ab..a72f59071 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AboveTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AboveTransformTest.java @@ -121,6 +121,28 @@ public void testAboveTransformWithoutMetrics() { aboveTransform.transform(null, metrics, constants); } + @Test + public void testAboveTransformWithZeroDataPointsForMetric() { + Transform aboveTransform = new MetricFilterWithInteralReducerTransform(new AboveValueFilter()); + Map datapoints = new HashMap(); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints); + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("2"); + constants.add("average"); + + List result = aboveTransform.transform(null, metrics, constants); + + assertEquals(0, result.size()); + } + @Test public void testAboveTransformWithLimitEqualMidDPsEvalDefault() { Transform aboveTransform = new MetricFilterWithInteralReducerTransform(new AboveValueFilter()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransformTest.java index 6c3e4a9cd..60f4c9541 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasByTagTransformTest.java @@ -74,5 +74,26 @@ public void testAliasByTagTransformEmptyTagKey() { Metric actual = aliasByTagTransform.transform(null, Arrays.asList(metric), Arrays.asList("")).get(0); assertEquals(null, actual.getDisplayName()); } - + + @Test + public void testAliasByTagScopeNMetricWithOneTag() { + Metric metric =
new Metric("scope", "metric"); + metric.setTag("device", "someDevice"); + metric.setTag("target", "someTarget"); + Metric actual = aliasByTagTransform.transform(null, Arrays.asList(metric), Arrays.asList("device")).get(0); + + assertEquals(AliasByTagTransform.DEFAULT_SCOPE_NAME, actual.getScope()); + assertEquals("someDevice", actual.getMetric()); + } + + @Test + public void testAliasByTagScopeNMetricWithTwoTags() { + Metric metric = new Metric("scope", "metric"); + metric.setTag("device", "someDevice"); + metric.setTag("target", "someTarget"); + Metric actual = aliasByTagTransform.transform(null, Arrays.asList(metric), Arrays.asList("device","target")).get(0); + + assertEquals(AliasByTagTransform.DEFAULT_SCOPE_NAME, actual.getScope()); + assertEquals("someDevice,someTarget", actual.getMetric()); + } } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasTransformTest.java index 24569b6b0..7af9a4456 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/AliasTransformTest.java @@ -185,5 +185,42 @@ public void testAliasTransformLiteralScope() { assertEquals("s", result.get(1).getScope()); assertEquals("s", result.get(2).getScope()); } + + @Test + public void testDisplayName() { + Transform aliasTransform = new AliasTransform(); + Metric metric_1 = new Metric("test_scope1", "test_metric1"); + Metric metric_2 = new Metric("test_scope2", "test_metric2"); + metric_2.setTag("test_key", "test_value"); + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + + List constants = new ArrayList(); + constants.add("new_metric"); + constants.add(TEST_TYPE_LITERAL); + List result = aliasTransform.transform(null, metrics, constants); + + assertEquals(result.size(), 2); + assertEquals("test_scope1:new_metric", result.get(0).getDisplayName()); + assertEquals("test_scope2:new_metric{test_key=test_value}", result.get(1).getDisplayName()); + } + @Test + public void testDisplayNameAfterAliasByTag() { + Transform aliasTransform = new AliasTransform(); + Metric metric_1 = new Metric(AliasByTagTransform.DEFAULT_SCOPE_NAME, "test_metric1"); + Metric metric_2 = new Metric(AliasByTagTransform.DEFAULT_SCOPE_NAME, "test_metric2"); + metric_2.setTag("test_key", "test_value"); + List metrics = new ArrayList(); + metrics.add(metric_1); + metrics.add(metric_2); + List constants = new ArrayList(); + constants.add("new_metric"); + constants.add(TEST_TYPE_LITERAL); + List result = aliasTransform.transform(null, metrics, constants); + assertEquals("new_metric", result.get(0).getDisplayName()); + assertEquals("new_metric{test_key=test_value}", result.get(1).getDisplayName()); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/BelowTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/BelowTransformTest.java index d52a285cf..76f75ab67 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/BelowTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/BelowTransformTest.java @@ -407,6 +407,28 @@ public void testBelowTransformWithLimitEqualToMidDPsEvalRecent() { assertEquals(expected_1, result.get(0).getDatapoints()); } + @Test + public void testBelowTransformWithZeroDataPointsForMetric() { + Transform belowTransform = new MetricFilterWithInteralReducerTransform(new BelowValueFilter()); + Map datapoints = new HashMap(); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints); + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("2"); + constants.add("average"); + + List result = belowTransform.transform(null, metrics, constants); + + assertEquals(0, result.size()); + } + @Test public void testBelowTransformWithLimitEqualMidDPsEvalDefaultHavingNull() { Transform belowTransform = new MetricFilterWithInteralReducerTransform(new BelowValueFilter()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java index 10c2e7833..a6a233770 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Diff_VTransformTest.java @@ -54,14 +54,15 @@ public void testDiff_VTransformWithoutMetrics() { diff_vTransform.transform(null, metrics); } - @Test(expected = IllegalArgumentException.class) + @Test public void testDiff_VTransformWithOnlyOneMetric() { Transform diff_vTransform = new MetricZipperTransform(new DiffValueZipper()); List metrics = new ArrayList(); Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); metrics.add(metric); - diff_vTransform.transform(null, metrics); + List result = diff_vTransform.transform(null, metrics); + assertEquals(result.size(), 0); } @Test(expected = IllegalArgumentException.class) @@ -69,7 +70,6 @@ public void testDiff_VTransformWithConstants() { Transform diff_vTransform = new MetricZipperTransform(new DiffValueZipper()); List metrics = new ArrayList(); Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metrics.add(metric); List constants = new ArrayList(); @@ -77,7 +77,7 @@ diff_vTransform.transform(null, metrics, constants); } - @Test(expected = IllegalArgumentException.class) + @Test public void testDiff_VTransformVectorWithoutPoints() { Transform diff_vTransform = new MetricZipperTransform(new DiffValueZipper()); Map datapoints = new HashMap(); @@ -93,7 +93,9 @@ public void testDiff_VTransformVectorWithoutPoints() { metrics.add(metric); metrics.add(vector); - diff_vTransform.transform(null, metrics); + List result = diff_vTransform.transform(null, metrics); + + assertEquals(result.get(0).getDatapoints().size(), 0); } @Test diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DivideTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DivideTransformTest.java index 2fcdaed1a..f8af69293
100755 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DivideTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DivideTransformTest.java @@ -63,6 +63,7 @@ public void testDivideTransformWithIllegalConstant() { List constants = new ArrayList(1); + constants.add("5w"); divideTransform.transform(null, metrics, constants); } @@ -198,6 +199,120 @@ public void testDivideTransformWithTwoConstants() { divideTransform.transform(null, metrics, constants); } + @Test(expected = IllegalArgumentException.class) + public void testDivideTransformWithWrongDefaultConstant() { + Map datapoints_1 = new HashMap(); + + datapoints_1.put(1000L, 10.0); + datapoints_1.put(2000L, 20.0); + datapoints_1.put(3000L, 30.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(1000L, 10.0); + datapoints_2.put(2000L, 100.0); + datapoints_2.put(3000L, 1000.0); + + Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + + List constants = new ArrayList(1); + + constants.add("union"); + constants.add("abc"); + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + divideTransform.transform(null, metrics, constants); + } + + @Test + public void testDivideTransformWithRightDefaultConstant() { + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + Map datapoints_1 = new HashMap(); + + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(100L, 10.0); + datapoints_2.put(2000L, 100.0); + datapoints_2.put(300L, 1000.0); + + Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + + List constants = new ArrayList(); + constants.add("union"); + constants.add("2"); + Map expected = new HashMap(); + expected.put(100L, 2.0); + expected.put(1000L, 2.0); + expected.put(2000L, 0.02); + expected.put(300L, 2.0); + expected.put(3000L, 2.0); + List result = divideTransform.transform(null, metrics, constants); + assertEquals(result.get(0).getDatapoints().size(), expected.size()); + assertEquals(expected, result.get(0).getDatapoints()); + } + + @Test + public void testDivideTransformWithNoDataForOneTimeSeries() { + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + Map datapoints_1 = new HashMap(); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + Map datapoints_2 = new HashMap(); + + datapoints_2.put(100L, 10.0); + datapoints_2.put(200L, 100.0); + datapoints_2.put(300L, 1000.0); + + Metric metric_2 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + metrics.add(metric_2); + + List constants = new ArrayList(); + constants.add("union"); + constants.add("2"); + Map expected = new HashMap(); + expected.put(100L, 2.0); + expected.put(200L, 2.0); + expected.put(300L, 2.0); + List result = divideTransform.transform(null, 
metrics, constants); + assertEquals(result.get(0).getDatapoints().size(), expected.size()); + assertEquals(expected, result.get(0).getDatapoints()); + } + + @Test public void testDivideTransformWithNoConstantShareCommonDPsDividedByZero() { Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); Map datapoints_1 = new HashMap(); @@ -394,12 +509,12 @@ public void testDivideTransformWithFullJoinConstantShareSomeCommonDPs() { assertEquals(result.get(0).getDatapoints().size(), expected.size()); assertEquals(expected, result.get(0).getDatapoints()); } - + @Test(expected = IllegalArgumentException.class) public void testDivideTransformWithOneTimeseries() { - Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); - - Map datapoints_1 = new HashMap(); + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + + Map datapoints_1 = new HashMap(); datapoints_1.put(1000L, 1.0); datapoints_1.put(2000L, 2.0); datapoints_1.put(3000L, 3.0); @@ -409,12 +524,58 @@ public void testDivideTransformWithOneTimeseries() { List metrics = new ArrayList(); metrics.add(metric_1); - + divideTransform.transform(null, metrics); } - + @Test - public void testDivideTransformWithOneTimeseriesWithConstant() { + public void testDivideTransformWithOneTimeseriesAndDefaultValueConstants() { + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + + Map datapoints_1 = new HashMap(); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + metric_1.setDatapoints(datapoints_1); + List constants = new ArrayList(); + constants.add("union"); + constants.add("12"); + + List metrics = new ArrayList(); + metrics.add(metric_1); + Map expected = new HashMap(); + expected.put(1000L, 12.0); + expected.put(2000L, 12.0); + expected.put(3000L, 12.0); + + List result = divideTransform.transform(null, metrics, constants); + assertEquals(result.get(0).getDatapoints().size(), expected.size()); + assertEquals(expected, result.get(0).getDatapoints()); + } + + @Test(expected = IllegalArgumentException.class) + public void testDivideTransformWithOneTimeseriesAndOnlyUnionConstant() { + Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); + + Map datapoints_1 = new HashMap(); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + metric_1.setDatapoints(datapoints_1); + List constants = new ArrayList(); + constants.add("union"); + + List metrics = new ArrayList(); + metrics.add(metric_1); + List result = divideTransform.transform(null, metrics, constants); + } + + @Test + public void testDivideTransformWithOneTimeseriesWithDivisorConstant() { Transform divideTransform = new MetricReducerOrMappingTransform(new DivideValueReducerOrMapping()); Map datapoints_1 = new HashMap(); @@ -427,15 +588,15 @@ public void testDivideTransformWithOneTimeseriesWithConstant() { List constants = new ArrayList(); constants.add("2"); - + List metrics = new ArrayList(); metrics.add(metric_1); - + Map expected = new HashMap(); expected.put(1000L, 5.0); expected.put(2000L, 10.0); expected.put(3000L, 15.0); - + List result = divideTransform.transform(null, metrics, constants); assertEquals(result.get(0).getDatapoints().size(), expected.size()); 
assertEquals(expected, result.get(0).getDatapoints()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java index ad188aa06..eac7fd28a 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Divide_VTransformTest.java @@ -55,14 +55,15 @@ public void testDivide_VTransformWithoutMetrics() { divide_vTransform.transform(null, metrics); } - @Test(expected = IllegalArgumentException.class) + @Test public void testDivide_VTransformWithOnlyOneMetric() { Transform divide_vTransform = new MetricZipperTransform(new DivideValueZipper()); List metrics = new ArrayList(); Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); metrics.add(metric); - divide_vTransform.transform(null, metrics); + List result = divide_vTransform.transform(null, metrics); + assertEquals(result.size(), 0); } @Test(expected = IllegalArgumentException.class) @@ -75,10 +76,11 @@ public void testDivide_VTransformWithConstants() { List constants = new ArrayList(); - divide_vTransform.transform(null, metrics, constants); + List result = divide_vTransform.transform(null, metrics, constants); + assertEquals(result.get(0).getDatapoints().size(), 0); } - @Test(expected = IllegalArgumentException.class) + @Test public void testDivide_VTransformVectorWithoutPoints() { Transform divide_vTransform = new MetricZipperTransform(new DivideValueZipper()); Map datapoints = new HashMap(); @@ -97,7 +99,7 @@ public void testDivide_VTransformVectorWithoutPoints() { divide_vTransform.transform(null, metrics); } - @Test(expected = SystemException.class) + @Test public void testDivide_VTransformVectorWithZeroAsDividend() { Transform divide_vTransform = new MetricZipperTransform(new DivideValueZipper()); Map datapoints = new HashMap(); @@ -120,7 +122,11 @@ public void testDivide_VTransformVectorWithZeroAsDividend() { metrics.add(metric); metrics.add(vector); - divide_vTransform.transform(null, metrics); + List result = divide_vTransform.transform(null, metrics); + + Map expected = new HashMap(); + assertEquals(result.get(0).getDatapoints().size(), 0); + assertEquals(expected, result.get(0).getDatapoints()); } @Test @@ -602,4 +608,4 @@ public void testDivide_VTransformWithSameShorterLongerVectorAgainstMetricList_fu assertEquals(expected_3, result.get(2).getDatapoints()); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved.
*/ \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java index e85018380..301af5a0d 100755 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/DownsampleTransformTest.java @@ -49,1042 +49,1075 @@ public class DownsampleTransformTest { - private static final String TEST_SCOPE = "test-scope"; - private static final String TEST_METRIC = "test-metric"; + private static final String TEST_SCOPE = "test-scope"; + private static final String TEST_METRIC = "test-metric"; - @Test(expected = IllegalArgumentException.class) - public void testDownsampleTransformWithIllegalUnit() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampleTransformWithIllegalUnit() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(1000L, 1.0); - Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); + Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metric.setDatapoints(datapoints); + metric.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric); + metrics.add(metric); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2k-avg"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("2k-avg"); + downsampleTransform.transform(null, metrics, constants); + } - @Test(expected = IllegalArgumentException.class) - public void testDownsampelTransformWithIllegalType() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampelTransformWithIllegalType() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(1000L, 1.0); - Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); + Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metric.setDatapoints(datapoints); + metric.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric); + metrics.add(metric); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-foobar"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("2s-foobar"); + downsampleTransform.transform(null, metrics, constants); + } - @Test(expected = IllegalArgumentException.class) - public void testDownsampleTransformWithoutUnit() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampleTransformWithoutUnit() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(1000L, 1.0); - Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); + Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metric.setDatapoints(datapoints); + metric.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - 
metrics.add(metric); + metrics.add(metric); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("-min"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("-min"); + downsampleTransform.transform(null, metrics, constants); + } - @Test(expected = IllegalArgumentException.class) - public void testDownsampleTransformWithoutType() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampleTransformWithoutType() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(1000L, 1.0); - Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); + Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metric.setDatapoints(datapoints); + metric.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric); + metrics.add(metric); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("6s-"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("6s-"); + downsampleTransform.transform(null, metrics, constants); + } - @Test(expected = IllegalArgumentException.class) - public void testDownsampleTransformWithIllegalExpFormat() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampleTransformWithIllegalExpFormat() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(1000L, 1.0); - Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); + Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); - metric.setDatapoints(datapoints); + metric.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric); + metrics.add(metric); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("***test"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("***test"); + downsampleTransform.transform(null, metrics, constants); + } - @Test(expected = IllegalArgumentException.class) - public void testDownsampleTransformWithoutMetrics() { - Transform downsampleTransform = new DownsampleTransform(); - List metrics = null; - List constants = new ArrayList(); + @Test(expected = IllegalArgumentException.class) + public void testDownsampleTransformWithoutMetrics() { + Transform downsampleTransform = new DownsampleTransform(); + List metrics = null; + List constants = new ArrayList(); - constants.add("2"); - constants.add("average"); - downsampleTransform.transform(null, metrics, constants); - } + constants.add("2"); + constants.add("average"); + downsampleTransform.transform(null, metrics, constants); + } - @Test - public void testDownsampleTransformAvgOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformAvgOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(0L, 1.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - 
datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(0L, 1.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-avg"); + constants.add("2s-avg"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 1.0); - expected_1.put(2000L, 2.5); - expected_1.put(4000L, 4.5); - expected_1.put(6000L, 6.5); - expected_1.put(8000L, 8.5); + expected_1.put(0L, 1.0); + expected_1.put(2000L, 2.5); + expected_1.put(4000L, 4.5); + expected_1.put(6000L, 6.5); + expected_1.put(8000L, 8.5); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformMinOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformMinOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1L, 1.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1L, 1.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-min"); + constants.add("2s-min"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 1.0); - expected_1.put(2000L, 2.0); - expected_1.put(4000L, 4.0); - expected_1.put(6000L, 6.0); - expected_1.put(8000L, 8.0); + expected_1.put(0L, 1.0); + expected_1.put(2000L, 2.0); + expected_1.put(4000L, 4.0); + expected_1.put(6000L, 6.0); + expected_1.put(8000L, 8.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, 
result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformMaxOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformMaxOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1L, 1.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1L, 1.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-max"); + constants.add("2s-max"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 1.0); - expected_1.put(2000L, 3.0); - expected_1.put(4000L, 5.0); - expected_1.put(6000L, 7.0); - expected_1.put(8000L, 9.0); + expected_1.put(0L, 1.0); + expected_1.put(2000L, 3.0); + expected_1.put(4000L, 5.0); + expected_1.put(6000L, 7.0); + expected_1.put(8000L, 9.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformShouldReturnSameMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformShouldReturnSameMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("1s-sum"); + constants.add("1s-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 1.0); - expected_1.put(2000L, 2.0); - expected_1.put(3000L, 3.0); - expected_1.put(4000L, 4.0); - 
expected_1.put(5000L, 5.0); + expected_1.put(1000L, 1.0); + expected_1.put(2000L, 2.0); + expected_1.put(3000L, 3.0); + expected_1.put(4000L, 4.0); + expected_1.put(5000L, 5.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformSumOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformSumOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1L, 0.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1L, 0.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-sum"); + constants.add("2s-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 1.0); - expected_1.put(2000L, 5.0); - expected_1.put(4000L, 9.0); - expected_1.put(6000L, 13.0); - expected_1.put(8000L, 17.0); + expected_1.put(0L, 1.0); + expected_1.put(2000L, 5.0); + expected_1.put(4000L, 9.0); + expected_1.put(6000L, 13.0); + expected_1.put(8000L, 17.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformDevOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformDevOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(0L, 0.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); - datapoints_1.put(10000L, 9.0); + datapoints_1.put(0L, 0.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + 
datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); + datapoints_1.put(10000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("3s-dev"); + constants.add("3s-dev"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 1.0); - expected_1.put(3000L, 1.0); - expected_1.put(6000L, 1.0); - expected_1.put(9000L, 0.0); - List result = downsampleTransform.transform(null, metrics, constants); + expected_1.put(0L, 1.0); + expected_1.put(3000L, 1.0); + expected_1.put(6000L, 1.0); + expected_1.put(9000L, 0.0); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformAvgMultipleMetrics() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformAvgMultipleMetrics() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - Map datapoints_2 = new HashMap(); + Map datapoints_2 = new HashMap(); - datapoints_2.put(1L, 0.0); - datapoints_2.put(1000L, 100.0); - datapoints_2.put(2000L, 200.0); - datapoints_2.put(3000L, 300.0); - datapoints_2.put(4000L, 400.0); - datapoints_2.put(5000L, 500.0); - datapoints_2.put(6000L, 600.0); - datapoints_2.put(7000L, 700.0); - datapoints_2.put(8000L, 800.0); - datapoints_2.put(9000L, 900.0); + datapoints_2.put(1L, 0.0); + datapoints_2.put(1000L, 100.0); + datapoints_2.put(2000L, 200.0); + datapoints_2.put(3000L, 300.0); + datapoints_2.put(4000L, 400.0); + datapoints_2.put(5000L, 500.0); + datapoints_2.put(6000L, 600.0); + datapoints_2.put(7000L, 700.0); + datapoints_2.put(8000L, 800.0); + datapoints_2.put(9000L, 900.0); - Metric metric_2 = new Metric(TEST_SCOPE + "2", TEST_METRIC); + Metric metric_2 = new Metric(TEST_SCOPE + "2", TEST_METRIC); - metric_2.setDatapoints(datapoints_2); + metric_2.setDatapoints(datapoints_2); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); - metrics.add(metric_2); + metrics.add(metric_1); + metrics.add(metric_2); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-avg"); + constants.add("2s-avg"); - Map 
expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 1.5); - expected_1.put(3000L, 3.5); - expected_1.put(5000L, 5.5); - expected_1.put(7000L, 7.5); - expected_1.put(9000L, 9.0); + expected_1.put(1000L, 1.5); + expected_1.put(3000L, 3.5); + expected_1.put(5000L, 5.5); + expected_1.put(7000L, 7.5); + expected_1.put(9000L, 9.0); - Map expected_2 = new HashMap(); + Map expected_2 = new HashMap(); - expected_2.put(0L, 50.0); - expected_2.put(2000L, 250.0); - expected_2.put(4000L, 450.0); - expected_2.put(6000L, 650.0); - expected_2.put(8000L, 850.0); + expected_2.put(0L, 50.0); + expected_2.put(2000L, 250.0); + expected_2.put(4000L, 450.0); + expected_2.put(6000L, 650.0); + expected_2.put(8000L, 850.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 2); - assertEquals(expected_1, result.get(0).getDatapoints()); - assertEquals(expected_2, result.get(1).getDatapoints()); - } + assertEquals(result.size(), 2); + assertEquals(expected_1, result.get(0).getDatapoints()); + assertEquals(expected_2, result.get(1).getDatapoints()); + } - @Test - public void testDownsampleTransformWindowGreaterThanRangeOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformWindowGreaterThanRangeOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("100s-avg"); + constants.add("100s-avg"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 5.0); + expected_1.put(1000L, 5.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformWindowLessThanUnitOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformWindowLessThanUnitOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1000L, 1.0); + 
datapoints_1.put(5000L, 5.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-avg"); + constants.add("2s-avg"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 1.0); - expected_1.put(5000L, 5.0); - expected_1.put(9000L, 9.0); + expected_1.put(1000L, 1.0); + expected_1.put(5000L, 5.0); + expected_1.put(9000L, 9.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformMinOneMetricHavingNull() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformMinOneMetricHavingNull() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1000L, null); - datapoints_1.put(2000L, null); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, null); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, null); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, null); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1000L, null); + datapoints_1.put(2000L, null); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, null); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, null); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, null); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-min"); + constants.add("2s-min"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 0.0); - expected_1.put(3000L, 0.0); - expected_1.put(5000L, 0.0); - expected_1.put(7000L, 0.0); - expected_1.put(9000L, 9.0); - List result = downsampleTransform.transform(null, metrics, constants); + expected_1.put(1000L, 0.0); + expected_1.put(3000L, 0.0); + expected_1.put(5000L, 0.0); + expected_1.put(7000L, 0.0); + expected_1.put(9000L, 9.0); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test//_W-2905322 - public void testDownsampleTransformBug_OnHourLevel() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test//_W-2905322 + public void testDownsampleTransformBug_OnHourLevel() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - 
datapoints_1.put(1453798890000L, 1.0); - datapoints_1.put(1453802750000L, 2.0); - datapoints_1.put(1453806510000L, 3.0); - datapoints_1.put(1453809690000L, 4.0); + datapoints_1.put(1453798890000L, 1.0); + datapoints_1.put(1453802750000L, 2.0); + datapoints_1.put(1453806510000L, 3.0); + datapoints_1.put(1453809690000L, 4.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("1h-min"); + constants.add("1h-min"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1453798800000L, 1.0); - expected_1.put(1453802400000L, 2.0); - expected_1.put(1453806000000L, 3.0); - expected_1.put(1453809600000L, 4.0); + expected_1.put(1453798800000L, 1.0); + expected_1.put(1453802400000L, 2.0); + expected_1.put(1453806000000L, 3.0); + expected_1.put(1453809600000L, 4.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformCountOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformCountOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1L, 1.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1L, 1.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("3s-count"); + constants.add("3s-count"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 3.0); - expected_1.put(3000L, 3.0); - expected_1.put(6000L, 3.0); - expected_1.put(9000L, 1.0); + expected_1.put(0L, 3.0); + expected_1.put(3000L, 3.0); + expected_1.put(6000L, 3.0); + expected_1.put(9000L, 1.0); - List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + List result = downsampleTransform.transform(null, metrics, constants); + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - 
@Test - public void testDownsampleTransformCountOneMetricHavingNull() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformCountOneMetricHavingNull() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(1L, null); - datapoints_1.put(1000L, null); - datapoints_1.put(2000L, null); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, null); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, null); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, null); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1L, null); + datapoints_1.put(1000L, null); + datapoints_1.put(2000L, null); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, null); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, null); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, null); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("3s-count"); + constants.add("3s-count"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 0.0); - expected_1.put(3000L, 2.0); - expected_1.put(6000L, 1.0); - expected_1.put(9000L, 1.0); + expected_1.put(0L, 0.0); + expected_1.put(3000L, 2.0); + expected_1.put(6000L, 1.0); + expected_1.put(9000L, 1.0); - List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + List result = downsampleTransform.transform(null, metrics, constants); + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformMetricIsAllNull() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test + public void testDownsampleTransformMetricIsAllNull() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - Metric metric = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric.setDatapoints(datapoints); - List metrics = new ArrayList(); - metrics.add(metric); + Metric metric = new Metric(TEST_SCOPE + "1", TEST_METRIC); + metric.setDatapoints(datapoints); + List metrics = new ArrayList(); + metrics.add(metric); - List constants = new ArrayList(); - constants.add("3s-count"); - Map expected = new HashMap(); - List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected, result.get(0).getDatapoints()); - } + List constants = new ArrayList(); + constants.add("3s-count"); + Map expected = new HashMap(); + List result = downsampleTransform.transform(null, metrics, constants); + assertEquals(result.size(), 1); + assertEquals(expected, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformPercentileOneMetric() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformPercentileOneMetric() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 
= new HashMap(); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-p90"); + constants.add("2s-p90"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 2.0); - expected_1.put(3000L, 4.0); - expected_1.put(5000L, 6.0); - expected_1.put(7000L, 8.0); - expected_1.put(9000L, 9.0); + expected_1.put(1000L, 2.0); + expected_1.put(3000L, 4.0); + expected_1.put(5000L, 6.0); + expected_1.put(7000L, 8.0); + expected_1.put(9000L, 9.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleTransformPercentileMultipleMetrics() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleTransformPercentileMultipleMetrics() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); - datapoints_1.put(000L, 10.0); - datapoints_1.put(1000L, 1.0); - datapoints_1.put(2000L, 2.0); - datapoints_1.put(3000L, 3.0); - datapoints_1.put(4000L, 4.0); - datapoints_1.put(5000L, 5.0); - datapoints_1.put(6000L, 6.0); - datapoints_1.put(7000L, 7.0); - datapoints_1.put(8000L, 8.0); - datapoints_1.put(9000L, 9.0); + datapoints_1.put(000L, 10.0); + datapoints_1.put(1000L, 1.0); + datapoints_1.put(2000L, 2.0); + datapoints_1.put(3000L, 3.0); + datapoints_1.put(4000L, 4.0); + datapoints_1.put(5000L, 5.0); + datapoints_1.put(6000L, 6.0); + datapoints_1.put(7000L, 7.0); + datapoints_1.put(8000L, 8.0); + datapoints_1.put(9000L, 9.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - Map datapoints_2 = new HashMap(); + Map datapoints_2 = new HashMap(); - datapoints_2.put(0L, 1.0); - datapoints_2.put(1000L, 20.0); - datapoints_2.put(2000L, 30.0); - datapoints_2.put(3000L, 40.0); - datapoints_2.put(4000L, 50.0); - datapoints_2.put(5000L, 60.0); - datapoints_2.put(6000L, 70.0); - datapoints_2.put(7000L, 80.0); - datapoints_2.put(8000L, 90.0); - datapoints_2.put(9000L, 100.0); + datapoints_2.put(0L, 1.0); + datapoints_2.put(1000L, 20.0); + datapoints_2.put(2000L, 30.0); + datapoints_2.put(3000L, 40.0); + datapoints_2.put(4000L, 50.0); + datapoints_2.put(5000L, 60.0); + 
datapoints_2.put(6000L, 70.0); + datapoints_2.put(7000L, 80.0); + datapoints_2.put(8000L, 90.0); + datapoints_2.put(9000L, 100.0); - Metric metric_2 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_2 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_2.setDatapoints(datapoints_2); + metric_2.setDatapoints(datapoints_2); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); - metrics.add(metric_2); + metrics.add(metric_1); + metrics.add(metric_2); - List constants = new ArrayList(); - constants.add("10s-p90"); + List constants = new ArrayList(); + constants.add("10s-p90"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(0L, 9.9); + expected_1.put(0L, 9.9); - Map expected_2 = new HashMap(); + Map expected_2 = new HashMap(); - expected_2.put(0L, 99.0); + expected_2.put(0L, 99.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(2, result.size()); - assertEquals(expected_1, result.get(0).getDatapoints()); - assertEquals(expected_2, result.get(1).getDatapoints()); - } + assertEquals(2, result.size()); + assertEquals(expected_1, result.get(0).getDatapoints()); + assertEquals(expected_2, result.get(1).getDatapoints()); + } - @Test - public void testSnappingSeconds(){ - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test + public void testSnappingSeconds(){ + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(1002L, 1.0); - datapoints.put(2002L, 1.0); - datapoints.put(2010L, 1.0); - datapoints.put(4001L, 1.0); + datapoints.put(1002L, 1.0); + datapoints.put(2002L, 1.0); + datapoints.put(2010L, 1.0); + datapoints.put(4001L, 1.0); - datapoints.put(7000L, 1.0); - datapoints.put(8000L, 1.0); + datapoints.put(7000L, 1.0); + datapoints.put(8000L, 1.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints); + metric_1.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("2s-sum"); + constants.add("2s-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(1000L, 3.0); - expected_1.put(3000L, 1.0); - expected_1.put(7000L, 2.0); + expected_1.put(1000L, 3.0); + expected_1.put(3000L, 1.0); + expected_1.put(7000L, 2.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testSnappingMinutes(){ - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test + public void testSnappingMinutes(){ + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - datapoints.put(61002L, 1.0); - datapoints.put(120002L, 1.0); - datapoints.put(180010L, 1.0); - datapoints.put(540000L, 1.0); + datapoints.put(61002L, 1.0); + datapoints.put(120002L, 1.0); + datapoints.put(180010L, 1.0); + datapoints.put(540000L, 1.0); - Metric 
metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints); + metric_1.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("3m-sum"); + constants.add("3m-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - expected_1.put(60000L, 3.0); - expected_1.put(420000L, 1.0); + expected_1.put(60000L, 3.0); + expected_1.put(420000L, 1.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testSnappingHours(){ - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints = new HashMap(); + @Test + public void testSnappingHours(){ + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT")); - calendar.set(2010, 11, 9, 3, 31, 31); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.HOUR_OF_DAY, 4); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.HOUR_OF_DAY, 5); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.HOUR_OF_DAY, 6); - datapoints.put(calendar.getTimeInMillis(), 1.0); + GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT")); + calendar.set(2010, 11, 9, 3, 31, 31); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.HOUR_OF_DAY, 4); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.HOUR_OF_DAY, 5); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.HOUR_OF_DAY, 6); + datapoints.put(calendar.getTimeInMillis(), 1.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints); + metric_1.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("3h-sum"); + constants.add("3h-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - calendar.set(2010, 11, 9, 3, 0, 0); - calendar.set(Calendar.MILLISECOND, 0); + calendar.set(2010, 11, 9, 3, 0, 0); + calendar.set(Calendar.MILLISECOND, 0); - expected_1.put(calendar.getTimeInMillis(), 3.0); - calendar.set(Calendar.HOUR_OF_DAY, 6); - expected_1.put(calendar.getTimeInMillis(), 1.0); + expected_1.put(calendar.getTimeInMillis(), 3.0); + calendar.set(Calendar.HOUR_OF_DAY, 6); + expected_1.put(calendar.getTimeInMillis(), 1.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testSnappingDays(){ - Transform downsampleTransform = new DownsampleTransform(); - Map 
datapoints = new HashMap(); + @Test + public void testSnappingDays(){ + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints = new HashMap(); - GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT")); - calendar.set(2010, 11, 9, 3, 31, 31); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.DAY_OF_MONTH, 10); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.DAY_OF_MONTH, 11); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.DAY_OF_MONTH, 12); - datapoints.put(calendar.getTimeInMillis(), 1.0); + GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT")); + calendar.set(2010, 11, 9, 3, 31, 31); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.DAY_OF_MONTH, 10); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.DAY_OF_MONTH, 11); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.DAY_OF_MONTH, 12); + datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.DAY_OF_MONTH, 18); - datapoints.put(calendar.getTimeInMillis(), 1.0); - calendar.set(Calendar.HOUR_OF_DAY, 18); - datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.DAY_OF_MONTH, 18); + datapoints.put(calendar.getTimeInMillis(), 1.0); + calendar.set(Calendar.HOUR_OF_DAY, 18); + datapoints.put(calendar.getTimeInMillis(), 1.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints); + metric_1.setDatapoints(datapoints); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("7d-sum"); + constants.add("7d-sum"); - Map expected_1 = new HashMap(); + Map expected_1 = new HashMap(); - calendar.set(2010, 11, 9, 0, 0, 0); - calendar.set(Calendar.MILLISECOND, 0); + calendar.set(2010, 11, 9, 0, 0, 0); + calendar.set(Calendar.MILLISECOND, 0); - expected_1.put(calendar.getTimeInMillis(), 4.0); - calendar.set(Calendar.DAY_OF_MONTH, 16); - expected_1.put(calendar.getTimeInMillis(), 2.0); + expected_1.put(calendar.getTimeInMillis(), 4.0); + calendar.set(Calendar.DAY_OF_MONTH, 16); + expected_1.put(calendar.getTimeInMillis(), 2.0); - List result = downsampleTransform.transform(null, metrics, constants); + List result = downsampleTransform.transform(null, metrics, constants); - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } - @Test - public void testDownsampleWithFillDefaultValues() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + @Test + public void testDownsampleWithFillDefaultValues() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); long startMillis = 1534368960000L; - datapoints_1.put(startMillis, 1.0); - datapoints_1.put(startMillis+60000L, 1.0); - datapoints_1.put(startMillis+2*60000L, 2.0); - datapoints_1.put(startMillis+3*60000L, 3.0); - datapoints_1.put(startMillis+4*60000L, 4.0); - datapoints_1.put(startMillis+7*60000L, 7.0); - datapoints_1.put(startMillis+8*60000L, 8.0); - - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - - metric_1.setDatapoints(datapoints_1); - - List metrics = new 
ArrayList(); - - metrics.add(metric_1); - - List constants = new ArrayList(); - - constants.add("1m-sum"); - constants.add("-10m"); - constants.add("-0m"); - constants.add("0.0"); - Map expected_1 = new HashMap(); - - expected_1.put(startMillis, 1.0); - expected_1.put(startMillis+60000L, 1.0); - expected_1.put(startMillis+2*60000L, 2.0); - expected_1.put(startMillis+3*60000L, 3.0); - expected_1.put(startMillis+4*60000L, 4.0); - expected_1.put(startMillis+5*60000L, 0.0); - expected_1.put(startMillis+6*60000L, 0.0); - expected_1.put(startMillis+7*60000L, 7.0); - expected_1.put(startMillis+8*60000L, 8.0); - expected_1.put(startMillis+9*60000L, 0.0); - - List result = downsampleTransform.transform(null, metrics, constants); - - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } - - @Test - public void testDownsampleWithAbsoluteIntervals() { - Transform downsampleTransform = new DownsampleTransform(); - Map datapoints_1 = new HashMap(); + datapoints_1.put(startMillis, 1.0); + datapoints_1.put(startMillis+60000L, 1.0); + datapoints_1.put(startMillis+2*60000L, 2.0); + datapoints_1.put(startMillis+3*60000L, 3.0); + datapoints_1.put(startMillis+4*60000L, 4.0); + datapoints_1.put(startMillis+7*60000L, 7.0); + datapoints_1.put(startMillis+8*60000L, 8.0); + + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + + metric_1.setDatapoints(datapoints_1); + + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("1m-sum"); + constants.add("-10m"); + constants.add("-0m"); + constants.add("0.0"); + Map expected_1 = new HashMap(); + + expected_1.put(startMillis, 1.0); + expected_1.put(startMillis+60000L, 1.0); + expected_1.put(startMillis+2*60000L, 2.0); + expected_1.put(startMillis+3*60000L, 3.0); + expected_1.put(startMillis+4*60000L, 4.0); + expected_1.put(startMillis+5*60000L, 0.0); + expected_1.put(startMillis+6*60000L, 0.0); + expected_1.put(startMillis+7*60000L, 7.0); + expected_1.put(startMillis+8*60000L, 8.0); + expected_1.put(startMillis+9*60000L, 0.0); + + List result = downsampleTransform.transform(null, metrics, constants); + + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + } + + @Test + public void testDownsampleWithAbsoluteIntervals() { + Transform downsampleTransform = new DownsampleTransform(); + Map datapoints_1 = new HashMap(); long startMillis = 1534368960000L; - datapoints_1.put(startMillis, 1.0); - datapoints_1.put(startMillis+60000L, 1.0); - datapoints_1.put(startMillis+2*60000L, 2.0); - datapoints_1.put(startMillis+3*60000L, 3.0); - datapoints_1.put(startMillis+4*60000L, 4.0); - datapoints_1.put(startMillis+7*60000L, 7.0); - datapoints_1.put(startMillis+8*60000L, 8.0); + datapoints_1.put(startMillis, 1.0); + datapoints_1.put(startMillis+60000L, 1.0); + datapoints_1.put(startMillis+2*60000L, 2.0); + datapoints_1.put(startMillis+3*60000L, 3.0); + datapoints_1.put(startMillis+4*60000L, 4.0); + datapoints_1.put(startMillis+7*60000L, 7.0); + datapoints_1.put(startMillis+8*60000L, 8.0); - Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + Metric metric_1 = new Metric(TEST_SCOPE + "1", TEST_METRIC); - metric_1.setDatapoints(datapoints_1); + metric_1.setDatapoints(datapoints_1); - List metrics = new ArrayList(); + List metrics = new ArrayList(); - metrics.add(metric_1); + metrics.add(metric_1); - List constants = new ArrayList(); + List constants = new ArrayList(); - constants.add("1m-sum"); - constants.add("0.0"); - 
constants.add("abs"); - Map expected_1 = new TreeMap(); + constants.add("1m-sum"); + constants.add("0.0"); + constants.add("abs"); + Map expected_1 = new TreeMap(); - expected_1.put(startMillis, 1.0); - expected_1.put(startMillis+60000L, 1.0); - expected_1.put(startMillis+2*60000L, 2.0); - expected_1.put(startMillis+3*60000L, 3.0); - expected_1.put(startMillis+4*60000L, 4.0); - expected_1.put(startMillis+5*60000L, 0.0); - expected_1.put(startMillis+6*60000L, 0.0); - expected_1.put(startMillis+7*60000L, 7.0); - expected_1.put(startMillis+8*60000L, 8.0); - expected_1.put(startMillis+9*60000L, 0.0); + expected_1.put(startMillis, 1.0); + expected_1.put(startMillis+60000L, 1.0); + expected_1.put(startMillis+2*60000L, 2.0); + expected_1.put(startMillis+3*60000L, 3.0); + expected_1.put(startMillis+4*60000L, 4.0); + expected_1.put(startMillis+5*60000L, 0.0); + expected_1.put(startMillis+6*60000L, 0.0); + expected_1.put(startMillis+7*60000L, 7.0); + expected_1.put(startMillis+8*60000L, 8.0); + expected_1.put(startMillis+9*60000L, 0.0); QueryContext context = QueryUtils.getQueryContext(1534368960000L+":"+(1534368960000L+10*60000L)+":argus.core:alerts.evaluated:zimsum:1m-sum", 0L); - List result = downsampleTransform.transform(context, metrics, constants); - - assertEquals(result.size(), 1); - assertEquals(expected_1, result.get(0).getDatapoints()); - } - + List result = downsampleTransform.transform(context, metrics, constants); + + assertEquals(result.size(), 1); + assertEquals(expected_1, result.get(0).getDatapoints()); + + Map datapoints_2 = new HashMap(); + datapoints_2.put(startMillis+3*60000L, 3.0); + datapoints_2.put(startMillis+4*60000L, 4.0); + datapoints_2.put(startMillis+7*60000L, 7.0); + datapoints_2.put(startMillis+8*60000L, 8.0); + datapoints_2.put(startMillis+10*60000L, 10.0); + + Map expected_2 = new TreeMap(); + + expected_2.put(startMillis, 0.0); + expected_2.put(startMillis+60000L, 0.0); + expected_2.put(startMillis+2*60000L, 0.0); + expected_2.put(startMillis+3*60000L, 3.0); + expected_2.put(startMillis+4*60000L, 4.0); + expected_2.put(startMillis+5*60000L, 0.0); + expected_2.put(startMillis+6*60000L, 0.0); + expected_2.put(startMillis+7*60000L, 7.0); + expected_2.put(startMillis+8*60000L, 8.0); + expected_2.put(startMillis+9*60000L, 0.0); + + Metric metric_2 = new Metric(TEST_SCOPE + "1", TEST_METRIC); + + metric_2.setDatapoints(datapoints_2); + + List metrics2 = new ArrayList(); + + metrics2.add(metric_2); + + QueryContext context2 = QueryUtils.getQueryContext(1534368960000L+":"+(1534368960000L+10*60000L)+":argus.core:alerts.evaluated:zimsum:1m-sum", 0L); + List result2 = downsampleTransform.transform(context2, metrics2, constants); + + assertEquals(result2.size(), 1); + assertEquals(expected_2, result2.get(0).getDatapoints()); + } } /* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransformTest.java index 3969e1991..3ec1dce8f 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTagTransformTest.java @@ -18,7 +18,7 @@ public class GroupByTagTransformTest { @Test public void testGroupBySingleCommonTag() { - GroupByTagTransform transform = new GroupByTagTransform(new TransformFactory(null)); + GroupByTagTransform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap<>(); datapoints.put(1000L, 1.0); @@ -79,7 +79,7 @@ public void testGroupBySingleCommonTag() { @Test public void testGroupByTagTwoCommonTags() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -148,7 +148,7 @@ public void testGroupByTagTwoCommonTags() { @Test public void testGroupByTagTwoTagsOnePartial() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -220,7 +220,7 @@ public void testGroupByTagTwoTagsOnePartial() { @Test public void testGroupByTagOnePartial() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -290,7 +290,7 @@ public void testGroupByTagOnePartial() { @Test public void testGroupByTagWithTransformConstant() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -371,7 +371,7 @@ public void testGroupByTagWithTransformConstant() { @Test public void testGroupByTagNoTags() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -406,7 +406,7 @@ public void testGroupByTagNoTags() { @Test public void testGroupByTagNoFunction() { - Transform transform = new GroupByTagTransform(new TransformFactory(null)); + Transform transform = new GroupByTagTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransformTest.java index f06af5944..96eeafdc5 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/GroupByTransformTest.java @@ -16,7 +16,7 @@ public class GroupByTransformTest { @Test public void testGroupByDC() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map 
datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -54,7 +54,7 @@ public void testGroupByDC() { @Test public void testGroupByDCAndUncapturedGroup() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -92,7 +92,7 @@ public void testGroupByDCAndUncapturedGroup() { @Test public void testGroupByDCAndPodPrefix() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -146,7 +146,7 @@ public void testGroupByDCAndPodPrefix() { @Test public void testGroupByDCAndPodNumber() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -198,7 +198,7 @@ public void testGroupByDCAndPodNumber() { @Test public void testWeightedAvgUsingGroupBy() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -258,7 +258,7 @@ public void testWeightedAvgUsingGroupBy() { @Test public void testGroupByPod() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); @@ -312,7 +312,7 @@ public void testGroupByPod() { @Test public void testGroupByWithFunctionTakingConstants() { - GroupByTransform transform = new GroupByTransform(new TransformFactory(null)); + GroupByTransform transform = new GroupByTransform(new TransformFactory(null, null)); Map datapoints = new HashMap(); datapoints.put(1000L, 1.0); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/HighestTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/HighestTransformTest.java index ba4f05445..aea2a4620 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/HighestTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/HighestTransformTest.java @@ -183,6 +183,28 @@ public void testHighestTransformWithLimitLessThanDPsLenDefault() { assertEquals(expected_2, result.get(1).getDatapoints()); } + @Test + public void testHighestTransformWithZeroDataPointsForMetric() { + Transform belowTransform = new MetricFilterWithInteralReducerTransform(new HighestValueFilter()); + Map datapoints = new HashMap(); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints); + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("2"); + constants.add("average"); + + List result = belowTransform.transform(null, metrics, constants); + + assertEquals(0, result.size()); + } + @Test public void testHighestTransformWithLimitLessThanDPsLenAvg() { Transform highestTransform = new MetricFilterWithInteralReducerTransform(new HighestValueFilter()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/LowestTransformTest.java 
b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/LowestTransformTest.java index ff25b9864..0b279cdb7 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/LowestTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/LowestTransformTest.java @@ -121,6 +121,28 @@ public void testLowestTransformWithoutMetrics() { lowestTransform.transform(null, metrics, constants); } + @Test + public void testLowestTransformWithZeroDataPointsForMetric() { + Transform belowTransform = new MetricFilterWithInteralReducerTransform(new LowestValueFilter()); + Map datapoints = new HashMap(); + + Metric metric_1 = new Metric(TEST_SCOPE, TEST_METRIC); + + metric_1.setDatapoints(datapoints); + List metrics = new ArrayList(); + + metrics.add(metric_1); + + List constants = new ArrayList(); + + constants.add("2"); + constants.add("average"); + + List result = belowTransform.transform(null, metrics, constants); + + assertEquals(0, result.size()); + } + @Test public void testLowestTransformWithLimitLessThanDPsLenDefault() { Transform lowestTransform = new MetricFilterWithInteralReducerTransform(new LowestValueFilter()); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MetricReduceTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MetricReduceTransformTest.java index f9226aef4..784aa3eb2 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MetricReduceTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/MetricReduceTransformTest.java @@ -36,7 +36,8 @@ import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.Mockito; -import org.mockito.runners.MockitoJUnitRunner; +import org.mockito.junit.MockitoJUnitRunner; + import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransformTest.java index cb97c1659..3b8943742 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransformTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/PropagateTransformTest.java @@ -36,6 +36,7 @@ import com.salesforce.dva.argus.util.QueryContext; import com.salesforce.dva.argus.util.QueryUtils; +import com.salesforce.dva.argus.util.TSDBQueryExpression; import org.junit.Test; import java.util.ArrayList; @@ -342,7 +343,7 @@ public void testWithOnlyOneDP() { Transform propagateTransform = new PropagateTransform(); Map datapoints = new HashMap(); - datapoints.put(1000L, 1.0); + datapoints.put(200000L, 1.0); Metric metric = new Metric(TEST_SCOPE, TEST_METRIC); @@ -358,11 +359,21 @@ public void testWithOnlyOneDP() { Map expected = new HashMap(); - expected.put(1000L, 1.0); + expected.put(200000L, 1.0); + expected.put(300000L, 1.0); + expected.put(400000L, 1.0); + expected.put(500000L, 1.0); + expected.put(600000L, 1.0); - List result = propagateTransform.transform(null, metrics, constants); + QueryContext queryContext = new QueryContext(); + TSDBQueryExpression expression = new TSDBQueryExpression(); + expression.setStartTimestamp(200000L); + expression.setEndTimestamp(600000L); + queryContext.setExpression(expression); + + List result = propagateTransform.transform(queryContext, 
metrics, constants); - assertEquals(result.get(0).getDatapoints().size(), 1); + assertEquals(result.get(0).getDatapoints().size(), 5); assertEquals(expected, result.get(0).getDatapoints()); } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/RateTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/RateTransformTest.java new file mode 100644 index 000000000..a0b95de40 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/RateTransformTest.java @@ -0,0 +1,339 @@ +package com.salesforce.dva.argus.service.metric.transform; + +import static org.junit.Assert.assertEquals; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Test; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.util.QueryContext; +import com.salesforce.dva.argus.util.TSDBQueryExpression; + +public class RateTransformTest { + private static long MINUTE=60*1000; + + @Test + public void testRateWithNoMissingDP() { + long startTimestamp=1*MINUTE; + double counter=1; + RateTransform rateTransform = new RateTransform(); + Map actualDPs = new HashMap<>(); + for(int i=1;i<=60;i++) { + actualDPs.put(i*MINUTE, counter++); + } + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + + Map expectedDPs = new HashMap<>(); + for(int i=2;i<=60;i++) { + expectedDPs.put(i*MINUTE, 1d); + } + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + QueryContext queryContext = _getQueryContext(startTimestamp, 60*MINUTE); + List constants = Arrays.asList("1m","FALSE","FALSE"); + List actualMetrics= rateTransform.transform(queryContext, metrics, constants); + assertEquals(expectedMetrics, actualMetrics); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithMissingDPShouldInterpolate() { + long startTime=1*MINUTE, endTime=11*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(startTime, 1d); + actualDPs.put(9*60*1000l, 9d); + actualDPs.put(endTime, 11d); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List constants = Arrays.asList("1m","FALSE","TRUE"); + List actualMetrics= rateTransform.transform(queryContext, metrics, constants); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + for(int i=2;i<=11;i++) { + expectedDPs.put(i*MINUTE, 1d); + } + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + assertEquals(expectedMetrics, actualMetrics); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWhenOneDPShouldReturnZeroDP() { + long startTime=1, endTime=10*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new 
HashMap<>(); + actualDPs.put(startTime, 1d); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + expectedMetric.setDatapoints(null); + expectedMetrics.add(expectedMetric); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List constants = Arrays.asList("1m","FALSE","FALSE"); + List actualMetrics1 = rateTransform.transform(queryContext, metrics, constants); + constants = Arrays.asList("1m","TRUE","TRUE"); + List actualMetrics2 = rateTransform.transform(queryContext, metrics, constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics1.get(0).getDatapoints()); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics2.get(0).getDatapoints()); + } + + @Test + public void testRateWithTwoDPNNoInterpolationShouldReturnOneDP() { + long startTime=1*MINUTE, endTime=10*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(startTime, 1d); + actualDPs.put(endTime, 10d); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + expectedDPs.put(10*MINUTE, 1d); + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List constants = Arrays.asList("1m","FALSE","FALSE"); + List actualMetrics = rateTransform.transform(queryContext, metrics, constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithInterpolationWhenTwoDPShouldReturnInterpolatedDP() { + long startTime=1*MINUTE, endTime=10*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(startTime, 1d); + actualDPs.put(10*MINUTE, 10d); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + for(int i=2;i<=10;i++) { + expectedDPs.put(i*MINUTE, 1d); + } + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","FALSE","TRUE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics, constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithMissingFirstNLastDP() { + long startTime=1*MINUTE, endTime=10*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(4*MINUTE, 4d); + actualDPs.put(5*MINUTE, 5d); + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + for(int i=2;i<=10;i++) { + expectedDPs.put(i*MINUTE, 1d); + } + 
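/* Only the 4m and 5m datapoints exist above, yet every minute from 2m through 10m is expected to carry a rate of 1d: with the interpolation flag set to TRUE, the transform evidently extends the observed one-per-minute slope across the whole query window, not just between the two existing points. */ +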
expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","FALSE","TRUE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics,constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithCounterResetWithNoInterpolation() { + long startTime=1*MINUTE, endTime=5*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(1*MINUTE, 1d); + actualDPs.put(2*MINUTE, 2d); + actualDPs.put(3*MINUTE, 0d); + actualDPs.put(4*MINUTE, 1d); + actualDPs.put(5*MINUTE, 2d); + + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + + expectedDPs.put(2*MINUTE, 1d); + expectedDPs.put(3*MINUTE, -2d); + expectedDPs.put(4*MINUTE, 1d); + expectedDPs.put(5*MINUTE, 1d); + + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","FALSE","FALSE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics,constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithCounterResetWithNoSkipNegativeValuesNInterpolation() { + long startTime=1*MINUTE, endTime=8*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(1*MINUTE, 1d); + actualDPs.put(2*MINUTE, 2d); + actualDPs.put(3*MINUTE, 3d); + actualDPs.put(6*MINUTE, 0d); + actualDPs.put(8*MINUTE, 4d); + + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + + expectedDPs.put(2*MINUTE, 1d); + expectedDPs.put(3*MINUTE, 1d); + expectedDPs.put(4*MINUTE, -1d); + expectedDPs.put(5*MINUTE, -1d); + expectedDPs.put(6*MINUTE, -1d); + expectedDPs.put(7*MINUTE, 2d); + expectedDPs.put(8*MINUTE, 2d); + + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","FALSE","TRUE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics,constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithCounterResetWithSkipNegativeValuesNInterpolation() { + long startTime=1*MINUTE, endTime=8*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(1*MINUTE, 2d); + actualDPs.put(2*MINUTE, 3d); + actualDPs.put(5*MINUTE, 0d); + actualDPs.put(8*MINUTE, 3d); + + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new 
HashMap<>(); + + expectedDPs.put(2*MINUTE, 1d); + expectedDPs.put(6*MINUTE, 1d); + expectedDPs.put(7*MINUTE, 1d); + expectedDPs.put(8*MINUTE, 1d); + + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","TRUE","TRUE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics,constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithCounterResetWithSkipNegativeValuesNInterpolationNonUniform() { + long startTime=1*MINUTE, endTime=8*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(1*MINUTE, 2d); + actualDPs.put(2*MINUTE, 3d); + actualDPs.put(5*MINUTE, 0d); + actualDPs.put(8*MINUTE, 6d); + + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + + expectedDPs.put(2*MINUTE, 1d); + expectedDPs.put(6*MINUTE, 2d); + expectedDPs.put(7*MINUTE, 2d); + expectedDPs.put(8*MINUTE, 2d); + + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + List constants = Arrays.asList("1m","TRUE","TRUE"); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics,constants); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + @Test + public void testRateWithNoConstants() { + long startTime=1*MINUTE, endTime=8*MINUTE; + RateTransform rateTransform = new RateTransform(); + List metrics = new ArrayList<>(); + Metric actualMetric= new Metric("testScope", "testMetric"); + Map actualDPs = new HashMap<>(); + actualDPs.put(1*MINUTE, 2d); + actualDPs.put(2*MINUTE, 3d); + actualDPs.put(5*MINUTE, 0d); + actualDPs.put(8*MINUTE, 6d); + + actualMetric.setDatapoints(actualDPs); + metrics.add(actualMetric); + List expectedMetrics = new ArrayList<>(); + Metric expectedMetric= new Metric("testScope", "testMetric"); + Map expectedDPs = new HashMap<>(); + + expectedDPs.put(2*MINUTE, 1d); + expectedDPs.put(6*MINUTE, 2d); + expectedDPs.put(7*MINUTE, 2d); + expectedDPs.put(8*MINUTE, 2d); + + expectedMetric.setDatapoints(expectedDPs); + expectedMetrics.add(expectedMetric); + QueryContext queryContext = _getQueryContext(startTime, endTime); + List actualMetrics = rateTransform.transform(queryContext, metrics); + assertEquals(expectedMetrics.get(0).getDatapoints(), actualMetrics.get(0).getDatapoints()); + } + + private QueryContext _getQueryContext(long startTimestamp, long endTimestamp) { + QueryContext result = new QueryContext(); + TSDBQueryExpression expr = new TSDBQueryExpression(); + expr.setStartTimestamp(startTimestamp); + expr.setEndTimestamp(endTimestamp); + result.setExpression(expr); + return result; + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/ScaleTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/ScaleTransformTest.java index 4068f220c..449c33369 100755 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/ScaleTransformTest.java +++ 
b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/ScaleTransformTest.java @@ -247,27 +247,11 @@ public void testScaleTransformWithNoConstantShareNoCommonDPs() { metrics.add(metric_2); List constants = new ArrayList(); - constants.add("intersect"); Map expected = new HashMap(); - scaleTransform = new MetricReducerOrMappingTransform(new ScaleValueReducerOrMapping()); List result = scaleTransform.transform(null, metrics, constants); assertEquals(result.get(0).getDatapoints().size(), 0); assertEquals(expected, result.get(0).getDatapoints()); - - constants = new ArrayList(1); - expected = new HashMap(); - expected.put(1000L, 1.0); - expected.put(2000L, 2.0); - expected.put(3000L, 3.0); - expected.put(100L, 10.0); - expected.put(200L, 100.0); - expected.put(300L, 1000.0); - scaleTransform = new MetricReducerOrMappingTransform(new ScaleValueReducerOrMapping()); - result = scaleTransform.transform(null, metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), 6); - assertEquals(expected, result.get(0).getDatapoints()); } @Test @@ -299,27 +283,14 @@ public void testScaleTransformWithNoConstantShareSomeCommonDPs() { metrics.add(metric_2); List constants = new ArrayList(1); - constants.add("intersect"); Map expected = new HashMap(); + expected.put(3000L, 3000.0); List result = scaleTransform.transform(null, metrics, constants); assertEquals(result.get(0).getDatapoints().size(), 1); assertEquals(expected, result.get(0).getDatapoints()); - - constants = new ArrayList(1); - expected = new HashMap(); - expected.put(1000L, 1.0); - expected.put(2000L, 2.0); - expected.put(3000L, 3000.0); - expected.put(100L, 10.0); - expected.put(200L, 100.0); - scaleTransform = new MetricReducerOrMappingTransform(new ScaleValueReducerOrMapping()); - result = scaleTransform.transform(null, metrics, constants); - - assertEquals(result.get(0).getDatapoints().size(), 5); - assertEquals(expected, result.get(0).getDatapoints()); } @@ -365,4 +336,4 @@ public void testScaleTransformWithFullConstantShareSomeCommonDPs() { assertEquals(expected, result.get(0).getDatapoints()); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/
\ No newline at end of file
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
index 2c327bdfe..897993cc9 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Scale_VTransformTest.java
@@ -54,14 +54,15 @@ public void testScale_VTransformWithoutMetrics() {
         scale_vTransform.transform(null, metrics);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+    @Test
     public void testScale_VTransformWithOnlyOneMetric() {
         Transform scale_vTransform = new MetricZipperTransform(new ScaleValueZipper());
         List<Metric> metrics = new ArrayList<Metric>();
         Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
 
         metrics.add(metric);
-        scale_vTransform.transform(null, metrics);
+        List<Metric> result = scale_vTransform.transform(null, metrics);
+        assertEquals(result.size(), 0);
     }
 
     @Test(expected = IllegalArgumentException.class)
@@ -77,7 +78,7 @@ public void testScale_VTransformWithConstants() {
         scale_vTransform.transform(null, metrics, constants);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+    @Test
     public void testScale_VTransformVectorWithoutPoints() {
         Transform scale_vTransform = new MetricZipperTransform(new ScaleValueZipper());
         Map<Long, Double> datapoints = new HashMap<>();
@@ -93,7 +93,8 @@
         metrics.add(metric);
         metrics.add(vector);
 
-        scale_vTransform.transform(null, metrics);
+        List<Metric> result = scale_vTransform.transform(null, metrics);
+        assertEquals(result.get(0).getDatapoints().size(), 0);
     }
 
     @Test
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/SliceTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/SliceTransformTest.java
new file mode 100644
index 000000000..9ff8011ac
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/SliceTransformTest.java
@@ -0,0 +1,109 @@
+package com.salesforce.dva.argus.service.metric.transform;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Test;
+
+import com.salesforce.dva.argus.entity.Metric;
+import com.salesforce.dva.argus.util.QueryContext;
+import com.salesforce.dva.argus.util.TSDBQueryExpression;
+
+public class SliceTransformTest {
+    private static final String TEST_SCOPE = "test-scope";
+    private static final String TEST_METRIC = "test-metric";
+    private static final long SECOND = 1000L;
+
+    @Test
+    public void testWithAbsoluteTime() {
+        SliceTransform transform = new SliceTransform();
+
+        Metric actualMetric = new Metric(TEST_SCOPE, TEST_METRIC);
+        Map<Long, Double> actualDps = new HashMap<>();
+        actualDps.put(1L, 1d);
+        actualDps.put(2L, 2d);
+        actualDps.put(3L, 3d);
+        actualDps.put(4L, 4d);
+        actualDps.put(5L, 5d);
+
+        actualMetric.setDatapoints(actualDps);
+
+        List<String> constants = new ArrayList<>();
+        constants.add("2");
+        constants.add("4");
+
+        QueryContext context = new QueryContext();
+
+        QueryContext childContext = new QueryContext();
+        TSDBQueryExpression exp = new TSDBQueryExpression();
+        exp.setStartTimestamp(1L);
+        exp.setEndTimestamp(6L);
+        childContext.setExpression(exp);
+
+        context.setChildContexts(Arrays.asList(childContext));
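+
+        /* The "2" and "4" constants are absolute millisecond timestamps; SliceTransform
+         * resolves them against the child context's 1..6 query range, so only the points
+         * at 2, 3 and 4 should survive the slice performed below. */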
+        List<Metric> actual = transform.transform(context, Arrays.asList(actualMetric), constants);
+
+        Metric expectedMetric = new Metric(TEST_SCOPE, TEST_METRIC);
+        Map<Long, Double> expectedDps = new HashMap<>();
+        expectedDps.put(2L, 2d);
+        expectedDps.put(3L, 3d);
+        expectedDps.put(4L, 4d);
+
+        expectedMetric.setDatapoints(expectedDps);
+
+        List<Metric> expected = Arrays.asList(expectedMetric);
+
+        assertEquals(expected.get(0), actual.get(0));
+        assertEquals(expected.get(0).getDatapoints(), actual.get(0).getDatapoints());
+    }
+
+    @Test
+    public void testWithRelativeTime() {
+        SliceTransform transform = new SliceTransform();
+
+        Metric actualMetric = new Metric(TEST_SCOPE, TEST_METRIC);
+        Map<Long, Double> actualDps = new HashMap<>();
+        actualDps.put(1 * SECOND, 1d);
+        actualDps.put(2 * SECOND, 2d);
+        actualDps.put(3 * SECOND, 3d);
+        actualDps.put(4 * SECOND, 4d);
+        actualDps.put(5 * SECOND, 5d);
+        actualDps.put(6 * SECOND, 6d);
+        actualDps.put(7 * SECOND, 7d);
+
+        actualMetric.setDatapoints(actualDps);
+
+        List<String> constants = new ArrayList<>();
+        constants.add("start + 2s");
+        constants.add("end-2s");
+
+        QueryContext context = new QueryContext();
+        QueryContext childContext = new QueryContext();
+        TSDBQueryExpression exp = new TSDBQueryExpression();
+        exp.setStartTimestamp(1 * SECOND);
+        exp.setEndTimestamp(7 * SECOND);
+        childContext.setExpression(exp);
+
+        context.setChildContexts(Arrays.asList(childContext));
+        List<Metric> actual = transform.transform(context, Arrays.asList(actualMetric), constants);
+
+        Metric expectedMetric = new Metric(TEST_SCOPE, TEST_METRIC);
+        Map<Long, Double> expectedDps = new HashMap<>();
+        expectedDps.put(3 * SECOND, 3d);
+        expectedDps.put(4 * SECOND, 4d);
+        expectedDps.put(5 * SECOND, 5d);
+
+        expectedMetric.setDatapoints(expectedDps);
+
+        List<Metric> expected = Arrays.asList(expectedMetric);
+
+        assertEquals(expected.get(0), actual.get(0));
+        assertEquals(expected.get(0).getDatapoints(), actual.get(0).getDatapoints());
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
index 4ac998f90..e73985780 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/metric/transform/Sum_VTransformTest.java
@@ -54,14 +54,15 @@ public void testSum_VTransformWithoutMetrics() {
         sum_vTransform.transform(null, metrics);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+    @Test
     public void testSum_VTransformWithOnlyOneMetric() {
         Transform sum_vTransform = new MetricZipperTransform(new SumValueZipper());
         List<Metric> metrics = new ArrayList<Metric>();
         Metric metric = new Metric(TEST_SCOPE, TEST_METRIC);
 
         metrics.add(metric);
-        sum_vTransform.transform(null, metrics);
+        List<Metric> result = sum_vTransform.transform(null, metrics);
+        assertEquals(result.size(), 0);
     }
 
     @Test(expected = IllegalArgumentException.class)
@@ -77,7 +78,7 @@ public void testSum_VTransformWithConstants() {
         sum_vTransform.transform(null, metrics, constants);
     }
 
-    @Test(expected = IllegalArgumentException.class)
+    @Test
     public void testSum_VTransformVectorWithoutPoints() {
         Transform sum_vTransform = new MetricZipperTransform(new SumValueZipper());
         Map<Long, Double> datapoints = new HashMap<>();
@@ -93,7 +93,8 @@
         metrics.add(metric);
         metrics.add(vector);
 
-        sum_vTransform.transform(null, metrics);
+        List<Metric> result = sum_vTransform.transform(null, metrics);
+        assertEquals(result.get(0).getDatapoints().size(), 0);
     }
 
     @Test
diff --git
a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/CounterMetricTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/CounterMetricTest.java new file mode 100644 index 000000000..fbe357cf9 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/CounterMetricTest.java @@ -0,0 +1,103 @@ +package com.salesforce.dva.argus.service.monitor; + +import com.google.common.collect.ImmutableMap; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MonitorService; +import org.junit.Before; +import org.junit.Test; + +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +import static org.junit.Assert.assertEquals; + +public class CounterMetricTest { + private static final double DOUBLE_COMPARISON_MAX_DELTA = 0.001; + private static final MonitorService.Counter COUNTER = MonitorService.Counter.ALERTS_EVALUATED; + private static final String SCOPE = COUNTER.getScope(); + private static final String METRIC_NAME = COUNTER.getMetric(); + private static final Map TAGS = ImmutableMap.of("host", "localhost"); + private CounterMetric cm; + + @Before + public void setUp() { + final Metric m = new Metric(SCOPE, METRIC_NAME); + m.setTags(TAGS); + cm = new CounterMetric(m, COUNTER); + } + + @Test + public void addValue_sumThenResetValue_testParallelAdds() throws Exception { + final CountDownLatch gate = new CountDownLatch(1); + final int workerCount = 3; + final int iterations = 100; + final Thread[] workers = new Thread[workerCount]; + + for (int i = 0; i < workers.length; i++) { + Thread thread = new Thread(new Runnable() { + + @Override + public void run() { + try { + gate.await(); + for (int j = 0; j < iterations; j++) { + cm.addValue(1.0); + } + } catch (InterruptedException ex) { + org.junit.Assert.fail("This should never happen."); + } + } + }); + + thread.setDaemon(true); + thread.start(); + workers[i] = thread; + } + gate.countDown(); + for (Thread worker : workers) { + worker.join(1500); + } + + final double expectedCounterValue = workerCount * iterations; + assertEquals(expectedCounterValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedCounterValue, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedCounterValue, cm.computeNewGaugeValueAndResetGaugeAdder(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedCounterValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(0.0, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + final int moreIterations = 10; + final double delta = 5.0; + for (int i = 0; i < moreIterations; i++) { + cm.addValue(delta); + } + final double expectedNewGaugeValue = delta * moreIterations; + final double expectedNewCounterValue = expectedNewGaugeValue + expectedCounterValue; + assertEquals(expectedNewCounterValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedNewGaugeValue, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedNewGaugeValue, cm.computeNewGaugeValueAndResetGaugeAdder(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedNewCounterValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(0.0, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + } + + @Test + public void setValue_test() { + final int iterations = 10; + final double delta = 5.0; + for (int i = 0; i < iterations; i++) { + cm.addValue(delta); + } + final double expectedCounterValue = delta * iterations; + 
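/* Ten adds of 5.0 should leave both views at 50.0: getValue() is the monotonically growing counter, while getCurrentGaugeAdderValue() is the amount accumulated since the last gauge reset; setValue() below is expected to overwrite both at once. */ +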
assertEquals(expectedCounterValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedCounterValue, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + final double newValue = 78.6; + cm.setValue(newValue); + + assertEquals(newValue, cm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(newValue, cm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/DataLagServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/DataLagServiceTest.java new file mode 100644 index 000000000..ceba4aaea --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/DataLagServiceTest.java @@ -0,0 +1,262 @@ +/* + * + * * Copyright (c) 2016, Salesforce.com, Inc. + * * All rights reserved. + * * + * * Redistribution and use in source and binary forms, with or without + * * modification, are permitted provided that the following conditions are met: + * * + * * 1. Redistributions of source code must retain the above copyright notice, + * * this list of conditions and the following disclaimer. + * * + * * 2. Redistributions in binary form must reproduce the above copyright notice, + * * this list of conditions and the following disclaimer in the documentation + * * and/or other materials provided with the distribution. + * * + * * 3. Neither the name of Salesforce.com nor the names of its contributors may + * * be used to endorse or promote products derived from this software without + * * specific prior written permission. + * * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * * POSSIBILITY OF SUCH DAMAGE. 
+ * + */ + +package com.salesforce.dva.argus.service.monitor; + +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.MailService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MetricStorageService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.mail.DefaultMailService; +import com.salesforce.dva.argus.service.metric.DefaultMetricService; +import com.salesforce.dva.argus.service.metric.ElasticSearchConsumerOffsetMetricsService; +import com.salesforce.dva.argus.service.tsdb.DefaultTSDBService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.junit.Before; +import org.junit.Test; +import org.junit.Ignore; +import org.junit.runner.RunWith; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; +import org.powermock.core.classloader.annotations.PrepareForTest; +import org.powermock.modules.junit4.PowerMockRunner; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static junit.framework.TestCase.assertTrue; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.powermock.api.mockito.PowerMockito.when; + +@RunWith(PowerMockRunner.class) +@PrepareForTest(SystemConfiguration.class) +public class DataLagServiceTest { + + private DataLagMonitorGoldenMetric goldenMetricService; + private DataLagMonitorConsumerOffset consumerOffsetService; + @Mock SystemConfiguration systemConfigGoldenMetric; + @Mock SystemConfiguration systemConfigConsumerOffset; + @Mock MetricService mockedMetricService; + @Mock MetricStorageService mockedMetricStorageService; + @Mock TSDBService mockedTSDBService; + @Mock MailService mailService; + + private static final List DC_LIST = Arrays.asList("DC1", "DC2", "DC3", "DC4"); + private static final String DEFAULT_EXPRESSION = "-5m:scope.default:metric.default:max:1m-max"; + + @Before + public void setUp() { + setupMockServices(); + goldenMetricService = spy(new DataLagMonitorGoldenMetric(systemConfigGoldenMetric, mockedMetricService, mockedTSDBService)); + consumerOffsetService = spy(new DataLagMonitorConsumerOffset(systemConfigConsumerOffset, mockedMetricStorageService, mockedMetricService, mockedTSDBService, mailService)); + } + + private void setupMockServices() { + systemConfigGoldenMetric = mock(SystemConfiguration.class); + systemConfigConsumerOffset = mock(SystemConfiguration.class); + mockedMetricService = mock(DefaultMetricService.class); + mockedMetricStorageService = mock(ElasticSearchConsumerOffsetMetricsService.class); + mockedTSDBService = mock(DefaultTSDBService.class); + mailService = mock(DefaultMailService.class); + when(mailService.sendMessage(any())).thenReturn(true); + + 
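// Stub the monitor configuration: DC4 is force-flagged as lagging via
+        // DATA_LAG_ENFORCE_DC_LIST, the trigger/clear thresholds are per-DC JSON
+        // maps, and the query expressions are templated on #DC#. These settings
+        // drive the consumer-offset and golden-metric monitors under test.
+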
when(systemConfigConsumerOffset.getValue(SystemConfiguration.Property.DC_LIST)).thenReturn(String.join(",", DC_LIST)); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST, "DC4"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_DEFAULT_EXPRESSION, DEFAULT_EXPRESSION); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_INERTIA, "60000"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_CLEAR_THRESHOLD, "{\"16\":[\"DC1\"], \"13\": [\"DC3\"]}"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_DEFAULT_CLEAR_THRESHOLD, "10"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_TRIGGER_THRESHOLD, "{\"36\":[\"DC1\",\"DC2\"], \"33\": [\"DC3\"]}"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_DEFAULT_TRIGGER_THRESHOLD, "20"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_QUERY_EXPRESSION, "{\"-5m:scope.test:metric.test{groupId=*testGroupId*,topic=*test.#DC#.topic*}:max:1m-max\":[\"DC1\",\"DC2\",\"DC3\"]}"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_DEBUG, "false"); + setupDefaultSysConfigValues(systemConfigConsumerOffset, DataLagMonitorConsumerOffset.Property.DATA_LAG_EMAIL, "test@example.com"); + + when(systemConfigGoldenMetric.getValue(SystemConfiguration.Property.DC_LIST)).thenReturn(String.join(",", DC_LIST)); + setupDefaultSysConfigValues(systemConfigGoldenMetric, DataLagService.Property.DATA_LAG_ENFORCE_DC_LIST, "DC4"); + setupDefaultSysConfigValues(systemConfigGoldenMetric, DataLagMonitorGoldenMetric.Property.DATA_LAG_DEFAULT_EXPRESSION, DEFAULT_EXPRESSION); + setupDefaultSysConfigValues(systemConfigGoldenMetric, DataLagMonitorGoldenMetric.Property.DATA_LAG_THRESHOLD, "10000"); // 10 seconds threshold. + setupDefaultSysConfigValues(systemConfigGoldenMetric, DataLagMonitorGoldenMetric.Property.DATA_LAG_QUERY_EXPRESSION, "{\"-1h:scope.#DC#.test:metric.test:avg:1m-sum\":[\"DC1\",\"DC2\"], \"-4h:scope.#DC#.test2:metric.test2:avg:1m-sum\": [\"DC3\"]}"); + + } + + @Test + public void testQueryForDC() { + Map tags = new HashMap<>(); + tags.put("groupId", "*testGroupId*"); + Long currentTime = System.currentTimeMillis(); + for( String dc: DC_LIST) { + //ConsumerOffset. + tags.put("topic", "*test." + dc + ".topic*"); + String expression = "-5m:scope.test:metric.test{groupId=*testGroupId*,topic=*test." + dc.toLowerCase() + ".topic*}:max:1m-max"; + if (dc.equalsIgnoreCase("DC4")) { + expression = DEFAULT_EXPRESSION; + } + List mQList = Arrays.asList(new MetricQuery("scope.test", "metric.test", tags, currentTime - 5 * 60 * 1000L, currentTime)); + when(mockedMetricService.getQueries(expression, currentTime)).thenReturn(mQList); + assertEquals(dc, consumerOffsetService.getDCFromTopic("*test." + dc.toLowerCase() + ".topic*")); + + //GoldenMetric. + if(dc.equalsIgnoreCase("DC4")) { + when(mockedMetricService.getMetrics(DEFAULT_EXPRESSION, currentTime)).thenReturn(null); + } else if(dc.equalsIgnoreCase("DC3")) { + when(mockedMetricService.getMetrics("-1h:scope." + dc + ".test2:metric.test2:avg:1m-sum", currentTime)).thenReturn(null); + } else { + when(mockedMetricService.getMetrics("-1h:scope." 
+ dc + ".test:metric.test:avg:1m-sum", currentTime)).thenReturn(null); + } + } + consumerOffsetService.queryMetricsForDC(new HashSet<>(DC_LIST), currentTime); + goldenMetricService.queryMetricsForDC(new HashSet<>(DC_LIST), currentTime); + } + + @Test + public void testComputeDataLag() { + Metric triggerM = new Metric("scope.test", "metric.test"); + Metric clearM = new Metric("scope.test", "metric.test"); + Metric noChange = new Metric("scope.test", "metric.test"); + Map lagState = new HashMap<>(); + Long currTime = System.currentTimeMillis(); + for(int i = 0 ; i < 10; i++) { + triggerM.addDatapoint( currTime - (i + 1) * 20_000, 42.0 + i % 3); + clearM.addDatapoint(currTime - i * 1000, 1.0 + i % 4); + noChange.addDatapoint(currTime - i * 3000, i * 2.0); // Not all datapoints satisfy clear criterion. + } + + assertTrue(consumerOffsetService.computeDataLag("DC1", Arrays.asList(triggerM))); + assertFalse(consumerOffsetService.computeDataLag("DC1", Arrays.asList(clearM))); + + assertTrue(goldenMetricService.computeDataLag("DC1", Arrays.asList(triggerM))); + assertFalse(goldenMetricService.computeDataLag("DC1", Arrays.asList(clearM))); + + lagState.put("DC1", true); // Default to true and it should not change. + TestUtils.setField(consumerOffsetService, "lagStatePerDC", lagState); + assertTrue(consumerOffsetService.computeDataLag("DC1", Arrays.asList(noChange))); + assertFalse(goldenMetricService.computeDataLag("DC1", Arrays.asList(noChange))); + } + + @Test + public void testIsDataLagging() { + assertTrue(consumerOffsetService.isDataLagging("DC4")); //enforceLagPresentSet. + assertFalse(consumerOffsetService.isDataLagging("Dc1 ")); + assertFalse(consumerOffsetService.isDataLagging("dc5")); + assertTrue(goldenMetricService.isDataLagging("DC4")); //enforceLagPresentSet. + assertFalse(goldenMetricService.isDataLagging("dc2 ")); + assertFalse(goldenMetricService.isDataLagging("dc5")); + + Map lagState = new HashMap<>(); + // Set all dc to enable data lag. + DC_LIST.forEach(dc -> lagState.put(dc, true)); + TestUtils.setField(consumerOffsetService, "lagStatePerDC", lagState); + assertTrue(consumerOffsetService.isDataLagging(" dC8 ")); + assertTrue(consumerOffsetService.isDataLagging(" dC3")); + + TestUtils.setField(goldenMetricService, "_isDataLaggingbyDCMap", lagState); + assertTrue(goldenMetricService.isDataLagging(" dC8 ")); + assertTrue(goldenMetricService.isDataLagging(" dC1")); + + //Set some dc data lag state to be true and check for rest. 
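+        // DC4 is expected to report lag regardless of the per-DC state because it
+        // is pinned via the DATA_LAG_ENFORCE_DC_LIST configuration above.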
+        List<String> enableDataLagForSomeDC = Arrays.asList("DC1", "DC3");
+        DC_LIST.forEach(dc -> lagState.put(dc, false));
+        enableDataLagForSomeDC.forEach(dc -> lagState.put(dc, true));
+        TestUtils.setField(consumerOffsetService, "lagStatePerDC", lagState);
+        TestUtils.setField(goldenMetricService, "_isDataLaggingbyDCMap", lagState);
+        DC_LIST.forEach(dc -> {
+            if (enableDataLagForSomeDC.contains(dc) || dc.equalsIgnoreCase("DC4")) {
+                assertTrue(consumerOffsetService.isDataLagging(dc));
+                assertTrue(goldenMetricService.isDataLagging(dc));
+            } else {
+                assertFalse(consumerOffsetService.isDataLagging(dc));
+                assertFalse(goldenMetricService.isDataLagging(dc));
+            }
+        });
+    }
+
+    @Test
+    @Ignore("Test failing in the Travis environment; disabling it")
+    public void testPushMetric() {
+        ArgumentCaptor<List<Metric>> captor = ArgumentCaptor.forClass(List.class);
+        List<Metric> expectedOutput = new ArrayList<>();
+        doNothing().when(mockedTSDBService).putMetrics(anyList());
+        for (String dc : DC_LIST) {
+            Long currentTime = System.currentTimeMillis();
+            Metric m = new Metric("argus.core", "datalag.offset");
+            m.setTag("dc", dc);
+            m.setTag("host", SystemConfiguration.getHostname());
+            m.addDatapoint(currentTime, 1.0);
+            consumerOffsetService.pushMetric(currentTime, 1.0, dc);
+            expectedOutput.add(m);
+
+            m = new Metric(m);
+            m.setMetric("datalag.seconds");
+            goldenMetricService.pushMetric(currentTime, 1.0, dc);
+            expectedOutput.add(m);
+        }
+        verify(mockedTSDBService, times(8)).putMetrics(captor.capture());
+        List<Metric> actualOutput = captor.getAllValues().stream().flatMap(Collection::stream).collect(Collectors.toList());
+        Collections.sort(actualOutput);
+        Collections.sort(expectedOutput);
+        assertEquals(expectedOutput, actualOutput);
+    }
+
+    private void setupDefaultSysConfigValues(SystemConfiguration mocksysConfig, DataLagService.Property p, String reply) {
+        when(mocksysConfig.getValue(p.getName(), p.getDefaultValue())).thenReturn(reply);
+    }
+
+    private void setupDefaultSysConfigValues(SystemConfiguration mocksysConfig, DataLagMonitorConsumerOffset.Property p, String reply) {
+        when(mocksysConfig.getValue(p.getName(), p.getDefaultValue())).thenReturn(reply);
+    }
+
+    private void setupDefaultSysConfigValues(SystemConfiguration mocksysConfig, DataLagMonitorGoldenMetric.Property p, String reply) {
+        when(mocksysConfig.getValue(p.getName(), p.getDefaultValue())).thenReturn(reply);
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/GaugeMetricTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/GaugeMetricTest.java
new file mode 100644
index 000000000..2d0042eab
--- /dev/null
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/monitor/GaugeMetricTest.java
@@ -0,0 +1,105 @@
+package com.salesforce.dva.argus.service.monitor;
+
+import com.google.common.collect.ImmutableMap;
+import com.salesforce.dva.argus.entity.Metric;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+
+import static org.junit.Assert.assertEquals;
+
+public class GaugeMetricTest {
+    private static final double DOUBLE_COMPARISON_MAX_DELTA = 0.001;
+    private static final String SCOPE = "test.scope";
+    private static final String METRIC_NAME = "test.name";
+    private static final Map<String, String> TAGS = ImmutableMap.of("host", "localhost");
+    private GaugeMetric gm;
+
+    @Before
+    public void setUp() {
+        final Metric m = new Metric(SCOPE, METRIC_NAME);
+        m.setTags(TAGS);
+        gm = new GaugeMetric(m);
+    }
+
+    @Test
+    public void getObjectName_test() {
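+        // The JMX ObjectName appears to be derived from the gauge's scope, metric
+        // name, and tags, matching the literal asserted below.
+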
assertEquals("ArgusMetrics:type=Gauge,scope=test.scope,metric=test.name,host=localhost", gm.getObjectName()); + } + + @Test + public void addValue_sumThenResetValue_testParallelAdds() throws Exception { + final CountDownLatch gate = new CountDownLatch(1); + final int workerCount = 3; + final int iterations = 100; + final Thread[] workers = new Thread[workerCount]; + + for (int i = 0; i < workers.length; i++) { + Thread thread = new Thread(new Runnable() { + + @Override + public void run() { + try { + gate.await(); + for (int j = 0; j < iterations; j++) { + gm.addValue(1.0); + } + } catch (InterruptedException ex) { + org.junit.Assert.fail("This should never happen."); + } + } + }); + + thread.setDaemon(true); + thread.start(); + workers[i] = thread; + } + gate.countDown(); + for (Thread worker : workers) { + worker.join(1500); + } + + final double expectedGaugeValue = workerCount * iterations; + assertEquals(0.0, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedGaugeValue, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedGaugeValue, gm.computeNewGaugeValueAndResetGaugeAdder(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedGaugeValue, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(0.0, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + final int moreIterations = 10; + final double delta = 5.0; + for (int i = 0; i < moreIterations; i++) { + gm.addValue(delta); + } + final double expectedNewGaugeValue = delta * moreIterations; + assertEquals(expectedGaugeValue, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedNewGaugeValue, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedNewGaugeValue, gm.computeNewGaugeValueAndResetGaugeAdder(), DOUBLE_COMPARISON_MAX_DELTA); + + assertEquals(expectedNewGaugeValue, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(0.0, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + } + + @Test + public void setValue_test() { + final int iterations = 10; + final double delta = 5.0; + for (int i = 0; i < iterations; i++) { + gm.addValue(delta); + } + final double expectedGaugeValue = delta * iterations; + assertEquals(0.0, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(expectedGaugeValue, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + + final double newValue = 78.6; + gm.setValue(newValue); + + assertEquals(0.0, gm.getValue(), DOUBLE_COMPARISON_MAX_DELTA); + assertEquals(newValue, gm.getCurrentGaugeAdderValue(), DOUBLE_COMPARISON_MAX_DELTA); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/mq/kafka/ProducerConsumerSerializationTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/mq/kafka/ProducerConsumerSerializationTest.java new file mode 100644 index 000000000..3e7151d30 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/mq/kafka/ProducerConsumerSerializationTest.java @@ -0,0 +1,69 @@ +package com.salesforce.dva.argus.service.mq.kafka; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.system.SystemConfiguration; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import 
org.mockito.junit.MockitoJUnitRunner; + +import java.util.Properties; +import java.util.concurrent.ExecutorService; + +import static org.junit.Assert.assertEquals; + +@RunWith(MockitoJUnitRunner.class) +public class ProducerConsumerSerializationTest { + private SystemConfiguration config; + @Mock + private KafkaProducer kafkaProducer; + @Mock + private ExecutorService executorService; + private ObjectMapper mapper; + private int maxBufferSize = 1000; + + private DefaultProducer producer; + private DefaultConsumer consumer; + + @Before + public void setUp() { + config = new SystemConfiguration(new Properties()); + mapper = new ObjectMapper(); + producer = new DefaultProducer(config, kafkaProducer, executorService, mapper); + consumer = new DefaultConsumer(config, mapper, maxBufferSize); + } + + @Test + public void serialize_deserialize_testAnnotation() throws Exception { + Annotation a = new Annotation("testsource", + "testid", + "testtype", + "testscope", + "testmetric", + 1549656991903L); + a.setTag("testtagkey", "testtagvalue"); + + String json = producer.serialize(a); + Annotation result = consumer.deserialize(json, Annotation.class); + + assertEquals(a, result); + } + + @Test + public void serialize_deserialize_testMetric() throws Exception { + Metric m = new Metric("testscope", "testmetric"); + m.setTag("testtagkey", "testtagvalue"); + m.setDatapoints(ImmutableMap.of(1549656000000L, 3.14)); + + String json = producer.serialize(m); + Metric result = consumer.deserialize(json, Metric.class); + + assertEquals(m, result); + } + +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreServiceTest.java new file mode 100644 index 000000000..10e989513 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/querystore/ElasticSearchQueryStoreServiceTest.java @@ -0,0 +1,242 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + + +package com.salesforce.dva.argus.service.querystore; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.entity.QueryStoreRecord; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.system.SystemException; +import org.apache.http.entity.BasicHttpEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.mockito.ArgumentCaptor; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Properties; +import java.util.Set; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class ElasticSearchQueryStoreServiceTest { + + private RestClient restClient; + private String createSucessReply = String.join("\n", + "{" + + " \"took\": 55," + + " \"errors\": false," + + " \"items\": [" + + " {" + + " \"create\": {" + + " \"_index\": \"argusqs-v1\"," + + " \"_type\": \"argus-query_type\"," + + " \"_id\": \"26efe188e63fb16c94bd6ec9bbd98d0f\"," + + " \"_version\": 1," + + " \"result\": \"created\"," + + " \"_shards\": {" + + " \"total\": 2," + + " \"successful\": 1," + + " \"failed\": 0" + + " }," + + " \"created\": true," + + " \"status\": 201" + + " }" + + " }" + + " ]" + + "}"); + + static private SystemConfiguration systemConfig; + static private ElasticSearchQueryStoreService _esQueryStoreService; + static private String queryStoreIndexName; + static private String queryStoreTypeName; + + + @BeforeClass + public static void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + _esQueryStoreService = new ElasticSearchQueryStoreService(systemConfig, mockedMonitor); + queryStoreIndexName=systemConfig.getValue(ElasticSearchQueryStoreService.Property.QUERY_STORE_ES_INDEX_NAME.getName(), + ElasticSearchQueryStoreService.Property.QUERY_STORE_ES_INDEX_NAME.getDefaultValue()); + queryStoreTypeName=systemConfig.getValue(ElasticSearchQueryStoreService.Property.QUERY_STORE_ES_INDEX_TYPE.getName(), + 
ElasticSearchQueryStoreService.Property.QUERY_STORE_ES_INDEX_TYPE.getDefaultValue()); + } + + @Test + public void testPutIndexUsingQueryStoreIndex() throws IOException { + + List metrics = new ArrayList<>(); + Metric myMetric = new Metric("scope1", "metric1"); + metrics.add(myMetric); + ElasticSearchQueryStoreService spyService = _initializeSpyService(_esQueryStoreService, createSucessReply, createSucessReply); + + Set records = new HashSet<>(); + + for(Metric m : metrics) { + QueryStoreRecord qsr = new QueryStoreRecord(m.getScope(),m.getMetric()); + records.add(qsr); + } + + spyService.upsertQueryStoreRecords(records); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(Request.class); + + verify(restClient, times(1)).performRequest(requestCaptor.capture()); + + Request capturedRequest = requestCaptor.getValue(); + String createJson = EntityUtils.toString(capturedRequest.getEntity()); + assertTrue(createJson.contains("index")); + assertTrue(createJson.contains("mts")); + assertTrue(createJson.contains("sourcehost")); + assertTrue(createJson.contains("scope")); + assertTrue(createJson.contains("metric")); + String expectedURL = String.format("/%s/%s/_bulk", queryStoreIndexName, queryStoreTypeName); + assertEquals(expectedURL, capturedRequest.getEndpoint()); + } + + @Test + public void testDoExtractResponse() throws Exception { + final String message = "this is a test"; + BasicHttpEntity entity = new BasicHttpEntity(); + try(ByteArrayInputStream bis = new ByteArrayInputStream(message.getBytes())) { + entity.setContent(bis); + } + catch (IOException e) { + throw e; + } + + String responseMessage = ElasticSearchQueryStoreService.doExtractResponse(200, entity); + assertEquals("expect the entity to be equal after extraction", message, responseMessage); + } + + @Test + public void pushExistingRecordToElasticSearch() + { + QueryStoreRecord record1= new QueryStoreRecord("scope1","metric1"); + + Set records = new HashSet<>(); + records.add(record1); + ElasticSearchQueryStoreService spyService = _initializeSpyService(_esQueryStoreService, createSucessReply, createSucessReply); + spyService.addQueryRecordsToCreatedBloom(records); + + List metrics = new ArrayList<>(); + Metric myMetric = new Metric("scope1", "metric1"); + metrics.add(myMetric); + Set recordsToAdd = new HashSet<>(metrics.size()); + recordsToAdd.add(myMetric); + + spyService.putArgusWsQueries(metrics); + verify(spyService,times(1)).insertRecordsToES(new HashSet<>(metrics.size())); + verify(spyService,never()).upsertQueryStoreRecords(new HashSet<>(metrics.size())); + + } + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testDoExtractResponse400() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Status code: 400"); + ElasticSearchQueryStoreService.doExtractResponse(400, null); + } + + @Test + public void testDoExtractResponse500() { + expectedException.expect(SystemException.class); + expectedException.expectMessage("Status code: 500"); + ElasticSearchQueryStoreService.doExtractResponse(500, null); + } + + @Test + public void testQueryStoreRecordListMapper() throws Exception { + ObjectMapper mapper = ElasticSearchQueryStoreService.getQueryStoreObjectMapper(new QueryStoreRecordList.IndexSerializer()); + + QueryStoreRecord record1 = new QueryStoreRecord("scope1", "metric1"); + QueryStoreRecordList recordList = new QueryStoreRecordList(new HashSet<>(Arrays.asList(record1)), QueryStoreRecordList.HashAlgorithm.fromString("MD5")); + + 
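// Serializing through the custom IndexSerializer should produce Elasticsearch
+        // bulk-index JSON that carries each record's scope and metric fields.
+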
String serialized = mapper.writeValueAsString(recordList); + + assertTrue("expect the serialized record to have scope", serialized.contains("scope1")); + assertTrue("expect the serialized record to have metric", serialized.contains("metric1")); + } + + private ElasticSearchQueryStoreService _initializeSpyService(ElasticSearchQueryStoreService service, + String firstReply, String secondReply) { + + restClient = mock(RestClient.class); + + service.setESRestClient(restClient); + + ElasticSearchQueryStoreService spyService = spy(service); + + doAnswer(new Answer() { + private int count = 0; + + public Object answer(InvocationOnMock invocation) { + count++; + if (count == 1) { + return firstReply; + } + + return secondReply; + } + }).when(spyService).extractResponse(any()); + + return spyService; + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceIT.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceIT.java similarity index 80% rename from ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceIT.java rename to ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceIT.java index 99e2ed283..91f1a976b 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/SchedulingServiceIT.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceIT.java @@ -1,4 +1,4 @@ -package com.salesforce.dva.argus.service; +package com.salesforce.dva.argus.service.schedule; import static com.salesforce.dva.argus.service.MQService.MQQueue.ALERT; import static org.junit.Assert.assertEquals; @@ -6,18 +6,25 @@ import java.util.List; import java.util.Properties; +import com.salesforce.dva.argus.IntegrationTest; +import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.MQService; +import com.salesforce.dva.argus.service.SchedulingService; +import com.salesforce.dva.argus.service.UserService; import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; +import com.salesforce.dva.argus.AbstractTestIT; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.service.alert.DefaultAlertService.AlertWithTimestamp; +import org.junit.experimental.categories.Category; -public class SchedulingServiceIT extends AbstractTest { +@Category(IntegrationTest.class) +public class SchedulingServiceIT extends AbstractTestIT { @Test public void testAlertSchedulingWithDistributedDatabase() throws InterruptedException { - + Properties props = new Properties(); props.put("service.binding.scheduling", "com.salesforce.dva.argus.service.schedule.DistributedDatabaseSchedulingService"); system = getInstance(props); diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceTest.java new file mode 100644 index 000000000..d476eaa98 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schedule/SchedulingServiceTest.java @@ -0,0 +1,175 @@ +/* + * Copyright (c) 2016, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. 
Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + +package com.salesforce.dva.argus.service.schedule; + +import com.google.inject.Provider; +import com.salesforce.dva.argus.entity.Alert; +import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; +import com.salesforce.dva.argus.entity.Trigger; +import com.salesforce.dva.argus.entity.Trigger.TriggerType; +import com.salesforce.dva.argus.service.AuditService; +import com.salesforce.dva.argus.service.GlobalInterlockService; +import com.salesforce.dva.argus.service.HistoryService; +import com.salesforce.dva.argus.service.MQService; +import com.salesforce.dva.argus.service.MailService; +import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.service.UserService; +import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import com.salesforce.dva.argus.service.alert.notifier.AuditNotifier; +import com.salesforce.dva.argus.service.alert.retriever.ImageDataRetriever; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; + +import javax.persistence.EntityManager; +import javax.persistence.Persistence; +import java.util.ArrayList; +import java.util.Arrays; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +@RunWith(org.mockito.junit.MockitoJUnitRunner.class) +public class SchedulingServiceTest { + @Mock Provider _emProviderMock; + @Mock + TSDBService _tsdbServiceMock; + @Mock private MetricService _metricServiceMock; + @Mock private MailService _mailServiceMock; + @Mock private HistoryService _historyServiceMock; + @Mock private MonitorService _monitorServiceMock; + @Mock private AuditService _auditServiceMock; + @Mock private ImageDataRetriever imageDataRetrieverMock; + + private EntityManager em; + + static private UserService userService; + static 
private SystemMain system;
+    static private MQService mqService;
+
+
+    @BeforeClass
+    static public void setUpClass() {
+        system = TestUtils.getInstance();
+        system.start();
+        userService = system.getServiceFactory().getUserService();
+        mqService = system.getServiceFactory().getMQService();
+    }
+
+    @AfterClass
+    static public void tearDownClass() {
+        if (system != null) {
+            system.getServiceFactory().getManagementService().cleanupRecords();
+            system.stop();
+        }
+    }
+
+    @Before
+    public void setup() {
+        // set up EM
+        em = Persistence.createEntityManagerFactory("argus-pu").createEntityManager();
+        when(_emProviderMock.get()).thenReturn(em);
+        em.getTransaction().begin();
+    }
+
+    @After
+    public void teardown() {
+        // Roll back the transaction and force a GC pass; otherwise the EntityManager
+        // created above gets injected by Guice into unit tests that run afterwards.
+        em.getTransaction().rollback();
+        em.close();
+        em = null;
+        System.gc();
+    }
+
+    @Test
+    public void testAlertSchedulingWithGlobalInterlock() {
+
+        // Alert service with mocked TSDB service
+        DefaultAlertService alertServiceOriginal = new DefaultAlertService(system.getConfiguration(), mqService, _metricServiceMock, _auditServiceMock,
+                _tsdbServiceMock, _mailServiceMock, _historyServiceMock, _monitorServiceMock, imageDataRetrieverMock, system.getNotifierFactory(),
+                _emProviderMock);
+
+        DefaultAlertService alertService = spy(alertServiceOriginal);
+        DefaultSchedulingService schedulingService = new DefaultSchedulingService(alertService,
+                system.getServiceFactory().getGlobalInterlockService(),
+                userService,
+                system.getServiceFactory().getServiceManagementService(),
+                system.getServiceFactory().getAuditService(),
+                system.getConfiguration());
+
+        schedulingService.enableScheduling();
+        DefaultSchedulingService.SchedulingThread schedulingThread = spy(schedulingService.new SchedulingThread("schedule-alerts", GlobalInterlockService.LockType.ALERT_SCHEDULING));
+        int originalJobCount = schedulingThread.getEnabledJobs().size();
+
+        int noOfAlerts = TestUtils.random.nextInt(2) + 9;
+        PrincipalUser user = userService.findAdminUser();
+
+        for (int i = 0; i < noOfAlerts; i++) {
+            String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, " +
+                    "-1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)";
+
+            Alert alert = alertService.updateAlert(new Alert(user, user, TestUtils.createRandomName(), expression, "* * * * *"));
+            alert.setEnabled(true);
+
+            Trigger trigger = new Trigger(alert, TriggerType.GREATER_THAN_OR_EQ, "testTrigger", 0, 0);
+            alert.setTriggers(Arrays.asList(trigger));
+            Notification notification = new Notification("testNotification", alert, AuditNotifier.class.getName(), new ArrayList<String>(), 0);
+            alert.setNotifications(Arrays.asList(notification));
+
+            alertService.updateAlert(alert);
+        }
+        // Return null so the inner doSchedule loop stops
+        doReturn(null).when(schedulingThread).refreshMaster(any());
+        schedulingThread.doSchedule();
+        schedulingService.stopAlertScheduling();
+        // TODO: FIXME: Some other unit test can affect the number of enabled alerts; adding originalJobCount here is a workaround
+        verify(schedulingThread, times(originalJobCount + noOfAlerts)).doScheduleJob(any(), any());
+    }
+}
+/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved.
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/AbstractSchemaServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/AbstractSchemaServiceTest.java index d80cf8efd..c41c662d0 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/AbstractSchemaServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/AbstractSchemaServiceTest.java @@ -1,21 +1,28 @@ package com.salesforce.dva.argus.service.schema; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import com.salesforce.dva.argus.TestUtils; +import com.salesforce.dva.argus.entity.Metric; +import org.junit.Test; +import org.mockito.Mockito; +import org.mockito.stubbing.Answer; +import com.salesforce.dva.argus.system.SystemConfiguration; +import com.salesforce.dva.argus.service.MonitorService; + import java.util.Calendar; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; +import org.junit.Before; -import org.apache.commons.lang3.tuple.Pair; -import org.junit.Test; -import org.mockito.Mockito; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import java.util.Properties; -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Metric; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import static org.mockito.Mockito.mock; /** @@ -26,78 +33,70 @@ * @author Bhinav Sura (bhinav.sura@salesforce.com) * */ -public class AbstractSchemaServiceTest extends AbstractTest { +public class AbstractSchemaServiceTest { - private int scopesCount = 0; - private int scopeAndMetricsCount = 0; - private int metricsCount = 0; + private int scopesCreatedCount = 0; + private int metricsCreatedCount = 0; + + private ElasticSearchSchemaService _esSchemaService; + private SystemConfiguration systemConfig; + private String myClassName = AbstractSchemaServiceTest.class.getSimpleName(); + + + @Before + public void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + _esSchemaService = new ElasticSearchSchemaService(systemConfig, mockedMonitor, mockedElasticSearchUtils); + } @Test public void testPutEverythingCached() { - List metrics = createRandomMetrics("test-scope", "test-metric", 10); - - metrics.addAll(createRandomMetrics(null, null, 10)); + List metrics = TestUtils.createRandomMetrics(myClassName, "test-scope", "test-metric", 10); - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); + metrics.addAll(TestUtils.createRandomMetrics(myClassName, null, null, 10)); - ElasticSearchSchemaService spyService = _initializeSpyService(service); + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService); spyService.put(metrics); Set scopeNames = new HashSet<>(); - Set> scopeAndMetricNames = new HashSet<>(); for(Metric m : metrics) { scopeNames.add(m.getScope()); - scopeAndMetricNames.add(Pair.of(m.getScope(), m.getMetric())); } - assertEquals(metricsCount, metrics.size()); - assertEquals(scopeAndMetricsCount, scopeAndMetricNames.size()); - assertEquals(scopesCount, scopeNames.size()); - - // add to bloom filter cache - 
spyService._addToBloomFilter(spyService._fracture(metrics).get(0)); - spyService._addToBloomFilterScopeAndMetricOnly(spyService._fractureScopeAndMetrics(scopeAndMetricNames).get(0)); - spyService._addToBloomFilterScopeOnly(spyService._fractureScopes(scopeNames).get(0)); + assertEquals(metricsCreatedCount, metrics.size()); + assertEquals(scopesCreatedCount, scopeNames.size()); spyService.put(metrics); - // count should be same since we are re-reading cached value - - assertEquals(metricsCount, metrics.size()); - assertEquals(scopeAndMetricsCount, scopeAndMetricNames.size()); - assertEquals(scopesCount, scopeNames.size()); + initCounters(); + assertEquals(metricsCreatedCount, 0); + assertEquals(scopesCreatedCount, 0); } @Test public void testPutPartialCached() { - List metrics = createRandomMetrics("test-scope", "test-metric", 10); + List metrics = TestUtils.createRandomMetrics(myClassName, "test-scope", "test-metric", 10); - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spyService = _initializeSpyService(service); + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService); spyService.put(metrics); Set scopeNames = new HashSet<>(); - Set> scopeAndMetricNames = new HashSet<>(); for(Metric m : metrics) { scopeNames.add(m.getScope()); - scopeAndMetricNames.add(Pair.of(m.getScope(), m.getMetric())); } - assertEquals(metricsCount, metrics.size()); - assertEquals(scopeAndMetricsCount, scopeAndMetricNames.size()); - assertEquals(scopesCount, scopeNames.size()); - - // add to bloom filter cache - spyService._addToBloomFilter(spyService._fracture(metrics).get(0)); - spyService._addToBloomFilterScopeAndMetricOnly(spyService._fractureScopeAndMetrics(scopeAndMetricNames).get(0)); - spyService._addToBloomFilterScopeOnly(spyService._fractureScopes(scopeNames).get(0)); + assertEquals(metricsCreatedCount, metrics.size()); + assertEquals(scopesCreatedCount, scopeNames.size()); - List newMetrics = createRandomMetrics(null, null, 10); + List newMetrics = TestUtils.createRandomMetrics(myClassName, null, null, 10); // 1st metric already in cache (partial case scenario), and now we call put with both list of metrics @@ -106,88 +105,164 @@ public void testPutPartialCached() { spyService.put(newMetrics); scopeNames.clear(); - scopeAndMetricNames.clear(); for(Metric m : newMetrics) { scopeNames.add(m.getScope()); - scopeAndMetricNames.add(Pair.of(m.getScope(), m.getMetric())); } - assertEquals(metricsCount, newMetrics.size()); - assertEquals(scopeAndMetricsCount, scopeAndMetricNames.size()); - assertEquals(scopesCount, scopeNames.size()); + assertEquals(metricsCreatedCount, newMetrics.size()); + assertEquals(scopesCreatedCount, scopeNames.size()); + } + + @Test + public void testPutSameMetricWithDifferentTags() { + List metrics = TestUtils.createRandomMetrics(myClassName, "test-scope", "test-metric", 1); + Metric metric = metrics.get(0); + + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService); + Set scopeNames = new HashSet<>(); + scopeNames.add(metric.getScope()); + spyService.put(metrics); + // Both metadata and scope are new + assertEquals(metricsCreatedCount, 1); + assertEquals(scopesCreatedCount, 1); + + Map.Entry originalTagEntry = metric.getTags().entrySet().iterator().next(); + String originalTagKey = originalTagEntry.getKey(); + String originalTagValue = originalTagEntry.getValue(); + String randomTagKey = 
TestUtils.createRandomName(AbstractSchemaServiceTest.class.getSimpleName()); + String randomTagValue = TestUtils.createRandomName(AbstractSchemaServiceTest.class.getSimpleName()); + + // New tagvalue for same scope:metric should update metric + initCounters(); + metrics.get(0).setTag(originalTagKey, randomTagValue); + spyService.put(metrics); + assertEquals(metricsCreatedCount, 1); + assertEquals(scopesCreatedCount, 0); + + // New tagkey should update metric + initCounters(); + metrics.get(0).setTag(randomTagKey, originalTagValue); + spyService.put(metrics); + assertEquals(metricsCreatedCount, 1); + assertEquals(scopesCreatedCount, 0); + + // Same scope:metric:{seentag1=seenvalue1,seentag2=seenvalue2} doesn't need update and shouldn't + initCounters(); + metrics.get(0).setTag(randomTagKey, originalTagValue); + spyService.put(metrics); + assertEquals(metricsCreatedCount, 0); + assertEquals(scopesCreatedCount, 0); } @Test public void testPutNothingCached() { - List metrics = createRandomMetrics("test-scope", "test-metric", 10); + List metrics = TestUtils.createRandomMetrics(myClassName, "test-scope", "test-metric", 10); + + metrics.addAll(TestUtils.createRandomMetrics(myClassName, null, null, 10)); + + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService); + + // Make implementationSpecificPut specifically NOT add to the bloomfilters on a put + Mockito.doAnswer((Answer) invocation -> { + @SuppressWarnings("unchecked") + Set metricsToCreate = Set.class.cast(invocation.getArguments()[0]); + Set scopeNamesToCreate = Set.class.cast(invocation.getArguments()[1]); - metrics.addAll(createRandomMetrics(null, null, 10)); + metricsCreatedCount += metricsToCreate.size(); + scopesCreatedCount += scopeNamesToCreate.size(); - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spyService = _initializeSpyService(service); + return null; + }).when(spyService).implementationSpecificPut(Mockito.any(), Mockito.any(), Mockito.any()); spyService.put(metrics); Set scopeNames = new HashSet<>(); - Set> scopeAndMetricNames = new HashSet<>(); for(Metric m : metrics) { scopeNames.add(m.getScope()); - scopeAndMetricNames.add(Pair.of(m.getScope(), m.getMetric())); } - assertEquals(metricsCount, metrics.size()); - assertEquals(scopeAndMetricsCount, scopeAndMetricNames.size()); - assertEquals(scopesCount, scopeNames.size()); + assertEquals(metricsCreatedCount, metrics.size()); + assertEquals(scopesCreatedCount, scopeNames.size()); spyService.put(metrics); - assertEquals(metricsCount, 2 * metrics.size()); - assertEquals(scopeAndMetricsCount, 2 * scopeAndMetricNames.size()); - assertEquals(scopesCount, 2 * scopeNames.size()); + assertEquals(metricsCreatedCount, 2 * metrics.size()); + assertEquals(scopesCreatedCount, 2 * scopeNames.size()); } private ElasticSearchSchemaService _initializeSpyService(ElasticSearchSchemaService service) { ElasticSearchSchemaService spyService = Mockito.spy(service); initCounters(); - Mockito.doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - @SuppressWarnings("unchecked") - List metrics = List.class.cast(invocation.getArguments()[0]); + Mockito.doAnswer((Answer) invocation -> { + @SuppressWarnings("unchecked") + Set metricsToCreate = Set.class.cast(invocation.getArguments()[0]); + Set scopeNamesToCreate = Set.class.cast(invocation.getArguments()[1]); - Set scopeNames = 
Set.class.cast(invocation.getArguments()[1]); + metricsCreatedCount += metricsToCreate.size(); + scopesCreatedCount += scopeNamesToCreate.size(); - Set> scopeAndMetricNames = Set.class.cast(invocation.getArguments()[2]); - - scopesCount += scopeNames.size(); - scopeAndMetricsCount += scopeAndMetricNames.size(); - metricsCount += metrics.size(); - - return null; + // Simulate a successful put, which will add to the corresponding bloomsfilters + if (metricsToCreate.size() > 0) { + service._addToModifiedBloom(spyService._fracture(metricsToCreate).get(0)); + } + if (scopeNamesToCreate.size() > 0) { + service._addToModifiedBloom(spyService._fractureScopes(scopeNamesToCreate).get(0)); } - }).when(spyService).implementationSpecificPut(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + + return null; + }).when(spyService).implementationSpecificPut(Mockito.any(), Mockito.any(), Mockito.any()); return spyService; } private void initCounters() { - scopesCount = 0; - scopeAndMetricsCount = 0; - metricsCount = 0; + scopesCreatedCount = 0; + metricsCreatedCount = 0; } @Test - public void testNumHoursUntilNextFlushBloomFilter() { - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - + public void testNumHoursUntilNextClearBloomFilter() { Calendar calendar = Calendar.getInstance(); - - // Will wait 24 hours before next flush if at same hour boundary int hour = calendar.get(Calendar.HOUR_OF_DAY); - assertTrue(service.getNumHoursUntilTargetHour(hour) == 24); + // Will wait 24 hours before next flush if at same hour boundary + int secondsUntil = _esSchemaService.getNumSecondsUntilNthHourOfDay(hour, calendar); + assertTrue("secondsUntil should be between 23 hours and 24 hours but was " + secondsUntil,secondsUntil >= 23 * 60 * 60 && secondsUntil <= 24 * 60 * 60); + + calendar.set(Calendar.HOUR_OF_DAY, Math.floorMod(hour - 2, 24)); + secondsUntil = _esSchemaService.getNumSecondsUntilNthHourOfDay(hour, calendar); + assertTrue("secondsUntil should be between 1 hours and 2 hours but was " + secondsUntil,secondsUntil >= 1 * 60 * 60 && secondsUntil <= 2 * 60 * 60); + + calendar.set(Calendar.HOUR_OF_DAY, Math.floorMod(hour + 2, 24)); + secondsUntil = _esSchemaService.getNumSecondsUntilNthHourOfDay(hour, calendar); + assertTrue("secondsUntil should be between 21 hours and 22 hours but was " + secondsUntil, secondsUntil >= 21 * 60 * 60 && secondsUntil <= 22 * 60 * 60); + } + + @Test + public void testNumHoursUntilNextFlushBloomFilter() { + // use Wednesday 6 AM this week as start date + Calendar wedAtSix = Calendar.getInstance(); + wedAtSix.set(Calendar.HOUR_OF_DAY, 6); + wedAtSix.set(Calendar.DAY_OF_WEEK, 4); + + // Test Sunday, Monday Tuesday, Wednesday of next week @ 4 AM + for (int dayIndex = 0; dayIndex < 3; dayIndex++) { + int nthHour = dayIndex * 24 + 4; + int secondsUntil = _esSchemaService.getNumSecondsUntilNthHourOfWeek(nthHour, wedAtSix); + int floorHoursUntil = secondsUntil / 60 / 60; + int expectedHours = (4 + dayIndex) * 24 - 2; + assertTrue("hoursUntil should be between " + (expectedHours - 1) + " and " + expectedHours, expectedHours - 1 <= floorHoursUntil && floorHoursUntil <= expectedHours); + } + // Test Wednesday Thursday, Fri, Sat of this week @ 8 AM + for (int dayIndex = 3; dayIndex < 7; dayIndex++) { + int nthHour = dayIndex * 24 + 8; + int secondsUntil = _esSchemaService.getNumSecondsUntilNthHourOfWeek(nthHour, wedAtSix); + int floorHoursUntil = secondsUntil / 60 / 60; + int expectedHours = 
(dayIndex - 3) * 24 + 2; + assertTrue("hoursUntil should be between " + (expectedHours - 1) + " and " + expectedHours, expectedHours - 1 <= floorHoursUntil && floorHoursUntil <= expectedHours); + } } } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryServiceTest.java index af4b1801b..bc3f822d8 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/CachedDiscoveryServiceTest.java @@ -2,7 +2,6 @@ import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.service.CacheService; import com.salesforce.dva.argus.service.DiscoveryService; import com.salesforce.dva.argus.service.schema.CachedDiscoveryService; @@ -14,30 +13,51 @@ import java.util.List; -public class CachedDiscoveryServiceTest extends AbstractTest { - +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + +public class CachedDiscoveryServiceTest { + private static final String CACHED_QUERIES = "[{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB0\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB1\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB2\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB3\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB4\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB5\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB6\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB7\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB8\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB9\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB10\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, 
{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB11\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB12\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB13\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB14\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB15\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB16\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB17\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB18\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB19\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB20\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB21\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB22\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB23\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB24\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB25\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB26\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB27\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, 
{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB28\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB29\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"scope\"} ]"; - + + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + @Test public void testWildcardExpansionWithinLimit() { - + CacheService cacheServiceMock = mock(CacheService.class); when(cacheServiceMock.get(anyString())).thenReturn(CACHED_QUERIES); DiscoveryService discoveryServiceMock = mock(DiscoveryService.class); - - CachedDiscoveryService service = new CachedDiscoveryService(cacheServiceMock, discoveryServiceMock, system.getConfiguration()); + + CachedDiscoveryService service = new CachedDiscoveryService(cacheServiceMock, discoveryServiceMock, system.getConfiguration(), null); List queries = service.getMatchingQueries(new MetricQuery("scope*", "metric", null, System.currentTimeMillis() - (30 * 24 * 60 * 60 * 1000L), System.currentTimeMillis())); assertEquals(30, queries.size()); } - + @Test(expected = WildcardExpansionLimitExceededException.class) public void testWildcardExpansionExceedingLimit() { - + CacheService cacheServiceMock = mock(CacheService.class); when(cacheServiceMock.get(anyString())).thenReturn(CACHED_QUERIES); DiscoveryService discoveryServiceMock = mock(DiscoveryService.class); - - CachedDiscoveryService service = new CachedDiscoveryService(cacheServiceMock, discoveryServiceMock, system.getConfiguration()); + + CachedDiscoveryService service = new CachedDiscoveryService(cacheServiceMock, discoveryServiceMock, system.getConfiguration(), null); service.getMatchingQueries(new MetricQuery("scope*", "metric", null, System.currentTimeMillis() - (300 * 24 * 60 * 60 * 1000L), System.currentTimeMillis())); } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryServiceTest.java index 62435bf07..06f863255 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/DefaultDiscoveryServiceTest.java @@ -28,13 +28,13 @@ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.service.schema; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder; +import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.SchemaService; import com.salesforce.dva.argus.service.schema.DefaultDiscoveryService; import com.salesforce.dva.argus.service.schema.WildcardExpansionLimitExceededException; @@ -53,30 +53,57 @@ import static org.mockito.Mockito.*; -public class DefaultDiscoveryServiceTest extends AbstractTest { - +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +@RunWith(MockitoJUnitRunner.class) +public class DefaultDiscoveryServiceTest { + + private static SystemMain system; + @Mock + private MonitorService monitorService; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + @Test public void testWildcardQueriesMatchWithinLimit() { - + SchemaService schemaServiceMock = mock(SchemaService.class); List records = new ArrayList<>(); records.add(new MetricSchemaRecord(null, "scope0", "metric0", "source", "unittest")); records.add(new MetricSchemaRecord(null, "scope1", "metric1", "source", "unittest")); when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); Map tags = new HashMap(); tags.put("source", "unittest"); MetricQuery query = new MetricQuery("scope[0|1]", "metric[0|1]", tags, 1L, 2L); List queries = discoveryService.getMatchingQueries(query); - + assertEquals(2, queries.size()); assertEquals(new MetricQuery("scope0", "metric0", tags, 1L, 2L), queries.get(0)); assertEquals(new MetricQuery("scope1", "metric1", tags, 1L, 2L), queries.get(1)); } - + /** * Assume that following schemarecords exist in the database: * scope0,metric0,source,unittest0,null @@ -85,9 +112,9 @@ public void testWildcardQueriesMatchWithinLimit() { */ @Test public void testWildcardQueriesMatchMultipleTags() { - + SchemaService schemaServiceMock = mock(SchemaService.class); - + MetricSchemaRecordQuery queryForTag1 = new MetricSchemaRecordQueryBuilder().scope("scope0") .metric("metric0") .tagKey("source") @@ -101,12 +128,12 @@ public void testWildcardQueriesMatchMultipleTags() { .tagValue("device[1]") .limit(500) .build(); - - when(schemaServiceMock.get(queryForTag1)) - .thenReturn(Arrays.asList(new MetricSchemaRecord(null, "scope0", "metric0", "source", "unittest0"))); + +// when(schemaServiceMock.get(queryForTag1)) +// .thenReturn(Arrays.asList(new MetricSchemaRecord(null, "scope0", "metric0", "source", "unittest0"))); when(schemaServiceMock.get(queryForTag2)).thenReturn(new ArrayList<>()); - - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, 
system.getConfiguration()); + + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); Map tags = new HashMap(); tags.put("source", "unittest0"); @@ -114,10 +141,10 @@ public void testWildcardQueriesMatchMultipleTags() { MetricQuery query = new MetricQuery("scope0", "metric0", tags, 1L, 2L); List matchedQueries = discoveryService.getMatchingQueries(query); - + assertTrue(matchedQueries.isEmpty()); } - + /** * Assume that following schemarecords exist in the database: * scope0,metric0,source,unittest0,null @@ -129,9 +156,9 @@ public void testWildcardQueriesMatchMultipleTags() { */ @Test public void testWildcardQueriesMatchMultipleTags1() { - + SchemaService schemaServiceMock = mock(SchemaService.class); - + MetricSchemaRecordQuery queryForTag1 = new MetricSchemaRecordQueryBuilder().scope("scope0") .metric("metric0") .tagKey("source") @@ -145,13 +172,13 @@ public void testWildcardQueriesMatchMultipleTags1() { .tagValue("device[1]") .limit(500) .build(); - - when(schemaServiceMock.get(queryForTag1)).thenReturn(Arrays.asList( - new MetricSchemaRecord(null, "scope0", "metric0", "source", "unittest0"), - new MetricSchemaRecord(null, "scope1", "metric0", "source", "unittest0"))); - when(schemaServiceMock.get(queryForTag2)).thenReturn(new ArrayList<>()); - - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); + +// when(schemaServiceMock.get(queryForTag1)).thenReturn(Arrays.asList( +// new MetricSchemaRecord(null, "scope0", "metric0", "source", "unittest0"), +// new MetricSchemaRecord(null, "scope1", "metric0", "source", "unittest0"))); +// when(schemaServiceMock.get(queryForTag2)).thenReturn(new ArrayList<>()); + + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); Map tags = new HashMap(); tags.put("source", "unittest0"); @@ -159,13 +186,13 @@ public void testWildcardQueriesMatchMultipleTags1() { MetricQuery query = new MetricQuery("scope?", "metric0", tags, 1L, 2L); List matchedQueries = discoveryService.getMatchingQueries(query); - + assertTrue(matchedQueries.isEmpty()); } - + @Test(expected = WildcardExpansionLimitExceededException.class) public void testWildcardQueriesMatchExceedingLimit() { - + SchemaService schemaServiceMock = mock(SchemaService.class); List records = new ArrayList<>(); records.add(new MetricSchemaRecord(null, "scope", "metric0", "source", "unittest")); @@ -198,21 +225,21 @@ public void testWildcardQueriesMatchExceedingLimit() { records.add(new MetricSchemaRecord(null, "scope", "metric27", "source", "unittest")); records.add(new MetricSchemaRecord(null, "scope", "metric28", "source", "unittest")); records.add(new MetricSchemaRecord(null, "scope", "metric29", "source", "unittest")); - + when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); - + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); + Map tags = new HashMap(); tags.put("source", "unittest"); - MetricQuery query = new MetricQuery("scope", "metric*", null, System.currentTimeMillis() - (100 * 24 * 60 * 60 * 1000L), System.currentTimeMillis()); + MetricQuery query = new MetricQuery("scope", "metric*", null, System.currentTimeMillis() - (250 * 24 * 60 * 60 * 
1000L), System.currentTimeMillis()); List queries = discoveryService.getMatchingQueries(query); assertEquals(30, queries.size()); } - + @Test public void testWildcardQueriesMatchWithDownsampling() { - + SchemaService schemaServiceMock = mock(SchemaService.class); List records = new ArrayList<>(); records.add(new MetricSchemaRecord(null, "scope", "metric0", "source", "unittest")); @@ -245,10 +272,10 @@ public void testWildcardQueriesMatchWithDownsampling() { records.add(new MetricSchemaRecord(null, "scope", "metric27", "source", "unittest")); records.add(new MetricSchemaRecord(null, "scope", "metric28", "source", "unittest")); records.add(new MetricSchemaRecord(null, "scope", "metric29", "source", "unittest")); - + when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); - + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); + Map tags = new HashMap(); tags.put("source", "unittest"); @@ -261,11 +288,11 @@ public void testWildcardQueriesMatchWithDownsampling() { @Test public void testWildcardQueriesNoMatch() { - + SchemaService schemaServiceMock = mock(SchemaService.class); List records = new ArrayList<>(); when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); Map tags = new HashMap(); tags.put("source", "unittest"); @@ -278,12 +305,12 @@ public void testWildcardQueriesNoMatch() { @Test public void testNonWildcardQuery() { - + SchemaService schemaServiceMock = mock(SchemaService.class); List records = new ArrayList<>(); - when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); - DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration()); - +// when(schemaServiceMock.get(any(MetricSchemaRecordQuery.class))).thenReturn(records); + DefaultDiscoveryService discoveryService = new DefaultDiscoveryService(schemaServiceMock, system.getConfiguration(), monitorService); + Map tags = new HashMap(); tags.put("recordType", "A"); @@ -293,6 +320,6 @@ public void testNonWildcardQuery() { assertEquals(1, queries.size()); assertEquals(query, queries.get(0)); } - + } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ \ No newline at end of file +/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaServiceTest.java index 343a667f5..c11e70ed2 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchSchemaServiceTest.java @@ -1,166 +1,58 @@ package com.salesforce.dva.argus.service.schema; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonElement; import com.google.gson.JsonParser; -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Metric; -import com.salesforce.dva.argus.entity.MetatagsRecord; +import com.salesforce.dva.argus.entity.KeywordQuery; import com.salesforce.dva.argus.entity.MetricSchemaRecord; import com.salesforce.dva.argus.entity.MetricSchemaRecordQuery; -import com.salesforce.dva.argus.entity.ScopeAndMetricOnlySchemaRecord; +import com.salesforce.dva.argus.service.MonitorService; import com.salesforce.dva.argus.service.SchemaService; +import com.salesforce.dva.argus.system.SystemConfiguration; import com.salesforce.dva.argus.system.SystemException; -import org.apache.commons.lang3.StringUtils; import org.apache.http.entity.BasicHttpEntity; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; +import org.junit.After; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; -import org.mockito.*; +import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; +import org.junit.BeforeClass; +import java.util.Properties; + import java.io.ByteArrayInputStream; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.HashMap; +import java.util.HashSet; +import java.util.concurrent.atomic.AtomicInteger; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; -import static org.mockito.Mockito.*; - +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; -public class ElasticSearchSchemaServiceTest extends AbstractTest { +public class ElasticSearchSchemaServiceTest { private RestClient restClient; - - private String createSucessReply = String.join("\n", - "{" + - " \"took\": 178," + - " \"errors\": false," + - " \"items\": [" + - " {" + - " \"create\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"0f56139fa1c2a1834405bffd8e4570f1\"," + - " \"_version\": 1," + - " \"result\": \"created\"," + - " \"_shards\": {" + - " \"total\": 2," + - " \"successful\": 2," + - " \"failed\": 0" + - " }," + - " \"created\": true," + - " \"status\": 201" + - " }" + - " 
}," + - " {" + - " \"create\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"5b5d61f40ff3df194cc0e5b2afe0c5b4\"," + - " \"_version\": 1," + - " \"result\": \"created\"," + - " \"_shards\": {" + - " \"total\": 2," + - " \"successful\": 2," + - " \"failed\": 0" + - " }," + - " \"created\": true," + - " \"status\": 201" + - " }" + - " }" + - " ]" + - "}"); - - - private String createFailReply = String.join("\n", - "{", - " \"took\": 2," + - " \"errors\": true," + - " \"items\": [" + - " {" + - " \"create\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"0f56139fa1c2a1834405bffd8e4570f1\"," + - " \"status\": 409," + - " \"error\": {" + - " \"type\": \"version_conflict_engine_exception\"," + - " \"reason\": \"[scopemetric_type][9602e82b184a4930c2cf5de4651e0b3b]: version conflict, document already exists (current version [110])\"," + - " \"index_uuid\": \"zxhVd68hTPmEfCWYKtkjSQ\"," + - " \"shard\": \"0\"," + - " \"index\": \"scopemetricnames\"" + - " }" + - " }" + - " }," + - " {" + - " \"create\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"5b5d61f40ff3df194cc0e5b2afe0c5b4\"," + - " \"status\": 409," + - " \"error\": {" + - " \"type\": \"version_conflict_engine_exception\"," + - " \"reason\": \"[scopemetric_type][398b3cee85ea47fa673a2fc3ac9970c3]: version conflict, document already exists (current version [110])\"," + - " \"index_uuid\": \"zxhVd68hTPmEfCWYKtkjSQ\"," + - " \"shard\": \"0\"," + - " \"index\": \"scopemetricnames\"" + - " }" + - " }" + - " }" + - " ]" + - "}"); - - private String updateSucessReply = String.join("\n", - "{", - " \"took\": 2," + - " \"errors\": false," + - " \"items\": [" + - " {" + - " \"update\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"0f56139fa1c2a1834405bffd8e4570f1\"," + - " \"_version\": 87," + - " \"result\": \"noop\"," + - " \"_shards\": {" + - " \"total\": 2," + - " \"successful\": 2," + - " \"failed\": 0" + - " }," + - " \"status\": 200" + - " }" + - " }," + - " {" + - " \"update\": {" + - " \"_index\": \"scopemetricnames\"," + - " \"_type\": \"scopemetric_type\"," + - " \"_id\": \"5b5d61f40ff3df194cc0e5b2afe0c5b4\"," + - " \"_version\": 87," + - " \"result\": \"noop\"," + - " \"_shards\": {" + - " \"total\": 2," + - " \"successful\": 2," + - " \"failed\": 0" + - " }," + - " \"status\": 200" + - " }" + - " }" + - " ]" + - "}"); - private String getReply = String.join("\n", "{", " \"took\": 1,", @@ -221,39 +113,6 @@ public class ElasticSearchSchemaServiceTest extends AbstractTest { " }", "}"); - private String scopeAndMetricQuery = String.join("\n", - "{", - " \"query\": {", - " \"bool\": {", - " \"filter\": [", - " {", - " \"regexp\": {", - " \"metric.raw\": \"argus.*\"", - " }", - " },", - " {", - " \"regexp\": {", - " \"scope.raw\": \"system\"", - " }", - " }", - " ]", - " }", - " },", - " \"size\": 0,", - " \"aggs\": {", - " \"distinct_values\": {", - " \"terms\": {", - " \"field\": \"metric.raw\",", - " \"order\": {", - " \"_term\": \"asc\"", - " },", - " \"size\": 10000,", - " \"execution_hint\": \"map\"", - " }", - " }", - " }", - "}"); - private String metricQueryTagvRegex = String.join("\n", "{", " \"query\": {", @@ -335,92 +194,24 @@ public class ElasticSearchSchemaServiceTest extends AbstractTest { " }", "}"); - @Test - public void testPutCreateUsingScopeAndMetricSchemaIndex() throws IOException { - - List metrics = new 
ArrayList<>(); - - for(char ch = 'a'; ch < 'l'; ch++) { - metrics.add(new Metric("scope" + ch, "metric" + ch)); - } - - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - - ElasticSearchSchemaService spyService = _initializeSpyService(service, createSucessReply, createSucessReply); - - List records = new ArrayList<>(); - - for(Metric m : metrics) { - ScopeAndMetricOnlySchemaRecord msr = new ScopeAndMetricOnlySchemaRecord(m.getScope(), m.getMetric()); - records.add(msr); - } - - spyService.upsertScopeAndMetrics(records); - - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor createJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); + static private ElasticSearchSchemaService _esSchemaService; + static private SystemConfiguration systemConfig; - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), createJsonCaptor.capture()); - - String requestUrl = requestUrlCaptor.getValue(); - String createJson = EntityUtils.toString(createJsonCaptor.getValue()); - - assertTrue(createJson.contains("create")); - assertFalse(createJson.contains("update")); - assertEquals(StringUtils.countMatches(createJson, "cts"), 11); - assertEquals(StringUtils.countMatches(createJson, "mts"), 11); - assertEquals("/scopemetricnames/scopemetric_type/_bulk", requestUrl); + @BeforeClass + public static void setUpClass() { + Properties config = new Properties(); + systemConfig = new SystemConfiguration(config); + MonitorService mockedMonitor = mock(MonitorService.class); + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + _esSchemaService = new ElasticSearchSchemaService(systemConfig, mockedMonitor, mockedElasticSearchUtils); } - @Test - public void testPutUpdateUsingScopeAndMetricSchemaIndex() throws IOException { - - List metrics = new ArrayList<>(); - - for(char ch = 'a'; ch < 'l'; ch++) { - metrics.add(new Metric("scope" + ch, "metric" + ch)); - } - - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - - ElasticSearchSchemaService spyService = _initializeSpyService(service, createFailReply, updateSucessReply); - - List records = new ArrayList<>(); - - for(Metric m : metrics) { - ScopeAndMetricOnlySchemaRecord msr = new ScopeAndMetricOnlySchemaRecord(m.getScope(), m.getMetric()); - records.add(msr); - } - - spyService.upsertScopeAndMetrics(records); - - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor createJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); - - verify(restClient, times(2)).performRequest(any(), requestUrlCaptor.capture(), any(), createJsonCaptor.capture()); - - List requestUrls = requestUrlCaptor.getAllValues(); - List createJsonEntities = createJsonCaptor.getAllValues(); - - List createJsons = new ArrayList<>(); - - for(StringEntity createJsonEntity : createJsonEntities) { - createJsons.add(EntityUtils.toString(createJsonEntity)); - } - - assertTrue(createJsons.get(0).contains("create")); - assertTrue(createJsons.get(1).contains("update")); - - assertEquals(StringUtils.countMatches(createJsons.get(0), "cts"), 11); - assertEquals(StringUtils.countMatches(createJsons.get(0), "mts"), 11); - - assertEquals(StringUtils.countMatches(createJsons.get(1), "cts"), 0); - assertEquals(StringUtils.countMatches(createJsons.get(1), "mts"), 2); - - 
assertEquals("/scopemetricnames/scopemetric_type/_bulk", requestUrls.get(0)); - assertEquals("/scopemetricnames/scopemetric_type/_bulk", requestUrls.get(1)); + @After + public void tearDown() { + _esSchemaService.clearBlooms(); } + /* @Test public void testPutCreateUsingMetatagsIndex() throws IOException { @@ -435,32 +226,142 @@ public void testPutCreateUsingMetatagsIndex() throws IOException { myMetric.setMetatagsRecord(metatags); metrics.add(myMetric); - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - - ElasticSearchSchemaService spyService = _initializeSpyService(service, createSucessReply, createSucessReply); - - List records = new ArrayList<>(); + Set records = new HashSet<>(); for(Metric m : metrics) { MetatagsRecord msr = new MetatagsRecord(m.getMetatagsRecord().getMetatags(), m.getMetatagsRecord().getKey()); records.add(msr); } - spyService.upsertMetatags(records); - - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor createJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); - - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), createJsonCaptor.capture()); - - String requestUrl = requestUrlCaptor.getValue(); - String createJson = EntityUtils.toString(createJsonCaptor.getValue()); + Pair retPair = _esSchemaService.getListAndBodyForUpsertMetatags(records); + String createJson = retPair.getValue(); assertTrue(createJson.contains("create")); assertFalse(createJson.contains("update")); assertTrue(createJson.contains("cts")); assertTrue(createJson.contains("mts")); - assertEquals("/metatags/metatags_type/_bulk", requestUrl); + } */ + + @Test + public void testGetWithLimitZeroSingleRequest() throws IOException { + String reply = "{\"took\":166,\"timed_out\":false,\"_shards\":{\"total\":30,\"successful\":30,\"failed\":0},\"hits\":{\"total\":4912,\"max_score\":0,\"hits\":[{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"52263bdece06f6734ed6188afae9311c\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.SP2.acs-ist20\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname.abc.com\",\"mts\":1561068744947,\"cts\":1561068744947,\"ets\":1565561544947}},{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"a3abe4e1cacc45328a1f06d2126a2af5\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.SP2.twist38\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname7.abc.com\",\"mts\":1561068494614,\"cts\":1561068494614,\"ets\":1565561294614}}]}}"; + ObjectMapper mapper = new ObjectMapper(); + ElasticSearchSchemaService service = spy(_esSchemaService); + restClient = mock(RestClient.class); + service.setRestClient(restClient); + doAnswer(invocation -> reply).when(service).extractResponse(any()); + MetricSchemaRecordQuery query = new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder().scope("system*") + .metric("*") + .tagKey("*") + .tagValue("*") + .namespace("*") + .limit(0) + .build(); + service.get(query); + verify(restClient, times(1)).performRequest(any(Request.class)); + } + + @Test + public void testGetWithLimitZeroTripleRequest() throws IOException { + String firstReply = 
"{\"took\":166,\"timed_out\":false,\"_shards\":{\"total\":30,\"successful\":30,\"failed\":0},\"hits\":{\"total\":20001,\"max_score\":0,\"hits\":[{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"52263bdece06f6734ed6188afae9311c\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.SP2.acs-ist20\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname.abc.com\",\"mts\":1561068744947,\"cts\":1561068744947,\"ets\":1565561544947}},{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"a3abe4e1cacc45328a1f06d2126a2af5\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.SP2.twist38\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname1.abc.com\",\"mts\":1561068494614,\"cts\":1561068494614,\"ets\":1565561294614}}]}}"; + String secondReply = "{\"_scroll_id\":\"DnF1ZXJ5VGhlbkZldGNoHgAAAAAhdTEjFnJEQi1zWl9jVG95OVVYLWpkcjJ5S2cAAAAAIXUxIhZyREItc1pfY1RveTlVWC1qZHIyeUtnAAAAAW-10tMWUFFKYzVDeXhUbktLRGFjSENVcHZKdwAAAAFskDgcFmt1T2VDZ0c2UVRxLXY1TXlKRnd0ckEAAAABbJA4HRZrdU9lQ2dHNlFUcS12NU15SkZ3dHJBAAAAACGPEkMWdHUzLWFOSWZRTGU3ek56YzNsaTF6QQAAAAFsBMd-FlRvRnV5alotUk5xbUxxRmwtUXROdncAAAABbATHfxZUb0Z1eWpaLVJOcW1McUZsLVF0TnZ3AAAAACFp_IIWUzQxMXhreFlTR3FWeG5IRnVnSnhpUQAAAAFsGX66FnhENG1NdmFOU2dxVTFfRUZibXhhTVEAAAABd8cv6RZsOEtuREYwZVEyS2U2ZWJzeGZNNlB3AAAAAWw8oPsWWF83dHk2QzdRZ3VzS0p3bldfOHcxUQAAAAFzn27JFm5uSjJUcDZJU2RDbGN6eTVlZmdyWHcAAAABc59uyBZubkoyVHA2SVNkQ2xjenk1ZWZnclh3AAAAACEV3bMWUXJOQ3Z3dURRQ0tydHBrU2hON3FEdwAAAAFvMVndFlJXWTdPdm5PUVlXTnk4Nktqd3B5SGcAAAABbzFZ3hZSV1k3T3ZuT1FZV055ODZLandweUhnAAAAACEBnW4WdGlIUjh5MEtSX0NMaGFpZlRGaWdZZwAAAABF45wcFjllN1ZSSkpNU3Etd1JwVnZ3SXhQZVEAAAABeCNZPhZMcllZZTlwNVN3ZW96VFEzcGxORDdRAAAAAWyQOB4Wa3VPZUNnRzZRVHEtdjVNeUpGd3RyQQAAAAAhTpoyFlB2YU9NcnREUkRtSzVSSGM2ajNnS2cAAAAAIU6aMxZQdmFPTXJ0RFJEbUs1UkhjNmozZ0tnAAAAACE6wJcWN1M1UEZMYWZRQ0tPcnJkSEplaXI1dwAAAAAhOsCYFjdTNVBGTGFmUUNLT3JyZEhKZWlyNXcAAAAAITrAmRY3UzVQRkxhZlFDS09ycmRISmVpcjV3AAAAATbSky0WMHBNYlJUQlFUYTZkNGVoSEo3RURUQQAAAAFuDuwmFkVhRjR1WlJPU09TYkhFaXMwSHEzb1EAAAABbg7sJxZFYUY0dVpST1NPU2JIRWlzMEhxM29RAAAAAW-10tQWUFFKYzVDeXhUbktLRGFjSENVcHZKdw==\",\"took\":116,\"timed_out\":false,\"_shards\":{\"total\":30,\"successful\":30,\"failed\":0},\"hits\":{\"total\":15023,\"max_score\":0,\"hits\":[{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"3c9c78d3cd1abea74db350a0be0739e7\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.NONE.hdaas\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname2.abc.com\",\"mts\":1559707972543,\"cts\":1559707972543,\"ets\":1564200772543}},{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"401fa5083eaea30f37aa0d0795ce3fb0\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.NONE.hdaas\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname3.abc.com\",\"mts\":1558649134083,\"cts\":1558649134083,\"ets\":1562537134083}}]}}"; + String thirdReply = 
"{\"_scroll_id\":\"DnF1ZXJ5VGhlbkZldGNoHgAAAAF4XWLeFlpaSmQ1YURfUnd1eXpMczNWTW5TSlEAAAABeF1i3xZaWkpkNWFEX1J3dXl6THMzVk1uU0pRAAAAAXhdYuAWWlpKZDVhRF9Sd3V5ekxzM1ZNblNKUQAAAAFtJwloFm9RdWllMnJsVEJhWlA2alRJR3d6TFEAAAAAIZCQSBZ0dTMtYU5JZlFMZTd6TnpjM2xpMXpBAAAAAWwGRYQWVG9GdXlqWi1STnFtTHFGbC1RdE52dwAAAAAha3qHFlM0MTF4a3hZU0dxVnhuSEZ1Z0p4aVEAAAAAIWt6iBZTNDExeGt4WVNHcVZ4bkhGdWdKeGlRAAAAAXfo2lUWaFZsQi1hVGVRakNsWXUtc3V3dGZJdwAAAAF36NpUFmhWbEItYVRlUWpDbFl1LXN1d3RmSXcAAAABbBr8uhZ4RDRtTXZhTlNncVUxX0VGYm14YU1RAAAAAWwa_LsWeEQ0bU12YU5TZ3FVMV9FRmJteGFNUQAAAAFsPh8GFlhfN3R5NkM3UWd1c0tKd25XXzh3MVEAAAAAIRdbuBZRck5Ddnd1RFFDS3J0cGtTaE43cUR3AAAAAWyUBksWWGlWZ3ZCWWlRM3VjM3Mxa3BWRGtIQQAAAAAhF1u5FlFyTkN2d3VEUUNLcnRwa1NoTjdxRHcAAAABNmDpNxZSZFNsTl9YS1QxeS0wNUxYbU1TSWdnAAAAAEXlGiQWOWU3VlJKSk1TcS13UnBWdndJeFBlUQAAAAAhAxt5FnRpSFI4eTBLUl9DTGhhaWZURmlnWWcAAAABbPZgbBZIeERJRG5fSVQ0Q3hUYnA1ekR2WU13AAAAAXAGF6AWTHdwTUhCajJUZk9STWIyN0RPVkVEQQAAAAFwBhehFkx3cE1IQmoyVGZPUk1iMjdET1ZFREEAAAABcAYXohZMd3BNSEJqMlRmT1JNYjI3RE9WRURBAAAAACFQGDgWUHZhT01ydERSRG1LNVJIYzZqM2dLZwAAAAE21BEyFjBwTWJSVEJRVGE2ZDRlaEhKN0VEVEEAAAAAITFRGRZDYVMxdEdiQVMzS2liS0FCSEkxcElBAAAAAW4Qai4WRWFGNHVaUk9TT1NiSEVpczBIcTNvUQAAAAAhMVEaFkNhUzF0R2JBUzNLaWJLQUJISTFwSUEAAAABdwSJLRZicGgwSHR3OFRwdTlGeGcwYm51MWNRAAAAAW2p_BIWRU9waVVXdXFUV213SkF6UDRGTzhPQQ==\",\"took\":146,\"timed_out\":false,\"_shards\":{\"total\":30,\"successful\":30,\"failed\":0},\"hits\":{\"total\":15023,\"max_score\":0,\"hits\":[{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"5d0f580b9281a8ab0cf5f71c9bb4f700\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.NONE.hdaas\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname4.abc.com\",\"mts\":1561385448995,\"cts\":1561385448995,\"ets\":1565878248995}},{\"_index\":\"tags_v1\",\"_type\":\"metadata_type\",\"_id\":\"4bc0b8c2954343ad4e50ff42c727f45a\",\"_score\":0,\"_source\":{\"scope\":\"system.PRD.SP2.mist33\",\"metric\":\"CpuPerc.cpu.idle\",\"tagk\":\"device\",\"tagv\":\"myhostname5.abc.com\",\"mts\":1561385455276,\"cts\":1561385455276,\"ets\":1565878255276}}]}}"; + ObjectMapper mapper = new ObjectMapper(); + ElasticSearchSchemaService service = spy(_esSchemaService); + restClient = mock(RestClient.class); + service.setRestClient(restClient); + doAnswer(new Answer() { + int callCount = 0; + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + callCount++; + if (callCount == 1) { + Request request = invocationOnMock.getArgument(0, Request.class); + String jsonStr = EntityUtils.toString(request.getEntity()); + JsonNode tree = mapper.readTree(jsonStr); + request.getEndpoint().endsWith("_search"); + assertNull(tree.get("scroll")); + assertNull(tree.get("scroll_id")); + } else if (callCount == 2) { + Request request = invocationOnMock.getArgument(0, Request.class); + String jsonStr = EntityUtils.toString(request.getEntity()); + JsonNode tree = mapper.readTree(jsonStr); + request.getEndpoint().contains("_search?scroll="); + assertNull(tree.get("scroll")); + assertNull(tree.get("scroll_id")); + } else if (callCount == 3) { + Request request = invocationOnMock.getArgument(0, Request.class); + String jsonStr = EntityUtils.toString(request.getEntity()); + JsonNode tree = mapper.readTree(jsonStr); + request.getEndpoint().endsWith("/_search/scroll"); + assertNotNull(tree.get("scroll")); + assertNotNull(tree.get("scroll_id")); + } + return null; + } + }).when(restClient).performRequest(any(Request.class)); + doAnswer(new Answer() { + int callCount = 0; + @Override + public Object answer(InvocationOnMock 
invocationOnMock) { + callCount++; + switch (callCount) { + case 1: + return firstReply; + case 2: + return secondReply; + case 3: + return thirdReply; + default: + fail("There shouldn't be a 4th extractResponse call by this test"); + return ""; + } + } + }).when(service).extractResponse(any()); + MetricSchemaRecordQuery query = new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder().scope("system*") + .metric("*") + .tagKey("*") + .tagValue("*") + .namespace("*") + .limit(0) + .build(); + service.get(query); + verify(restClient, times(3)).performRequest(any(Request.class)); + verify(service, times(3)).extractResponse(any()); + } + + @Test + public void testGetWithScroll() throws IOException { + String reply = "{\"_scroll_id\":\"DnF1ZXJ5VGhlbkZldGNoMgAAAAENlX7HFjdTNVBGTGFmUUNLT3JyZEhKZWlyNXcAAAABDYdg9xZQUUpjNUN5eFRuS0tEYWNIQ1Vwdkp3AAAAAAN8R34WTHJZWWU5cDVTd2VvelRRM3BsTkQ3UQAAAAAGraWTFnRpSFI4eTBLUl9DTGhhaWZURmlnWWcAAAABCFeY2RY5ZTdWUkpKTVNxLXdScFZ2d0l4UGVRAAAAAQ2HYPgWUFFKYzVDeXhUbktLRGFjSENVcHZKdwAAAAAGraWUFnRpSFI4eTBLUl9DTGhhaWZURmlnWWcAAAABCefkTxZ4RDRtTXZhTlNncVUxX0VGYm14YU1RAAAAAAJYwTcWUmRTbE5fWEtUMXktMDVMWG1NU0lnZwAAAAADfUxpFjBwTWJSVEJRVGE2ZDRlaEhKN0VEVEEAAAABDYNu5RZFYUY0dVpST1NPU2JIRWlzMEhxM29RAAAAAAHPMhIWbDhLbkRGMGVRMktlNmVic3hmTTZQdwAAAAEIC84XFjNnYkNhamRMUWdpRDhJRlpTR3l5c2cAAAAA9Bb5phZSV1k3T3ZuT1FZV055ODZLandweUhnAAAAAAatpZUWdGlIUjh5MEtSX0NMaGFpZlRGaWdZZwAAAAENbfO-FlhfN3R5NkM3UWd1c0tKd25XXzh3MVEAAAAA9Bb5pxZSV1k3T3ZuT1FZV055ODZLandweUhnAAAAAQ2DbuYWRWFGNHVaUk9TT1NiSEVpczBIcTNvUQAAAAAGraWWFnRpSFI4eTBLUl9DTGhhaWZURmlnWWcAAAAArdRfQhZaWkpkNWFEX1J3dXl6THMzVk1uU0pRAAAAAAG3nz8WaFZsQi1hVGVRakNsWXUtc3V3dGZJdwAAAAEINqojFmt1T2VDZ0c2UVRxLXY1TXlKRnd0ckEAAAABCAvOGBYzZ2JDYWpkTFFnaUQ4SUZaU0d5eXNnAAAAAQg2qiQWa3VPZUNnRzZRVHEtdjVNeUpGd3RyQQAAAAEOFh5PFlM0MTF4a3hZU0dxVnhuSEZ1Z0p4aVEAAAAAAc8yExZsOEtuREYwZVEyS2U2ZWJzeGZNNlB3AAAAAQ3CTCoWckRCLXNaX2NUb3k5VVgtamRyMnlLZwAAAAADfUxqFjBwTWJSVEJRVGE2ZDRlaEhKN0VEVEEAAAABCGR0zhZUb0Z1eWpaLVJOcW1McUZsLVF0TnZ3AAAAAQ1t878WWF83dHk2QzdRZ3VzS0p3bldfOHcxUQAAAAENwkwrFnJEQi1zWl9jVG95OVVYLWpkcjJ5S2cAAAABDbrdJRZQdmFPTXJ0RFJEbUs1UkhjNmozZ0tnAAAAAQnn5FAWeEQ0bU12YU5TZ3FVMV9FRmJteGFNUQAAAAENwkwsFnJEQi1zWl9jVG95OVVYLWpkcjJ5S2cAAAABDZEgehZRck5Ddnd1RFFDS3J0cGtTaE43cUR3AAAAAK3UX1UWWlpKZDVhRF9Sd3V5ekxzM1ZNblNKUQAAAAENjv2hFm9RdWllMnJsVEJhWlA2alRJR3d6TFEAAAABDbrdJhZQdmFPTXJ0RFJEbUs1UkhjNmozZ0tnAAAAAQhkdM8WVG9GdXlqWi1STnFtTHFGbC1RdE52dwAAAAACtVXxFkx3cE1IQmoyVGZPUk1iMjdET1ZFREEAAAABDZV-yBY3UzVQRkxhZlFDS09ycmRISmVpcjV3AAAAAAIO8osWbm5KMlRwNklTZENsY3p5NWVmZ3JYdwAAAAENg27nFkVhRjR1WlJPU09TYkhFaXMwSHEzb1EAAAAA87q5RBZicGgwSHR3OFRwdTlGeGcwYm51MWNRAAAAAAIO8owWbm5KMlRwNklTZENsY3p5NWVmZ3JYdwAAAADzurlFFmJwaDBIdHc4VHB1OUZ4ZzBibnUxY1EAAAAAAg7yjRZubkoyVHA2SVNkQ2xjenk1ZWZnclh3AAAAAQhkdNAWVG9GdXlqWi1STnFtTHFGbC1RdE52dwAAAAENh2D5FlBRSmM1Q3l4VG5LS0RhY0hDVXB2SncAAAABDauQZxZDYVMxdEdiQVMzS2liS0FCSEkxcElB\",\"took\":937,\"timed_out\":false,\"_shards\":{\"total\":50,\"successful\":50,\"failed\":0},\"hits\":{\"total\":1,\"max_score\":0,\"hits\":[{\"_index\":\"metadata_index\",\"_type\":\"metadata_type\",\"_id\":\"e199fa2a0f00da90fec8c1eb543442b0\",\"_score\":0,\"_source\":{\"scope\":\"ajna.consumer\",\"metric\":\"datapoints.posted\",\"tagk\":\"uuid\",\"tagv\":\"myhostname6.abc.com\",\"mts\":1555112561350,\"ets\":1559000561350}}]}}"; + ObjectMapper mapper = new ObjectMapper(); + ElasticSearchSchemaService service = spy(_esSchemaService); + restClient = mock(RestClient.class); + service.setRestClient(restClient); + doAnswer(new Answer() { + int callCount = 0; + @Override + public Object answer(InvocationOnMock 
invocationOnMock) throws Throwable { + if (callCount == 1) { + Request request = invocationOnMock.getArgument(0, Request.class); + String jsonStr = EntityUtils.toString(request.getEntity()); + JsonNode tree = mapper.readTree(jsonStr); + assertNotNull(tree.get("scroll")); + assertNotNull(tree.get("scroll_id")); + } + callCount++; + return null; + } + }).when(restClient).performRequest(any(Request.class)); + doAnswer(invocation -> reply).when(service).extractResponse(any()); + MetricSchemaRecordQuery query = new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder().scope("system*") + .metric("*") + .tagKey("*") + .tagValue("*") + .namespace("*") + .limit(10001) + .build(); + service.get(query); + verify(restClient, times(2)).performRequest(any(Request.class)); } @@ -477,19 +378,18 @@ public void testGetUniqueUsingScopeSchemaIndex() throws IOException { SchemaService.RecordType scopeType = SchemaService.RecordType.SCOPE; - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); + MonitorService mockedMonitor = mock(MonitorService.class); - ElasticSearchSchemaService spyService = _initializeSpyService(service, getReply, getReply); + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService, getReply, getReply); spyService.getUnique(queryForScope, scopeType); - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor queryJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(Request.class); - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), queryJsonCaptor.capture()); + verify(restClient, times(1)).performRequest(captor.capture()); - String requestUrl = requestUrlCaptor.getValue(); - String queryJson = convertToPrettyJson(EntityUtils.toString(queryJsonCaptor.getValue())); + String requestUrl = captor.getValue().getEndpoint(); + String queryJson = convertToPrettyJson(EntityUtils.toString(captor.getValue().getEntity())); assertEquals(scopeQuery, queryJson); assertEquals("/scopenames/scope_type/_search", requestUrl); @@ -498,40 +398,6 @@ public void testGetUniqueUsingScopeSchemaIndex() throws IOException { assertTrue(queryForScope.isQueryOnlyOnScopeAndMetric()); } - @Test - public void testGetUniqueUsingScopeAndMetricSchemaIndex() throws IOException { - - MetricSchemaRecordQuery queryForMetric = new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder().scope("system") - .metric("argus*") - .tagKey("*") - .tagValue("*") - .namespace("*") - .limit(2) - .build(); - - SchemaService.RecordType scopeType = SchemaService.RecordType.METRIC; - - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - - ElasticSearchSchemaService spyService = _initializeSpyService(service, getReply, getReply); - - spyService.getUnique(queryForMetric, scopeType); - - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor queryJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); - - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), queryJsonCaptor.capture()); - - String requestUrl = requestUrlCaptor.getValue(); - String queryJson = convertToPrettyJson(EntityUtils.toString(queryJsonCaptor.getValue())); - - assertEquals(scopeAndMetricQuery, queryJson); - assertEquals("/scopemetricnames/scopemetric_type/_search", requestUrl); - - 
assertFalse(queryForMetric.isQueryOnlyOnScope()); - assertTrue(queryForMetric.isQueryOnlyOnScopeAndMetric()); - } - @Test public void testGetUniqueUsingMetricTagvRegexSchemaIndex() throws IOException { @@ -545,19 +411,16 @@ public void testGetUniqueUsingMetricTagvRegexSchemaIndex() throws IOException { SchemaService.RecordType scopeType = SchemaService.RecordType.TAGV; - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spyService = _initializeSpyService(service, getReply, getReply); + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService, getReply, getReply); spyService.getUnique(queryForMetric, scopeType); - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor queryJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(Request.class); + verify(restClient, times(1)).performRequest(captor.capture()); - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), queryJsonCaptor.capture()); - - String requestUrl = requestUrlCaptor.getValue(); - String queryJson = convertToPrettyJson(EntityUtils.toString(queryJsonCaptor.getValue())); + String requestUrl = captor.getValue().getEndpoint(); + String queryJson = convertToPrettyJson(EntityUtils.toString(captor.getValue().getEntity())); assertEquals(metricQueryTagvRegex, queryJson); assertEquals("/metadata_index/metadata_type/_search", requestUrl); @@ -579,19 +442,16 @@ public void testGetUniqueUsingMetricNamespaceRegexSchemaIndex() throws IOExcepti SchemaService.RecordType scopeType = SchemaService.RecordType.NAMESPACE; - ElasticSearchSchemaService service = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - - ElasticSearchSchemaService spyService = _initializeSpyService(service, getReply, getReply); + ElasticSearchSchemaService spyService = _initializeSpyService(_esSchemaService, getReply, getReply); spyService.getUnique(queryForMetric, scopeType); - ArgumentCaptor requestUrlCaptor = ArgumentCaptor.forClass(String.class); - ArgumentCaptor queryJsonCaptor = ArgumentCaptor.forClass(StringEntity.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(Request.class); - verify(restClient, times(1)).performRequest(any(), requestUrlCaptor.capture(), any(), queryJsonCaptor.capture()); + verify(restClient, times(1)).performRequest(captor.capture()); - String requestUrl = requestUrlCaptor.getValue(); - String queryJson = convertToPrettyJson(EntityUtils.toString(queryJsonCaptor.getValue())); + String requestUrl = captor.getValue().getEndpoint(); + String queryJson = convertToPrettyJson(EntityUtils.toString(captor.getValue().getEntity())); assertEquals(metricQueryNamespaceRegex, queryJson); assertEquals("/metadata_index/metadata_type/_search", requestUrl); @@ -600,6 +460,63 @@ public void testGetUniqueUsingMetricNamespaceRegexSchemaIndex() throws IOExcepti assertFalse(queryForMetric.isQueryOnlyOnScopeAndMetric()); } + @Test + public void testKeywordSearchWithQueryStringWithScroll() throws IOException { + ElasticSearchSchemaService service = spy(_esSchemaService); + restClient = mock(RestClient.class); + service.setRestClient(restClient); + doAnswer(new Answer() { + int callCount = 0; + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + callCount++; + if (callCount <= 1) { + return 
"{\"tokens\":[{\"token\":\"text\",\"start_offset\":0,\"end_offset\":4,\"type\":\"word\",\"position\":0}]}"; + } else { + return "{\"took\":711,\"timed_out\":false,\"_shards\":{\"total\":50,\"successful\":50,\"failed\":0},\"hits\":{\"total\":0,\"max_score\":null,\"hits\":[]}}"; + } + } + }).when(service).extractResponse(any()); + KeywordQuery query = new KeywordQuery.KeywordQueryBuilder() + .query("text*") + .limit(10001) + .build(); + service.keywordSearch(query); + // 1 time for token analysis, 1 for first scroll call, 1 more for /_search/scroll + verify(restClient, times(3)).performRequest(any(Request.class)); + } + + @Test + public void testKeywordSearchWithType() throws IOException { + ElasticSearchSchemaService service = spy(_esSchemaService); + restClient = mock(RestClient.class); + service.setRestClient(restClient); + doAnswer(new Answer() { + int callCount = 0; + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + // First 5 calls are analyzeToken REST calls + callCount++; + if (callCount <= 5) { + return "{\"tokens\":[{\"token\":\"text\",\"start_offset\":0,\"end_offset\":4,\"type\":\"word\",\"position\":0}]}"; + } else { + return "{\"took\":3939,\"timed_out\":false,\"_shards\":{\"total\":50,\"successful\":50,\"failed\":0},\"hits\":{\"total\":0,\"max_score\":0,\"hits\":[]},\"aggregations\":{\"distinct_values\":{\"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0,\"buckets\":[]}}}"; + } + } + }).when(service).extractResponse(any()); + KeywordQuery query = new KeywordQuery.KeywordQueryBuilder().scope("text*") + .metric("text*") + .tagKey("text*") + .tagValue("text*") + .namespace("text*") + .type(SchemaService.RecordType.METRIC) + .limit(10001) + .build(); + service.keywordSearch(query); + verify(restClient, times(6)).performRequest(any(Request.class)); + } + + /* @Test public void testUpsertWhenAllNewDocsShouldNotUpdateMTSField() throws IOException { String esCreateResponse=String.join("\n", "{" + @@ -640,8 +557,7 @@ public void testUpsertWhenAllNewDocsShouldNotUpdateMTSField() throws IOException " }" + " ]" + "}"); - ElasticSearchSchemaService schemaService = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spySchemaService = spy(schemaService); + ElasticSearchSchemaService spySchemaService = spy(_esSchemaService); RestClient _restClient = mock(RestClient.class); doReturn(null).when(_restClient).performRequest(any(), any(), any(),any()); spySchemaService.setRestClient(_restClient); @@ -653,12 +569,12 @@ public void testUpsertWhenAllNewDocsShouldNotUpdateMTSField() throws IOException metrics.add(m1); metrics.add(m2); spySchemaService.put(metrics); - verify(spySchemaService, never()).updateMtsField(any(), any(), any(), any()); + verify(spySchemaService, never()).updateMetadataRecordMts(any()); } @Test - public void testUpsertWhenSomeDocsExistShouldUpdateMTSFieldForExistingDocs() throws IOException { - String esCreateResponse=String.join("\n", "{" + + public void testRealUpsertWithOne409() throws IOException { + String esCreateResponse = String.join("\n", "{" + " \"took\": 5," + " \"errors\": true," + " \"items\": [" + @@ -666,7 +582,7 @@ public void testUpsertWhenSomeDocsExistShouldUpdateMTSFieldForExistingDocs() thr " \"create\": {" + " \"_index\": \"metadata_index\"," + " \"_type\": \"metadata_type\"," + - " \"_id\": \"1\"," + + " \"_id\": \"dd123151c817644189a2d28757b5be8a\"," + " \"status\": 409," + " \"error\": {" + " \"type\": 
\"version_conflict_engine_exception\"," + @@ -695,24 +611,41 @@ public void testUpsertWhenSomeDocsExistShouldUpdateMTSFieldForExistingDocs() thr " }" + " ]" + "}"); - ElasticSearchSchemaService schemaService = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spySchemaService = spy(schemaService); + String updateResponse = "{\"took\":1,\"errors\":false,\"items\":[{\"update\":{\"_index\":\"metadata_index\",\"_type\":\"metadata_type\",\"_id\":\"dd123151c817644189a2d28757b5be8a\",\"_version\":2,\"result\":\"updated\",\"_shards\":{\"total\":2,\"successful\":2,\"failed\":0},\"status\":200}}]}"; + String scopeCreateResponse = "{\"took\":1,\"errors\":true,\"items\":[{\"create\":{\"_index\":\"scopenames\",\"_type\":\"scope_type\",\"_id\":\"fbeb48e92a72df5f9844d2ecd4d1e825\",\"status\":409,\"error\":{\"type\":\"version_conflict_engine_exception\",\"reason\":\"[scope_type][fbeb48e92a72df5f9844d2ecd4d1e825]: version conflict, document already exists (current version [1])\",\"index_uuid\":\"fbq7sEAmQm6aMSH7z0Ij5A\",\"shard\":\"1\",\"index\":\"scopenames\"}}},{\"create\":{\"_index\":\"scopenames\",\"_type\":\"scope_type\",\"_id\":\"e7ccd95462e7696b26349360b709a1d7\",\"_version\":1,\"result\":\"created\",\"_shards\":{\"total\":2,\"successful\":1,\"failed\":0},\"created\":true,\"status\":201}}]}"; + String scopeUpdateResopnse = "{\"took\":2,\"errors\":false,\"items\":[{\"update\":{\"_index\":\"scopenames\",\"_type\":\"scope_type\",\"_id\":\"fbeb48e92a72df5f9844d2ecd4d1e825\",\"_version\":2,\"result\":\"updated\",\"_shards\":{\"total\":2,\"successful\":2,\"failed\":0},\"status\":200}}]}"; + ElasticSearchSchemaService spySchemaService = spy(_esSchemaService); RestClient _restClient = mock(RestClient.class); - doReturn(null).when(_restClient).performRequest(any(), any(), any(),any()); + doAnswer(new Answer() { + int count = 0; + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + count++; + // 2nd call: the updateMetadata, and 4th call: the updateScope, should only update one object AKA request body should be 2 lines + if (count == 2 || count == 4) { + StringEntity entity = invocationOnMock.getArgument(1, StringEntity.class); + assertTrue(EntityUtils.toString(entity).split("\r\n|\r|\n").length == 2); + } + return null; + } + }).when(_restClient).performRequest(any(), any(), any(),any()); spySchemaService.setRestClient(_restClient); - doReturn(esCreateResponse).when(spySchemaService).extractResponse(any()); - doNothing().when(spySchemaService).upsertScopeAndMetrics(any()); - doNothing().when(spySchemaService).upsertScopes(any()); - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - @SuppressWarnings("unchecked") - List updateDocIds = List.class.cast(invocation.getArguments()[0]); - assertEquals("1", updateDocIds.get(0)); - assertEquals(1, updateDocIds.size()); - return null; - } - }).when(spySchemaService).updateMtsField(any(), any(), any(), any()); + doAnswer(new Answer() { + int count = 0; + @Override + public Object answer(InvocationOnMock invocationOnMock) throws Throwable { + count++; + if (count == 1) { + return esCreateResponse; + } else if (count == 2) { + return updateResponse; + } else if (count == 3) { + return scopeCreateResponse; + } else { + return scopeUpdateResopnse; + } + } + }).when(spySchemaService).extractResponse(any()); List metrics = new ArrayList<>(); Metric m1= new Metric("scope1", "metric1"); @@ -720,7 
+653,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { metrics.add(m1); metrics.add(m2); spySchemaService.put(metrics); - verify(spySchemaService, times(1)).updateMtsField(any(), any(), any(), any()); + verify(spySchemaService, times(1)).updateMetadataRecordMts(any()); } @Test @@ -733,7 +666,7 @@ public void testUpsertWhenAllDocsExistShouldUpdateMTSFieldForAllDocs() throws IO " \"create\": {" + " \"_index\": \"metadata_index\"," + " \"_type\": \"metadata_type\"," + - " \"_id\": \"1\"," + + " \"_id\": \"dd123151c817644189a2d28757b5be8a\"," + " \"status\": 409," + " \"error\": {" + " \"type\": \"version_conflict_engine_exception\"," + @@ -748,7 +681,7 @@ public void testUpsertWhenAllDocsExistShouldUpdateMTSFieldForAllDocs() throws IO " \"create\": {" + " \"_index\": \"metadata_index\"," + " \"_type\": \"metadata_type\"," + - " \"_id\": \"2\"," + + " \"_id\": \"4f86f5e6dc6d4672830d97de21e75a20\"," + " \"status\": 409," + " \"error\": {" + " \"type\": \"version_conflict_engine_exception\"," + @@ -761,25 +694,20 @@ public void testUpsertWhenAllDocsExistShouldUpdateMTSFieldForAllDocs() throws IO " }" + " ]" + "}"); - ElasticSearchSchemaService schemaService = new ElasticSearchSchemaService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); - ElasticSearchSchemaService spySchemaService = spy(schemaService); + ElasticSearchSchemaService spySchemaService = spy(_esSchemaService); RestClient _restClient = mock(RestClient.class); doReturn(null).when(_restClient).performRequest(any(), any(), any(),any()); spySchemaService.setRestClient(_restClient); doReturn(esCreateResponse).when(spySchemaService).extractResponse(any()); - doNothing().when(spySchemaService).upsertScopeAndMetrics(any()); - doNothing().when(spySchemaService).upsertScopes(any()); - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - @SuppressWarnings("unchecked") - List updateDocIds = List.class.cast(invocation.getArguments()[0]); - assertEquals("1", updateDocIds.get(0)); - assertEquals("2", updateDocIds.get(1)); - assertEquals(2, updateDocIds.size()); - return null; - } - }).when(spySchemaService).updateMtsField(any(), any(), any(), any()); + doReturn(new HashSet<>()).when(spySchemaService).upsertScopeRecords(any()); + doAnswer((Answer>) invocation -> { + @SuppressWarnings("unchecked") + Set recordsToUpdate = Set.class.cast(invocation.getArguments()[0]); + assertTrue(recordsToUpdate.stream().anyMatch(r -> r.getScope().equals("scope1"))); + assertTrue(recordsToUpdate.stream().anyMatch(r -> r.getScope().equals("scope2"))); + assertEquals(2, recordsToUpdate.size()); + return new HashSet<>(); + }).when(spySchemaService).updateMetadataRecordMts(any()); List metrics = new ArrayList<>(); Metric m1= new Metric("scope1", "metric1"); @@ -787,8 +715,47 @@ public Void answer(InvocationOnMock invocation) throws Throwable { metrics.add(m1); metrics.add(m2); spySchemaService.put(metrics); - verify(spySchemaService, times(1)).updateMtsField(any(), any(), any(), any()); - } + verify(spySchemaService, times(1)).updateMetadataRecordMts(any()); + } */ + + @Test + public void testConstructTagNotEqualsQuery() throws IOException { + String tagValue = "notTagValue"; + AtomicInteger invocationCount = new AtomicInteger(0); + ObjectMapper mapper = new ObjectMapper(); + RestClient customClient = mock(RestClient.class); + Answer requestAnswer = invocation -> { + Request request = invocation.getArgument(0, Request.class); + String requestUrl = request.getEndpoint(); 
+ assertTrue(requestUrl.endsWith("_search")); + JsonNode root = mapper.readTree(EntityUtils.toString(request.getEntity())); + JsonNode nots = root.get("query").get("bool").get("must_not"); + JsonNode filters = root.get("query").get("bool").get("filter"); + assertEquals(3, filters.size()); + String actualTagValue = nots.get(0).get("regexp").get("tagv.raw").asText(); + assertEquals(tagValue, actualTagValue); + invocationCount.incrementAndGet(); + return null; + }; + doAnswer(requestAnswer).when(customClient).performRequest(any(Request.class)); + + ElasticSearchUtils mockedElasticSearchUtils = mock(ElasticSearchUtils.class); + ElasticSearchSchemaService schemaService = spy(new ElasticSearchSchemaService(systemConfig, + mock(MonitorService.class), + mockedElasticSearchUtils)); + schemaService.setRestClient(customClient); + + doReturn("{\"hits\":{\"total\": 0, \"max_score\": null, \"hits\": []}}").when(schemaService).extractResponse(any()); + schemaService.setRestClient(customClient); + schemaService.get(new MetricSchemaRecordQuery.MetricSchemaRecordQueryBuilder() + .scope("scope") + .metric("metric") + .tagKey("tagKey") + .tagValue("~" + tagValue) + .build() + ); + assertEquals(1, invocationCount.get()); + } private String convertToPrettyJson(String jsonString) { JsonParser parser = new JsonParser(); @@ -805,8 +772,6 @@ private ElasticSearchSchemaService _initializeSpyService(ElasticSearchSchemaServ service.setRestClient(restClient); - service.enableScopeMetricNamesIndex(); - ElasticSearchSchemaService spyService = spy(service); doAnswer(new Answer() { @@ -827,11 +792,11 @@ public Object answer(InvocationOnMock invocation) { @Test public void testMetriccSchemaRecordListMapper() throws Exception { - ObjectMapper mapper = ElasticSearchSchemaService.createObjectMapper(); + ObjectMapper mapper = ElasticSearchSchemaService._getMetadataObjectMapper(new MetricSchemaRecordList.IndexSerializer()); MetricSchemaRecord record1 = new MetricSchemaRecord("namespace1", "scope1", "metric1", "tagK1", "tagV1", 10); //MetricSchemaRecord record2 = new MetricSchemaRecord("namespace2", "scope2", "metric2", "tagK2", "tagV2", 10); - MetricSchemaRecordList recordList = new MetricSchemaRecordList(Arrays.asList(record1), MetricSchemaRecordList.HashAlgorithm.fromString("MD5")); + MetricSchemaRecordList recordList = new MetricSchemaRecordList(new HashSet<>(Arrays.asList(record1)), MetricSchemaRecordList.HashAlgorithm.fromString("MD5")); String serialized = mapper.writeValueAsString(recordList); @@ -871,21 +836,21 @@ public void testDoExtractResponse500() { ElasticSearchSchemaService.doExtractResponse(500, null); } - @Test - public void testGetRequestBodyForMtsFieldUpdate() { - String expected = "{\"update\" : {\"_id\" : \"a303abc25d534dd8ff97121668e952e6\" } }\n" + - "{\"doc\" : {\"mts\": 0,\"ets\":3888000000}}\n" + - "{\"update\" : {\"_id\" : \"8b7f219c5131eeff5b02a6e798c9ec2d\" } }\n" + - "{\"doc\" : {\"mts\": 0,\"ets\":864000000,\"_retention_discovery_\":10}}\n"; + /* this can be reused if we ever bring back UPDATE + public void testGetRequestBodyForMtsFieldUpdate() throws IOException { + ObjectMapper updateMapper = ElasticSearchSchemaService._getMetadataObjectMapper(new MetricSchemaRecordList.UpdateSerializer()); + String expectedRegex = "\\{\"update\":\\{\"_id\":\"a303abc25d534dd8ff97121668e952e6\"\\}\\}\n" + + "\\{\"doc\":\\{\"mts\":[0-9]+,\"ets\":[0-9]+\\}\\}\n" + + "\\{\"update\":\\{\"_id\":\"8b7f219c5131eeff5b02a6e798c9ec2d\"\\}\\}\n" + + 
"\\{\"doc\":\\{\"mts\":[0-9]+,\"_retention_discovery_\":10,\"ets\":[0-9]+\\}\\}\n"; MetricSchemaRecord record1 = new MetricSchemaRecord("namespace1", "scope1", "metric1", "tagK1", "tagV1", 10); MetricSchemaRecord record2 = new MetricSchemaRecord("namespace2", "scope2", "metric2", "tagK2", "tagV2"); //retention will be the default 45 days - MetricSchemaRecordList recordList = new MetricSchemaRecordList(Arrays.asList(record1, record2), MetricSchemaRecordList.HashAlgorithm.fromString("MD5")); + MetricSchemaRecordList recordList = new MetricSchemaRecordList(new HashSet<>(Arrays.asList(record1, record2)), MetricSchemaRecordList.HashAlgorithm.fromString("MD5")); + String requestBody = updateMapper.writeValueAsString(recordList); - String requestBody = ElasticSearchSchemaService._getRequestBodyForMtsFieldUpdate(Arrays.asList("a303abc25d534dd8ff97121668e952e6", "8b7f219c5131eeff5b02a6e798c9ec2d"), recordList, 0); - - assertEquals("the update request body is different", expected, requestBody); - } + assertTrue(requestBody.matches(expectedRegex)); + }*/ } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtilsTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtilsTest.java new file mode 100644 index 000000000..3946e78f3 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/schema/ElasticSearchUtilsTest.java @@ -0,0 +1,47 @@ +package com.salesforce.dva.argus.service.schema; + +import com.salesforce.dva.argus.system.SystemException; +import org.apache.http.entity.BasicHttpEntity; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.ByteArrayInputStream; +import java.io.IOException; + +import static org.junit.Assert.*; + +public class ElasticSearchUtilsTest { + + @Test + public void testDoExtractResponse() throws Exception { + final String message = "this is a test"; + BasicHttpEntity entity = new BasicHttpEntity(); + try(ByteArrayInputStream bis = new ByteArrayInputStream(message.getBytes())) { + entity.setContent(bis); + } + catch (IOException e) { + throw e; + } + + String responseMessage = ElasticSearchUtils.doExtractResponse(200, entity); + assertEquals("expect the entity to be equal after extraction", message, responseMessage); + } + + @Rule + public ExpectedException expectedException = ExpectedException.none(); + + @Test + public void testDoExtractResponse400() { + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("Status code: 400"); + ElasticSearchUtils.doExtractResponse(400, null); + } + + @Test + public void testDoExtractResponse500() { + expectedException.expect(SystemException.class); + expectedException.expectMessage("Status code: 500"); + ElasticSearchUtils.doExtractResponse(500, null); + } +} \ No newline at end of file diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBServiceTest.java index c0830524f..516fc003e 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBServiceTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/AbstractTSDBServiceTest.java @@ -1,19 +1,17 @@ package com.salesforce.dva.argus.service.tsdb; -import com.salesforce.dva.argus.AbstractTest; -import com.salesforce.dva.argus.entity.Annotation; -import com.salesforce.dva.argus.service.schema.ElasticSearchSchemaService; -import 
org.apache.http.client.ClientProtocolException; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.RestClient; -import org.junit.Test; -import org.mockito.ArgumentCaptor; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import java.io.IOException; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -23,21 +21,57 @@ import java.util.TreeMap; import java.util.concurrent.ConcurrentLinkedQueue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public class AbstractTSDBServiceTest extends AbstractTest { +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableMap; +import com.salesforce.dva.argus.service.MonitorService; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import com.salesforce.dva.argus.entity.Annotation; +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.system.SystemException; + +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; + + +public class AbstractTSDBServiceTest { static final int RUNS = 100; static final int THREADS = 20; CloseableHttpClient readHttpClient; CloseableHttpClient writeHttpClient; + static private SystemMain system; + + private final Logger _logger = LoggerFactory.getLogger(AbstractTSDBServiceTest.class); + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + private String getReply1 = String.join("\n", "[" + " {" + @@ -182,6 +216,25 @@ public class AbstractTSDBServiceTest extends AbstractTest { " ]" + "}"); + private String getAnnotationReply = String.join("\n", + "[" + + " {" + + " \"metric\": \"TestType1-__-TestScope1.6f94d354\"," + + " \"tags\": {" + + " \"TestTag\": \"TagValue\"," + + " \"meta\": \"eyJkaXNwbGF5TmFtZSI6bnVsbCwidW5pdHMiOm51bGx9\"" + + " }," + + " \"aggregateTags\": []," + + " \"tsuids\": [" + + " \"000089A6D1A70000000000010000000000060000000000B00000010F7CB800000000028C00000E2548D1\"" + + " ]," + + " \"type\": \"ANNOTATION\"," + + " 
\"fields\": {" + + " \"owner\": \"jdoe\"" + + " }" + + " }" + + "]"); + @Test public void testAnnotationWorkflow() throws IOException { DefaultTSDBService service = new DefaultTSDBService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); @@ -220,11 +273,27 @@ public void testAnnotationWorkflow() throws IOException { assertTrue(urls.get(3).contains("query")); assertTrue(contents.contains(getBody3.replaceAll("\\s+",""))); + + List queries = new ArrayList<>(); + queries.add(toQuery(annotations.get(0))); + + spyService = _initializeSpyService(service, getAnnotationReply); + spyService.getAnnotations(queries); + + verify(spyService, times(1)).executeHttpRequest(any(), urlCaptor.capture(), any(), contentCaptor.capture()); } + private AnnotationQuery toQuery(Annotation annotation) { + String scope = annotation.getScope(); + String metric = annotation.getMetric(); + Map tags = annotation.getTags(); + String type = annotation.getType(); + Long timestamp = annotation.getTimestamp(); + + return new AnnotationQuery(scope, metric, tags, type, timestamp, null); + } - private AbstractTSDBService _initializeSpyService(AbstractTSDBService service, - String... replies) { + private AbstractTSDBService _initializeSpyService(AbstractTSDBService service, String... replies) { readHttpClient = mock(CloseableHttpClient.class); writeHttpClient = mock(CloseableHttpClient.class); @@ -246,7 +315,6 @@ public Object answer(InvocationOnMock invocation) { return spyService; } - private Annotation _constructAnnotation(char appendChar) { Annotation result = new Annotation("TestSource"+ appendChar, "TestID"+ appendChar, @@ -284,6 +352,32 @@ public void testWrapperListUsedInBatching() { assertEquals(wrapperList.get(1).size(), 1); } + @Test + public void testFractureMetrics() { + TSDBService service = new AbstractTSDBService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); + Metric metric = new Metric("testscope", "testMetric"); + Map datapoints = new HashMap<>(); + + for (int i = 0; i <= 200; i++) { + datapoints.put(System.currentTimeMillis() + (i * 60000L), (double)(TestUtils.random.nextInt(50))); + } + metric.setDatapoints(datapoints); + try { + Method method = AbstractTSDBService.class.getDeclaredMethod("fractureMetric", Metric.class); + + method.setAccessible(true); + + List metricList = (List) method.invoke(service, metric); + + assertEquals(3, metricList.size()); + assertEquals(100, metricList.get(0).getDatapoints().size()); + assertEquals(100, metricList.get(1).getDatapoints().size()); + assertEquals(1, metricList.get(2).getDatapoints().size()); + } catch (NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { + throw new SystemException("Failed to construct fracture metric method using reflection"); + } + } + @Test public void testCyclingIterator() { AbstractTSDBService service = new AbstractTSDBService(system.getConfiguration(), system.getServiceFactory().getMonitorService()); @@ -298,7 +392,7 @@ public void testCyclingIterator() { Iterator iter = service.constructCyclingIterator(endpoints); List threads = new ArrayList<>(); ConcurrentLinkedQueue queue = new ConcurrentLinkedQueue<>(); - System.out.println(String.format("Trying %d .next() calls with %d threads calling cycling iterator on endpoints %s", RUNS, THREADS, String.join(", ", endpoints))); + _logger.debug(String.format("Trying %d .next() calls with %d threads calling cycling iterator on endpoints %s", RUNS, THREADS, String.join(", ", endpoints))); for 
(int i = 0; i < THREADS; i++) { Thread thread = new Thread(new IterateTask(iter, queue)); threads.add(thread); @@ -328,6 +422,22 @@ public void testCyclingIterator() { } } + @Test + public void testConstructNotEqualsQuery() throws IOException { + DefaultTSDBService service = new DefaultTSDBService(system.getConfiguration(), mock(MonitorService.class)); + ObjectMapper mapper = service.getMapper(); + MetricQuery query = new MetricQuery( + "scope", + "metric", + new ImmutableMap.Builder().put("tagk", "~tagv").build(), + 0L, + 1L); + String queryJson = mapper.writeValueAsString(query); + JsonNode root = mapper.readTree(queryJson); + String tagValue = root.get("queries").get(0).get("tags").get("tagk").asText(); + assertEquals(MetricQuery.TAG_NOT_EQUALS_TSDB_PREFIX + "tagv" + MetricQuery.TAG_NOT_EQUALS_TSDB_SUFFIX, tagValue); + } + class IterateTask implements Runnable { Random random = new Random(); Iterator iter; diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBServiceTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBServiceTest.java new file mode 100644 index 000000000..fc4f36209 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/CachedTSDBServiceTest.java @@ -0,0 +1,160 @@ +package com.salesforce.dva.argus.service.tsdb; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.lang.reflect.Constructor; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; + +import com.salesforce.dva.argus.entity.Metric; +import com.salesforce.dva.argus.service.CacheService; +import com.salesforce.dva.argus.service.MonitorService; +import com.salesforce.dva.argus.service.TSDBService; +import com.salesforce.dva.argus.system.SystemConfiguration; + + +import com.salesforce.dva.argus.system.SystemMain; +import com.salesforce.dva.argus.TestUtils; +import org.junit.BeforeClass; +import org.junit.AfterClass; + + +@RunWith(MockitoJUnitRunner.class) +public class CachedTSDBServiceTest { + @Mock + private MonitorService monitorService; + @Mock + private CacheService cacheService; + @Mock + private TSDBService tsdbService; + @Mock + CachedTSDBService cachedTSDBService; + + static private SystemMain system; + + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + + @Before + public void setup() throws Exception{ + Constructor constructor = CachedTSDBService.class.getDeclaredConstructor(SystemConfiguration.class, + MonitorService.class,CacheService.class,TSDBService.class); + constructor.setAccessible(true); + cachedTSDBService = constructor.newInstance(system.getConfiguration(), 
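+                // CachedTSDBService's constructor is not public, hence the reflective
+                // instantiation via setAccessible(true) above.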
monitorService,cacheService,tsdbService);
+    }
+
+    @Test
+    public void testWhenCacheMissShouldCallTSDBServiceGetMetricsMethod() throws Exception {
+        long startTime = System.currentTimeMillis()-60*60*1000, endTime = System.currentTimeMillis();
+        MetricQuery query = new MetricQuery("scope", "metric", new HashMap<>(),
+                startTime, endTime);
+        cachedTSDBService.getMetrics(Arrays.asList(query));
+        verify(tsdbService,times(1)).getMetrics(any());
+    }
+
+    @SuppressWarnings("unchecked")
+    @Test
+    public void testWhenCacheMissShouldReturnMetricsFromOnlyTSDBNShouldNotCallCacheService() {
+        long startTime = System.currentTimeMillis()-60*60*1000, endTime = System.currentTimeMillis();
+        MetricQuery query = new MetricQuery("scope", "metric", new HashMap<>(),
+                startTime, endTime);
+
+        Metric metric = new Metric("scope", "metric");
+        Map<Long, Double> dps = new HashMap<>();
+        dps.put(startTime,1d);
+        metric.setDatapoints(dps);
+        Map<MetricQuery, List<Metric>> metricsFromTSDB = new HashMap<>();
+        metricsFromTSDB.put(query, Arrays.asList(metric));
+
+        when(tsdbService.getMetrics(any())).thenReturn(metricsFromTSDB);
+        Map<MetricQuery, List<Metric>> actual = cachedTSDBService.getMetrics(Arrays.asList(query));
+        assertEquals(metricsFromTSDB, actual);
+        verify(cacheService, never()).getRange(anySet(), anyInt(), anyInt());
+    }
+
+    @SuppressWarnings("unchecked")
+    @Test
+    public void testWhenMetricsExistInCacheShouldReturnMetricsFromBothCacheNTSDB() throws Exception {
+        long currTime = System.currentTimeMillis();
+        long startTime = currTime-4*24*60*60*1000, endTime = currTime;
+        MetricQuery query = new MetricQuery("scope", "metric", new HashMap<>(),
+                startTime, endTime);
+
+        Metric lastHourMetric= new Metric("scope", "metric");
+        Map<Long, Double> lastHourDps = new HashMap<>();
+        lastHourDps.put(endTime, 2d);
+        lastHourMetric.setDatapoints(lastHourDps);
+        Map<MetricQuery, List<Metric>> lastHourMetrics = mock(Map.class);
+        when(lastHourMetrics.get(any())).thenReturn(Arrays.asList(lastHourMetric));
+        when(tsdbService.getMetrics(any())).thenReturn(lastHourMetrics);
+
+        StringBuilder cacheKey = new StringBuilder();
+        cacheKey.append(startTime);
+        cacheKey.append(":null:scope:metric:{}:AVG:AVG:300000");
+        String cacheKeyValue = "{\"metatagsRecord\":null,\"datapoints\":{\""+ startTime+ "\":1}," +
+                "\"metric\":\"metric\",\"units\":null,\"namespace\":null,\"query\":{\"aggregator\":\"AVG\",\"downsampler\":\"AVG\"," +
+                "\"downsamplingPeriod\":300000,\"metric\":\"metric\",\"startTimestamp\":1544659200000,\"endTimestamp\":1544816134802," +
+                "\"scope\":\"scope\",\"tags\":{}},\"displayName\":null,\"scope\":\"scope\",\"uid\":null,\"tags\":{}}";
+        Map<String, List<String>> metricsFromCache = new HashMap<>();
+        metricsFromCache.put(cacheKey.toString(), Arrays.asList(cacheKeyValue));
+        when(cacheService.getRange(anySet(), anyInt(), anyInt())).thenReturn(metricsFromCache);
+
+        Map<MetricQuery, List<Metric>> actual = cachedTSDBService.getMetrics(Arrays.asList(query));
+
+        Metric expectedMetric= new Metric("scope", "metric");
+        Map<Long, Double> dps = new HashMap<>();
+        dps.put(startTime, 1d);
+        dps.put(endTime, 2d);
+        expectedMetric.setDatapoints(dps);
+        List<Metric> expected = new ArrayList<>();
+        expected.add(expectedMetric);
+        assertEquals(expected, actual.get(query));
+    }
+
+    @Test
+    public void testIfQueryHasHistogram() {
+        long startTime = System.currentTimeMillis()-60*60*1000, endTime = System.currentTimeMillis();
+        MetricQuery query = new MetricQuery("scope", "metric", new HashMap<>(),
+                startTime, endTime);
+        assertFalse(cachedTSDBService.isQueryHavingHistogram(query));
+
+        query = new MetricQuery("scope", "metric", new HashMap<>(),startTime, endTime);
+        query.setShowHistogramBuckets(true);
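+        // Either histogram buckets or a percentile marks the query as histogram-backed.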
+        assertTrue(cachedTSDBService.isQueryHavingHistogram(query));
+
+        query = new MetricQuery("scope", "metric", new HashMap<>(),startTime, endTime);
+        query.setPercentile(new String[]{"50"});
+        assertTrue(cachedTSDBService.isQueryHavingHistogram(query));
+    }
+}
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/MetricFederationTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/MetricFederationTest.java
index 1c9a2abf6..16e261350 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/MetricFederationTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/service/tsdb/MetricFederationTest.java
@@ -47,15 +47,38 @@
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.module.SimpleModule;
-import com.salesforce.dva.argus.AbstractTest;
 import com.salesforce.dva.argus.entity.Metric;
 import com.salesforce.dva.argus.service.MetricService;
 import com.salesforce.dva.argus.service.TSDBService;
 import com.salesforce.dva.argus.service.metric.transform.TransformFactory;
-public class MetricFederationTest extends AbstractTest {
+import org.junit.BeforeClass;
+import com.salesforce.dva.argus.system.SystemMain;
+import com.salesforce.dva.argus.TestUtils;
+import org.junit.AfterClass;
+
+
+
+public class MetricFederationTest {
     private ObjectMapper _mapper;
+    static private SystemMain system;
+
+    @BeforeClass
+    static public void setUpClass() {
+        system = TestUtils.getInstance();
+        system.start();
+    }
+
+    @AfterClass
+    static public void tearDownClass() {
+        if (system != null) {
+            system.getServiceFactory().getManagementService().cleanupRecords();
+            system.stop();
+        }
+    }
+
+
     @Before
     public void initialize() {
         _mapper = new ObjectMapper();
@@ -99,7 +122,7 @@ public void testEndPointFederationForkJoinSumDownsamplerWithTag() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(3, queryMetricsMap.get(queries.get(0)).size());
-
+
         // Three time series
         assertEquals("{host=machineHost1}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=4.940423168E9}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
@@ -108,7 +131,7 @@ public void testEndPointFederationForkJoinSumDownsamplerWithTag() {
         assertEquals("{host=machineHost3}", queryMetricsMap.get(queries.get(0)).get(2).getTags().toString());
         assertEquals("{1477386500=4.940423168E9, 1477386600=4.940423168E9}", queryMetricsMap.get(queries.get(0)).get(2).getDatapoints().toString());
     }
-
+
     @Test
     public void testEndPointFederationForkJoinSumDownsamplerWithNoTag() {
         MetricService metricService = system.getServiceFactory().getMetricService();
@@ -132,12 +155,12 @@ public void testEndPointFederationForkJoinSumDownsamplerWithNoTag() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(1, queryMetricsMap.get(queries.get(0)).size());
-
+
         // One time series, since no tag specified
         assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=7.0, 1477386500=6.0, 1477386600=7.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
     }
-
+
     @Test
     public void testEndPointFederationForkJoinMinDownsampler() {
         MetricService metricService = system.getServiceFactory().getMetricService();
@@ -161,12 +184,12 @@ public void testEndPointFederationForkJoinMinDownsampler() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(1, queryMetricsMap.get(queries.get(0)).size());
-
+
         // One time series, since no tag specified
         assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=3.0, 1477386500=6.0, 1477386600=7.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
     }
-
+
     @Test
     public void testEndPointFederationForkJoinMaxDownsampler() {
         MetricService metricService = system.getServiceFactory().getMetricService();
@@ -190,7 +213,7 @@ public void testEndPointFederationForkJoinMaxDownsampler() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(1, queryMetricsMap.get(queries.get(0)).size());
-
+
         // One time series, since no tag specified
         assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=4.0, 1477386500=6.0, 1477386600=7.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
@@ -219,7 +242,7 @@ public void testEndPointFederationForkJoinCountDownsampler() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(3, queryMetricsMap.get(queries.get(0)).size());
-
+
         // Three time series
         assertEquals("{host=machineHost1}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=1.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
@@ -228,7 +251,7 @@ public void testEndPointFederationForkJoinCountDownsampler() {
         assertEquals("{host=machineHost3}", queryMetricsMap.get(queries.get(0)).get(2).getTags().toString());
         assertEquals("{1477386500=1.0, 1477386600=1.0}", queryMetricsMap.get(queries.get(0)).get(2).getDatapoints().toString());
     }
-
+
     @Test
     public void testEndPointFederationForkJoinCountDownsamplerWithNoTag() {
         MetricService metricService = system.getServiceFactory().getMetricService();
@@ -252,12 +275,12 @@ public void testEndPointFederationForkJoinCountDownsamplerWithNoTag() {
         Map<MetricQuery, List<Metric>> queryMetricsMap = queryFederation.join(mapQuerySubQueries, subQueryMetricsMap);
         assertEquals(1, queryMetricsMap.size());
         assertEquals(1, queryMetricsMap.get(queries.get(0)).size());
-
+
         // One time series, since no tag specified
         assertEquals("{}", queryMetricsMap.get(queries.get(0)).get(0).getTags().toString());
         assertEquals("{1477386300=7.0, 1477386500=6.0, 1477386600=7.0}", queryMetricsMap.get(queries.get(0)).get(0).getDatapoints().toString());
     }
-
+
     private List<Metric> getMetricsFromMetricString(String content){
         List<Metric> metrics = null;
         try {
diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/system/SystemConfiguratorTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/system/SystemConfiguratorTest.java
index e93f0e23b..0ac40eb21 100644
--- a/ArgusCore/src/test/java/com/salesforce/dva/argus/system/SystemConfiguratorTest.java
+++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/system/SystemConfiguratorTest.java
@@ -28,15 +28,38 @@
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE. 
*/ - + package com.salesforce.dva.argus.system; -import com.salesforce.dva.argus.AbstractTest; import org.junit.Test; +import java.util.Properties; +import java.io.IOException; +import java.io.InputStream; import static org.junit.Assert.*; -public class SystemConfiguratorTest extends AbstractTest { +public class SystemConfiguratorTest { + + public SystemMain getInstance() { + Properties config = new Properties(); + InputStream is = null; + + try { + is = getClass().getResourceAsStream("/argus.properties"); + config.load(is); + } catch (IOException ex) { + throw new SystemException(ex); + } finally { + if (is != null) { + try { + is.close(); + } catch (IOException ex) { + assert false : "This should never occur."; + } + } + } + return SystemMain.getInstance(config); + } @Test public void testLifecycle() { diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/AlertUtilsTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/AlertUtilsTest.java index d0c1acaba..ac7203da4 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/AlertUtilsTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/AlertUtilsTest.java @@ -1,38 +1,114 @@ package com.salesforce.dva.argus.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -import java.math.BigInteger; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.regex.Pattern; - -import com.salesforce.dva.argus.AbstractTest; +import com.google.inject.Provider; +import com.salesforce.dva.argus.TestUtils; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.Metric; import com.salesforce.dva.argus.entity.Notification; +import com.salesforce.dva.argus.entity.PrincipalUser; import com.salesforce.dva.argus.entity.Trigger; -import com.salesforce.dva.argus.service.MetricService; +import com.salesforce.dva.argus.service.AlertService; +import com.salesforce.dva.argus.service.CacheService; +import com.salesforce.dva.argus.service.DiscoveryService; import com.salesforce.dva.argus.service.UserService; import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import com.salesforce.dva.argus.service.metric.DefaultMetricService; import com.salesforce.dva.argus.service.metric.MetricReader; +import com.salesforce.dva.argus.service.schema.CachedDiscoveryService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; +import com.salesforce.dva.argus.system.SystemMain; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; import org.quartz.CronScheduleBuilder; import org.quartz.CronTrigger; import org.quartz.TriggerBuilder; +import org.slf4j.LoggerFactory; + +import java.math.BigInteger; +import java.sql.DriverManager; +import java.sql.SQLNonTransientConnectionException; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + + +public 
class AlertUtilsTest { + + private static final String CACHED_QUERIES_0 = "[{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB0\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB1\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB5\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB6\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB10\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB11\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB15\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB16\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB20\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB21\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB25\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB26\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}]"; + private static final String CACHED_QUERIES_1 = "[{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB0\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB1\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB2\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, 
{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB3\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB4\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB5\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB6\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB7\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB8\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB9\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB10\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB11\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB12\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB13\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB14\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB15\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB16\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB17\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB18\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, 
{\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB19\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB20\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB21\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB22\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB23\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB24\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB25\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC1.service1\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB26\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC2.service2\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB27\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC3.service3\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB28\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC4.service4\"}, {\"aggregator\":\"SUM\",\"metric\":\"winterfell.backupTimestamps-NyB29\",\"tags\":{\"device\":\"myhost-mycompany.com\"},\"endTimestamp\":1485904591853,\"startTimestamp\":1485903991000,\"scope\":\"system.DC5.service5\"} ]"; + + private static PrincipalUser admin; + private static AlertService alertService; + private static UserService userService; + private static SystemMain system; + protected static final Map tags; + + static { + tags = new HashMap<>(); + tags.put("source", "unittest"); + ch.qos.logback.classic.Logger apacheLogger = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger("org.apache"); + apacheLogger.setLevel(ch.qos.logback.classic.Level.OFF); + } + + @BeforeClass + static public void setUpClass() { + try { + Class.forName("org.apache.derby.jdbc.EmbeddedDriver"); + DriverManager.getConnection("jdbc:derby:memory:argus;create=true").close(); + } catch (Exception ex) { + LoggerFactory.getLogger(AlertUtilsTest.class).error("Exception in setUp:{}", ex.getMessage()); + fail("Exception during database startup."); + } + + system = TestUtils.getInstance(); + system.start(); + userService = system.getServiceFactory().getUserService(); + admin = userService.findAdminUser(); + alertService = system.getServiceFactory().getAlertService(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + 
system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + try { + DriverManager.getConnection("jdbc:derby:memory:argus;shutdown=true").close(); + } catch (SQLNonTransientConnectionException ex) { + if (ex.getErrorCode() >= 50000 || ex.getErrorCode() < 40000) { + throw new RuntimeException(ex); + } + } catch (Exception ex) { + throw new RuntimeException(ex); + } + } -public class AlertUtilsTest extends AbstractTest { @Test public void isScopePresentInWhiteListTest() { List scopesSet = new ArrayList(Arrays.asList(new Pattern[] {Pattern.compile("argus.core"), Pattern.compile("kafka.broker.*.ajna_local")})); - - assertTrue(AlertUtils.isScopePresentInWhiteList("-1d:argus.core:alerts.scheduled:zimsum:15m-sum",scopesSet)); - assertTrue(AlertUtils.isScopePresentInWhiteList("COUNT(-75m:-15m:kafka.broker.CHI.NONE.ajna_local:kafka.server.BrokerTopicMetrics.BytesInPerSec.BytesCount{device=*}:avg:1m-avg)", scopesSet)); - assertFalse(AlertUtils.isScopePresentInWhiteList("COUNT(-75m:-15m:kafka1.broker.CHI.NONE.ajna_local:kafka.server.BrokerTopicMetrics.BytesInPerSec.BytesCount{device=*}:avg:1m-avg)", scopesSet)); + assertTrue(AlertUtils.isPatternPresentInWhiteList("-1d:argus.core:alerts.scheduled:zimsum:15m-sum",scopesSet)); + assertTrue(AlertUtils.isPatternPresentInWhiteList("COUNT(-75m:-15m:kafka.broker.CHI.NONE.ajna_local:kafka.server.BrokerTopicMetrics.BytesInPerSec.BytesCount{device=*}:avg:1m-avg)", scopesSet)); + assertFalse(AlertUtils.isPatternPresentInWhiteList("COUNT(-75m:-15m:kafka1.broker.CHI.NONE.ajna_local:kafka.server.BrokerTopicMetrics.BytesInPerSec.BytesCount{device=*}:avg:1m-avg)", scopesSet)); } private static long _toBeginOfMinute(long millis){ @@ -46,7 +122,6 @@ public void testCronLoop() { try { Thread.sleep(200); }catch (Exception e) { - System.out.println("Exiting"); return; } testCronTrigger(); @@ -69,13 +144,6 @@ public void testCronTrigger() { Date nextFireTime = cronTrigger.getFireTimeAfter(previousMinuteLastSecondTime); - if(nextFireTime.equals(fireTime)) - { - System.out.println(String.format("Current Time %s: Fire Time %s Matches", sdf.format(new Date()), sdf.format(nextFireTime))); - } else { - System.out.println(String.format("Current Time %s: Fire Time %s", sdf.format(new Date()), sdf.format(nextFireTime))); - } - assertTrue(nextFireTime.equals(fireTime)); } @@ -84,8 +152,9 @@ public void testAbsoluteTimeStampsInExpression() { Long alertEnqueueTime = 1418319600000L; ArrayList expressionArray = new ArrayList (Arrays.asList( + "DIFF(COUNT(CULL_BELOW(SHIFT(-25h:-24h:core.IA2.SP1.na70:SFDC_type-Stats-name1-Login-name2-Success.Last1min{device=na70-app*}:avg:10m-max,#24h#),#3#,#value#)),SUM(COUNT(CULL_BELOW(FILL(#-1h#,#-0h#, #10m#, #0m#, #4#),-1h:-0h:core.IA2.SP1.na70:SFDC_type-Stats-name1-Login-name2-Success.Last1min{device=na70-app*}:avg:10m-max,#3#,#value#)),#-1#))", "-20m:-0d:scone.*.*.cs19:acs.DELETERequestProcessingTime_95thPercentile{device=*acs2-1*}:avg", - " SCALE( SUM( DIVIDE( DIFF( DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE( -1h:-40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.001#, #value# ), #union# ), #10m-sum# ), DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE( -2h:-40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Search_Fallbacks__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.01#, #value# ), #union# ), #10m-sum# ), #union# ), CULL_BELOW( DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE( 
-40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.001#, #value# ), #union# ), #10m-sum# ), #1000#, #value# ) ), #-1# ), #-100# ) ", + " SCALE( SUM( DIVIDE( DIFF( DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE(-1h:-40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.001#, #value# ), #union# ), #10m-sum# ), DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE(-2h:-40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Search_Fallbacks__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.01#, #value# ), #union# ), #10m-sum# ), #union# ), CULL_BELOW( DOWNSAMPLE( SUM( CULL_BELOW( DERIVATIVE( -40m:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max ), #0.001#, #value# ), #union# ), #10m-sum# ), #1000#, #value# ) ), #-1# ), #-100# ) ", "ABOVE(-1d:scope:metric:avg:4h-avg, #0.5#, #avg#)", "ABOVE(-1h:scope:metric:avg:4h-avg, #0.5#)", "ALIASBYTAG(-1s:scope:metric{device=*,source=*}:sum)", @@ -93,12 +162,13 @@ public void testAbsoluteTimeStampsInExpression() { "GROUPBY(-2d:-1d:scope:metricA{host=*}:avg,#(myhost[1-9])#, #SUM#, #union#)", "LIMIT( -21d:-1d:scope:metricA:avg:4h-avg, -1d:scope:metricB:avg:4h-avg,#1#)", "RANGE(-10d:scope:metric[ABCD]:avg:1d-max)", - "DOWNSAMPLE(DOWNSAMPLE(GROUPBYTAG(CULL_BELOW(-115m:-15m:iot-provisioning-server.PRD.SP2.-:health.status{device=provisioning-warden-*}:avg:1m-max, #1#, #value#), #DeploymentName#, #MAX#), #1m-max#), #10m-count#)", - "DOWNSAMPLE(CULL_BELOW(DERIVATIVE(-115m:-15m:iot-container.PRD.NONE.-:iot.flows.state.load.errors_count{flowsnakeEnvironmentName=iot-prd-stmfa-00ds70000000mqy}:zimsum:1m-sum), #0#, #value#), #10m-sum#)", - "DOWNSAMPLE(-2d:alerts.scheduled:alert-1429851:zimsum, #5m-sum#,#-2d#, #-0m#, #0#)" + "DOWNSAMPLE(DOWNSAMPLE(GROUPBYTAG(CULL_BELOW(-115m:-15m:iot-provisioning-server.SP2.-:health.status{device=provisioning-warden-*}:avg:1m-max, #1#, #value#), #DeploymentName#, #MAX#), #1m-max#), #10m-count#)", + "DOWNSAMPLE(CULL_BELOW(DERIVATIVE(-115m:-15m:iot-container.NONE.-:iot.flows.state.load.errors_count{flowsnakeEnvironmentName=iot-prd-stmfa-00ds70000000mqy}:zimsum:1m-sum), #0#, #value#), #10m-sum#)", + "DOWNSAMPLE(-2d:argus.alerts:scheduled{alertId=1429851}:zimsum, #5m-sum#,#-2d#, #-0m#, #0#)" )); ArrayList expectedOutput = new ArrayList (Arrays.asList( + "DIFF(COUNT(CULL_BELOW(SHIFT(1418229600000:1418233200000:core.IA2.SP1.na70:SFDC_type-Stats-name1-Login-name2-Success.Last1min{device=na70-app*}:avg:10m-max,#24h#),#3#,#value#)),SUM(COUNT(CULL_BELOW(FILL(#1418316000000#,#1418319600000#,#10m#,#0m#,#4#),1418316000000:1418319600000:core.IA2.SP1.na70:SFDC_type-Stats-name1-Login-name2-Success.Last1min{device=na70-app*}:avg:10m-max,#3#,#value#)),#-1#))", "1418318400000:1418319600000:scone.*.*.cs19:acs.DELETERequestProcessingTime_95thPercentile{device=*acs2-1*}:avg", 
"SCALE(SUM(DIVIDE(DIFF(DOWNSAMPLE(SUM(CULL_BELOW(DERIVATIVE(1418316000000:1418317200000:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max),#0.001#,#value#),#union#),#10m-sum#),DOWNSAMPLE(SUM(CULL_BELOW(DERIVATIVE(1418312400000:1418317200000:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Search_Fallbacks__SolrLive.Count{device=eu11-app*}:sum:1m-max),#0.01#,#value#),#union#),#10m-sum#),#union#),CULL_BELOW(DOWNSAMPLE(SUM(CULL_BELOW(DERIVATIVE(1418317200000:1418319600000:core.*.*.eu11:SFDC_type-Stats-name1-Search-name2-Client-name3-Query_Count__SolrLive.Count{device=eu11-app*}:sum:1m-max),#0.001#,#value#),#union#),#10m-sum#),#1000#,#value#)),#-1#),#-100#)", "ABOVE(1418233200000:1418319600000:scope:metric:avg:4h-avg,#0.5#,#avg#)", @@ -108,13 +178,18 @@ public void testAbsoluteTimeStampsInExpression() { "GROUPBY(1418146800000:1418233200000:scope:metricA{host=*}:avg,#(myhost[1-9])#,#SUM#,#union#)", "LIMIT(1416505200000:1418233200000:scope:metricA:avg:4h-avg,1418233200000:1418319600000:scope:metricB:avg:4h-avg,#1#)", "RANGE(1417455600000:1418319600000:scope:metric[ABCD]:avg:1d-max)", - "DOWNSAMPLE(DOWNSAMPLE(GROUPBYTAG(CULL_BELOW(1418312700000:1418318700000:iot-provisioning-server.PRD.SP2.-:health.status{device=provisioning-warden-*}:avg:1m-max,#1#,#value#),#DeploymentName#,#MAX#),#1m-max#),#10m-count#)", - "DOWNSAMPLE(CULL_BELOW(DERIVATIVE(1418312700000:1418318700000:iot-container.PRD.NONE.-:iot.flows.state.load.errors_count{flowsnakeEnvironmentName=iot-prd-stmfa-00ds70000000mqy}:zimsum:1m-sum),#0#,#value#),#10m-sum#)", - "DOWNSAMPLE(1418146800000:1418319600000:alerts.scheduled:alert-1429851:zimsum,#5m-sum#,#1418146800000#,#1418319600000#,#0#)" + "DOWNSAMPLE(DOWNSAMPLE(GROUPBYTAG(CULL_BELOW(1418312700000:1418318700000:iot-provisioning-server.SP2.-:health.status{device=provisioning-warden-*}:avg:1m-max,#1#,#value#),#DeploymentName#,#MAX#),#1m-max#),#10m-count#)", + "DOWNSAMPLE(CULL_BELOW(DERIVATIVE(1418312700000:1418318700000:iot-container.NONE.-:iot.flows.state.load.errors_count{flowsnakeEnvironmentName=iot-prd-stmfa-00ds70000000mqy}:zimsum:1m-sum),#0#,#value#),#10m-sum#)", + "DOWNSAMPLE(1418146800000:1418319600000:argus.alerts:scheduled{alertId=1429851}:zimsum,#5m-sum#,#1418146800000#,#1418319600000#,#0#)" )); UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert_name", expressionArray.get(0), "* * * * *"); + String alertName = "alert_name-" + TestUtils.createRandomName(); + Alert alert = new Alert(userService.findAdminUser(), + userService.findAdminUser(), + alertName, + expressionArray.get(0), + "* * * * *"); Notification notification = new Notification("notification_name", alert, "notifier_name", new ArrayList(), 23); Trigger trigger = new Trigger(alert, Trigger.TriggerType.GREATER_THAN_OR_EQ, "trigger_name", 2D, 5); @@ -140,17 +215,632 @@ public void testAbsoluteTimeStampsInExpression() { @Test public void testDetectDCFromExpression() { - MetricService _mService = system.getServiceFactory().getMetricService(); - int idx = 0; - ArrayList expressionList = new ArrayList<>(Arrays.asList( - "-2h:system.DC1.service:metric:max", - "-1m:system.DC2.service:metric{tagk=tagv}:min", - "DIVIDE(-15m:system.DC3.service:metric1:avg, -15m:system.DC4.service:metric2:avg)", - "-75m:system.dc5.service:metric:sum")); - String [][] actualOutput = new String[][]{{"DC1"},{"DC2"},{"DC4","DC3"},{"DC5"}}; - for(String 
currentExpression: expressionList) {
-            List<String> expectedOutput = _mService.getDCFromExpression(currentExpression);
-            assertEquals(expectedOutput, new ArrayList<>(Arrays.asList(actualOutput[idx++])));
+
+        Map<String, List<String>> testSuite = new HashMap<>();
+
+        testSuite.put("-30d:system.[DC1|DC2].[service1|service2]:metric:avg", Arrays.asList("DC1", "DC2"));
+        testSuite.put("-30d:*DC*:metric:max", Arrays.asList("DC1", "DC2", "DC3", "DC4", "DC5"));
+        testSuite.put("-2h:system.DC1.service:metric:max", Arrays.asList("DC1"));
+        testSuite.put("-1m:system.DC2.service:metric{tagk=tagv}:min", Arrays.asList("DC2"));
+        testSuite.put("DIVIDE(-15m:system.DC3.service:metric1:avg, -15m:system.DC4.service:metric2:avg)", Arrays.asList("DC3", "DC4"));
+        testSuite.put("-75m:system.dc5.service:metric:sum", Arrays.asList("DC5"));
+
+
+
+        CacheService cacheServiceMock = mock(CacheService.class);
+        when(cacheServiceMock.get("system.[DC1|DC2].[service1|service2]:metric{{}}")).thenReturn(CACHED_QUERIES_0);
+        when(cacheServiceMock.get("*DC*:metric{{}}")).thenReturn(CACHED_QUERIES_1);
+        DiscoveryService discoveryServiceMock = mock(DiscoveryService.class);
+
+        CachedDiscoveryService service = new CachedDiscoveryService(cacheServiceMock, discoveryServiceMock, system.getConfiguration(), null);
+        Provider<MetricReader<MetricQuery>> queryprovider = () -> new MetricReader<>(system.getServiceFactory().getTSDBService(), service,null);
+
+        DefaultMetricService _mServiceMock = new DefaultMetricService(system.getServiceFactory().getMonitorService(),null, null,queryprovider, system.getConfiguration());
+
+        for(Map.Entry<String, List<String>> currentSuite: testSuite.entrySet()) {
+            List<String> actualOutput = _mServiceMock.extractDCFromMetricQuery(_mServiceMock.getQueries(currentSuite.getKey()));
+            Collections.sort(actualOutput);
+            assertEquals(currentSuite.getValue(), actualOutput);
+        }
+    }
+
+
+    // ------------------------------------------------------------------------------------------------
+    // Alert Setter & Getter Tests
+    // ------------------------------------------------------------------------------------------------
+
+    @Test
+    public void testAlert_setExpression()
+    {
+        String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)";
+
+        String alertName = TestUtils.createRandomName();
+        Alert a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, "* * * * *"));
+        String returned_expression = a.getExpression();
+        assertEquals(expression, returned_expression);
+
+        a.setExpression(expression);
+        returned_expression = a.getExpression();
+        assertEquals(expression, returned_expression);
+
+        a = alertService.updateAlert(a);
+        alertService.deleteAlert(a.getName(), userService.findAdminUser());
+    }
+
+    @Test
+    public void testAlert_setInvalidExpression()
+    {
+        Alert a = null;
+        String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)";
+        String invalid_expression = ")(Ao23890OAOjkfak:0a89s--8103";
+
+        try
+        {
+            String alertName = TestUtils.createRandomName();
+            a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, "* * * * *"));
+            String returned_expression = a.getExpression();
+            assertEquals(expression, returned_expression);
+
+            a.setExpression(invalid_expression);
+            a = alertService.updateAlert(a);
+            assertTrue(false);
+        }
+        catch (RuntimeException e)
+        {
+            assertNotNull(a);
+            String returned_expression = a.getExpression();
+            assertEquals(invalid_expression, returned_expression); // NOTE, it's not in the database!
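+            // The in-memory entity still carries the rejected expression; only the persisted copy is unchanged.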
+ } + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, invalid_expression, "* * * * *"); + assertTrue(true); + a.validateAlert(); + assertTrue(false); + + } + catch (RuntimeException e) + { + assertNotNull(a); + assertEquals(a.getExpression(), invalid_expression); + } + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, "", "* * * * *"); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNull(a); + } + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, null, "* * * * *"); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNull(a); + } + } + + + @Test + public void testAlert_setCron() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String other_cron = "* */4 * * *"; + String valid_cron = "* * * * *"; + + String alertName = TestUtils.createRandomName(); + Alert a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, valid_cron)); + + String returned_cron = a.getCronEntry(); + assertEquals(valid_cron, returned_cron); + + a.setCronEntry(other_cron); + returned_cron = a.getCronEntry(); + assertEquals(other_cron, returned_cron); + + a = alertService.updateAlert(a); + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Ignore + @Test + public void testAlert_setInvalidCron() + { + Alert a = null; + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String invalid_cron = "+ + + + + +"; + String valid_cron = "* * * * *"; + + try + { + String alertName = TestUtils.createRandomName(); + a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, valid_cron)); + a.setCronEntry(invalid_cron); + a = alertService.updateAlert(a); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNotNull(a); + String returned_cron = a.getCronEntry(); + assertEquals(invalid_cron, returned_cron); + } + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, expression, invalid_cron); + assertTrue(true); + a.validateAlert(); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNotNull(a); + assertEquals(a.getCronEntry(), invalid_cron); + } + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, expression, ""); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNull(a); + } + + try + { + a = null; + String alertName = TestUtils.createRandomName(); + a = new Alert(admin, admin, alertName, expression, null); + assertTrue(false); + } + catch (RuntimeException e) + { + assertNull(a); + } + } + + + @Test + public void testAlert_setOwner() + { + Alert a = null; + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + + String alertName = TestUtils.createRandomName(); + a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, valid_cron)); + PrincipalUser u = a.getOwner(); + assertEquals(admin, u); + + String userName1 = TestUtils.createRandomName(); + PrincipalUser expectedUser = userService.updateUser(new PrincipalUser(admin, userName1, userName1 + "testuser@testcompany.com")); + a.setOwner(expectedUser); + a = alertService.updateAlert(a); + + u = 
a.getOwner(); + assertEquals(expectedUser, u); + + String userName2 = TestUtils.createRandomName(); + PrincipalUser expectedUser2 = userService.updateUser(new PrincipalUser(admin, userName2, userName2 + "testuser2@testcompany.com")); + a.setOwner(expectedUser2); + a = alertService.updateAlert(a); + + u = a.getOwner(); + assertEquals(expectedUser2, u); + + alertService.deleteAlert(a.getName(), userService.findUserByUsername(userName2)); + userService.deleteUser(userService.findUserByUsername(userName1)); + userService.deleteUser(userService.findUserByUsername(userName2)); + } + + + @Test + public void testAlert_setInvalidOwner() + { + Alert a = null; + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + + String alertName = TestUtils.createRandomName(); + a = alertService.updateAlert(new Alert(admin, admin, alertName, expression, valid_cron)); + PrincipalUser u = a.getOwner(); + assertEquals( admin, u ); + + try + { + a.setOwner(null); + assertTrue( false ); + } + catch (RuntimeException e) + { + u = a.getOwner(); + assertEquals( admin, u ); + } + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + + @Test + public void testAlert_setName() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + String name2 = TestUtils.createRandomName() + "sample2"; + + Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + String n = a.getName(); + assertEquals( name, n ); + + a.setName(name2); + a = alertService.updateAlert(a); + n = a.getName(); + assertEquals(name2, n); + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Test + public void testAlert_setInvalidName() + { + Alert a = null; + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + String n = a.getName(); + assertEquals( name, n ); + + try + { + a.setName(null); + assertTrue( false ); + } + catch (RuntimeException e) + { + n = a.getName(); + assertEquals( name, n ); + } + + // Is an empty name also valid or invalid? 
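+        // The block below assumes an empty name is rejected the same way as a null name.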
+ try + { + a.setName(""); + assertTrue( false ); + } + catch (RuntimeException e) + { + n = a.getName(); + assertEquals( name, n ); + } + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Test + public void testAlert_setShared() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + boolean b = a.isShared(); + assertFalse( b ); + + a.setShared(true); + a = alertService.updateAlert(a); + b = a.isShared(); + assertTrue( b ); + + a.setShared(false); + b = a.isShared(); + assertFalse( b ); + a = alertService.updateAlert(a); + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Test + public void testAlert_setMissingDataNotificationEnabled() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + boolean b = a.isMissingDataNotificationEnabled(); + assertFalse( b ); + + a.setMissingDataNotificationEnabled(true); + a = alertService.updateAlert(a); + b = a.isMissingDataNotificationEnabled(); + assertTrue( b ); + + a.setMissingDataNotificationEnabled(false); + a = alertService.updateAlert(a); + b = a.isMissingDataNotificationEnabled(); + assertFalse( b ); + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + + @Test + public void testAlert_setEnabled() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + + boolean b = a.isEnabled(); + assertFalse( b ); + + a.setEnabled(true); + a = alertService.updateAlert(a); + b = a.isEnabled(); + assertTrue( b ); + + a.setEnabled(false); + a = alertService.updateAlert(a); + b = a.isEnabled(); + assertFalse( b ); + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Test + public void testAlert_setTriggers() + { + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + Trigger trigger1 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "warning", 2D, 100); + Trigger trigger2 = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "critical", 50, 100); + + a.setTriggers(Arrays.asList(new Trigger[] { trigger1, trigger2 })); + a = alertService.updateAlert(a); + + for (Trigger trigger : a.getTriggers()) { + alertService.deleteTrigger(trigger); } + a.setTriggers(null); + a = alertService.updateAlert(a); + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); + } + + @Test + public void testAlert_setInvalidTriggers() + { + Alert a = null; + String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; + String valid_cron = "* * * * *"; + String name = TestUtils.createRandomName() + "sample"; + + a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); + + // Pass null list + a.setTriggers(null); + List triggers = a.getTriggers(); + assertTrue(triggers.isEmpty()); + + // Pass empty list + a.setTriggers(Arrays.asList(new 
Trigger[0])); + triggers = a.getTriggers(); + assertTrue(triggers.isEmpty()); + + // Pass list of null triggers. + // TODO - Alert should handle this case by filtering null triggers from the list OR raising an exception + + alertService.deleteAlert(a.getName(), userService.findAdminUser()); } + + +// @Test +// public void testAlert_setNotifications() +// { +// String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; +// String valid_cron = "* * * * *"; +// String name = TestUtils.createRandomName() + "sample"; +// +// Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); +// +// // Create and add 2 triggers +// Trigger trigger1 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "warning", 2D, 100); +// Trigger trigger2 = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "critical", 50, 100); +// a.setTriggers(Arrays.asList(new Trigger[] { trigger1, trigger2 })); +// // NOTE - putting alertService.updatealert() here causes a duplicate key exception +// +// // Create and add 2 notifications, one for each trigger +// Notification not1 = new Notification("notification_1x", a, "not1", null, 5000); +// Notification not2 = new Notification("notification_2x", a, "not2", null, 5000); +// +// a.setNotifications(Arrays.asList(new Notification[] {not1, not2})); +// not1.setTriggers(Arrays.asList(new Trigger[] {trigger1, trigger2})); +// not2.setTriggers(Arrays.asList(new Trigger[] {trigger1, trigger2})); +// +// a = alertService.updateAlert(a); +// +// +// // Clean up the Alert +// for (Trigger trigger : a.getTriggers()) { +// alertService.deleteTrigger(trigger); +// } +// a.setTriggers(null); +// a = alertService.updateAlert(a); +// +// for (Notification notification : a.getNotifications()) { +// notification.setTriggers(null); +// alertService.deleteNotification(notification); +// } +// a.setNotifications(null); +// a = alertService.updateAlert(a); +// +// alertService.deleteAlert(a.getName(), userService.findAdminUser()); +// } + + +// @Test +// public void testAlert_addNotification() +// { +// // set to list of notifications +// // set to empty or null -> result is empty. 
+// // invalid is not a valid scenario +// +// String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)"; +// String valid_cron = "* * * * *"; +// String name = TestUtils.createRandomName() + "sample"; +// +// Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron)); +// +// // Create and add 2 triggers +// Trigger trigger1 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "warning", 2D, 100); +// Trigger trigger2 = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "critical", 50, 100); +// a.setTriggers(Arrays.asList(new Trigger[] { trigger1, trigger2 })); +// +// // Create and add 2 notifications, one for each trigger +// Notification not1 = new Notification("notification_1", a, "not1", null, 5000); +// Notification not2 = new Notification("notification_2", a, "not2", null, 5000); +// a.setNotifications(Arrays.asList(new Notification[] {not1, not2})); +// +// not1.setTriggers(Arrays.asList(new Trigger[] {trigger1})); +// not2.setTriggers(Arrays.asList(new Trigger[] {trigger2})); +// +// a = alertService.updateAlert(a); +// +// +// // Add a trigger and Notification +// +// // Add a trigger +// Trigger trigger3 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "notice", 10, 100); +// List t = new ArrayList( a.getTriggers()); +// t.add(trigger3); +// a.setTriggers(t); +// +// // Add a notification +// Notification not3 = new Notification("notification_3", a, "not3", null, 5000); +// a.addNotification(not3); +// not3.setTriggers(Arrays.asList(new Trigger[] { trigger3 })); +// a = alertService.updateAlert(a); +// +// +// // Clean up the Alert +// for (Trigger trigger : a.getTriggers()) { +// alertService.deleteTrigger(trigger); +// } +// a.setTriggers(null); +// a = alertService.updateAlert(a); +// +// for (Notification notification : a.getNotifications()) { +// notification.setTriggers(null); +// alertService.deleteNotification(notification); +// } +// a.setNotifications(null); +// a = alertService.updateAlert(a); +// +// alertService.deleteAlert(a.getName(), userService.findAdminUser()); +// } +// +// +// @Test +// public void testAlert_removeNotification() +// { +// // set to list of notifications +// // set to empty or null -> result is empty. 
+//      // invalid is not a valid scenario
+//
+//      String expression = "ABOVE(-4h:scope:metric:avg:4h-avg,#0.5#)";
+//      String valid_cron = "* * * * *";
+//      String name = TestUtils.createRandomName() + "sample";
+//
+//      Alert a = alertService.updateAlert(new Alert(admin, admin, name, expression, valid_cron));
+//
+//      // Create and add 3 triggers
+//      Trigger trigger1 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "warning", 2D, 100);
+//      Trigger trigger2 = new Trigger(a, Trigger.TriggerType.GREATER_THAN, "critical", 50, 100);
+//      Trigger trigger3 = new Trigger(a, Trigger.TriggerType.GREATER_THAN_OR_EQ, "notice", 10, 100);
+//      a.setTriggers(Arrays.asList(new Trigger[] { trigger1, trigger2, trigger3 }));
+//
+//      // Create and add 3 notifications, one for each trigger
+//      Notification not1 = new Notification("notification_1", a, "not1", null, 5000);
+//      Notification not2 = new Notification("notification_2", a, "not2", null, 5000);
+//      Notification not3 = new Notification("notification_3", a, "not3", null, 5000);
+//      a.setNotifications(Arrays.asList(new Notification[] {not1, not2, not3}));
+//
+//      not1.setTriggers(Arrays.asList(new Trigger[] {trigger1}));
+//      not2.setTriggers(Arrays.asList(new Trigger[] {trigger2}));
+//      not3.setTriggers(Arrays.asList(new Trigger[] {trigger3}));
+//
+//      a = alertService.updateAlert(a);
+//
+//      // Remove a trigger and notification
+//
+//      // Remove trigger3 from the alert
+//      List t = new ArrayList( a.getTriggers());
+//      t.remove(trigger3);
+//      a.setTriggers(t);
+//
+//      // Remove not3 from the alert and trigger
+//      List ns = new ArrayList(a.getNotifications());
+//      ns.remove(not3);
+//      not3.setTriggers(null);
+//      a.setNotifications(ns);
+//
+//      // alertService.deleteNotification(not3); // This seems not to be necessary but I'm not sure how the notification gets deleted
+//
+//      a = alertService.updateAlert(a);
+//
+//      // Clean up the Alert
+//      for (Trigger trigger : a.getTriggers()) {
+//          alertService.deleteTrigger(trigger);
+//      }
+//      a.setTriggers(null);
+//      a = alertService.updateAlert(a);
+//
+//      for (Notification notification : a.getNotifications()) {
+//          notification.setTriggers(null);
+//          alertService.deleteNotification(notification);
+//      }
+//      a.setNotifications(null);
+//      a = alertService.updateAlert(a);
+//
+//      alertService.deleteAlert(a.getName(), userService.findAdminUser());
+//  }
+
+    // TODO - validate notification content and test validation.
+    // TODO - validate trigger content and test validation.
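The disabled tests above all follow the same wiring order, and the inline notes hint at why it matters: persisting the alert between setTriggers() and setNotifications() reportedly raises a duplicate-key exception, so all wiring happens on the in-memory entity before a single updateAlert(). A condensed sketch of that order, using only the constructors already exercised above (this distills the disabled tests; it is not project code):

```java
import java.util.Arrays;

import com.salesforce.dva.argus.entity.Alert;
import com.salesforce.dva.argus.entity.Notification;
import com.salesforce.dva.argus.entity.Trigger;
import com.salesforce.dva.argus.service.AlertService;

// Condensed wiring order used by the disabled tests above. Note the single
// updateAlert() at the end: the inline comments report that persisting between
// setTriggers() and setNotifications() produces a duplicate-key exception, so
// everything is wired on the detached entity first.
final class AlertWiringSketch {

    static Alert wire(AlertService alertService, Alert alert) {
        Trigger warning = new Trigger(alert, Trigger.TriggerType.GREATER_THAN_OR_EQ, "warning", 2D, 100);
        Trigger critical = new Trigger(alert, Trigger.TriggerType.GREATER_THAN, "critical", 50, 100);
        alert.setTriggers(Arrays.asList(warning, critical));

        Notification n1 = new Notification("notification_1", alert, "not1", null, 5000);
        Notification n2 = new Notification("notification_2", alert, "not2", null, 5000);
        alert.setNotifications(Arrays.asList(n1, n2));

        // Each notification also points back at the trigger(s) it fires on.
        n1.setTriggers(Arrays.asList(warning));
        n2.setTriggers(Arrays.asList(critical));

        return alertService.updateAlert(alert); // persist everything in one step
    }
}
```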
} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/CronTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/CronTest.java index ece0407c8..04a8f99f8 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/CronTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/CronTest.java @@ -36,6 +36,10 @@ import org.slf4j.LoggerFactory; import java.util.Calendar; import java.util.Random; +import java.util.Date; +import org.quartz.CronScheduleBuilder; +import org.quartz.CronTrigger; +import org.quartz.TriggerBuilder; import static org.junit.Assert.*; @@ -54,11 +58,6 @@ public static void beforeClass() { LoggerFactory.getLogger(CronTest.class).info("Using a random seed of " + seed); } - @Test - public void testIsValidWithSpecialEntry() { - assertTrue(Cron.isValid("@monTHly")); - } - @Test public void testTablesAlways() { CronTabEntry cron = new CronTabEntry("* * * * *"); @@ -161,11 +160,370 @@ public void testTablesMixedList() { } } } - + + private final static + String[] inputCrons = { + "* * * * *", + "* * ? * 2-6", + "* */1 * * *", + "* */12 * * *", + "* */15 * * *", + "* */2 * * *", + "* */3 * * *", + "* */4 * * *", + "* */6 * * *", + "* */8 * * *", + "* 0-12 ? * 2-6", + "* 11-23 ? * 2-6", + "* 12-0 ? * 2-6", + "* 21-11 ? * 2-6", + "* 21-9 ? * 2-6", + "* 4-18 ? * 2-6", + "* 5-17 ? * 2-6", + "* 6-18 ? * 2-6", + "*/1 * * * *", + "*/10 * * * *", + "*/10 * ? * 2-6", + "*/10 0-12 ? * 2-6", + "*/10 11-23 ? * 2-6", + "*/10 12-0 ? * 2-6", + "*/10 21-11 ? * 2-6", + "*/10 21-9 ? * 2-6", + "*/10 4-18 ? * 2-6", + "*/10 5-17 ? * 2-6", + "*/10 6-18 ? * 2-6", + "*/11 * * * *", + "*/12 */5 * * *", + "*/13 * * * *", + "*/15 * * * *", + "*/17 * * * *", + "*/19 * * * *", + "*/2 * * * *", + "*/20 * * * *", + "*/23 * * * *", + "*/25 * * * *", + "*/29 * * * *", + "*/3 * * * *", + "*/3 * ? * 2-6", + "*/3 11-23 ? * 2-6", + "*/3 12-0 ? * 2-6", + "*/3 21-11 ? * 2-6", + "*/3 21-9 ? * 2-6", + "*/3 4-18 ? * 2-6", + "*/3 5-17 ? * 2-6", + "*/3 6-18 ? * 2-6", + "*/30 * * * *", + "*/30 * ? * 1-5", + "*/30 * ? * 2-6", + "*/30 */6 * * *", + "*/30 0-12 ? * 2-6", + "*/30 11-21 ? * 2-6", + "*/30 11-23 ? * 1-5", + "*/30 11-23 ? * 2-6", + "*/30 12-0 ? * 1-5", + "*/30 12-0 ? * 2-6", + "*/30 17-5 ? * 2-6", + "*/30 18-4 ? * 2-6", + "*/30 18-6 ? * 2-6", + "*/30 21-11 ? * 1-5", + "*/30 21-11 ? * 2-6", + "*/30 21-9 ? * 1-5", + "*/30 21-9 ? * 2-6", + "*/30 23-11 ? * 2-6", + "*/30 4-18 ? * 1-5", + "*/30 4-18 ? * 2-6", + "*/30 5-17 ? * 1-5", + "*/30 5-17 ? * 2-6", + "*/30 6-18 ? * 1-5", + "*/30 6-18 ? * 2-6", + "*/30 9-21 ? * 2-6", + "*/31 * * * *", + "*/35 * * * *", + "*/4 * * * *", + "*/40 * * * *", + "*/45 * * * *", + "*/5 * * * *", + "*/5 * ? * 2-6", + "*/5 0-12 ? * 2-6", + "*/5 11-23 ? * 2-6", + "*/5 12-0 ? * 2-6", + "*/5 21-11 ? * 2-6", + "*/5 21-9 ? * 2-6", + "*/5 4-18 ? * 2-6", + "*/5 5-17 ? * 2-6", + "*/5 6-18 ? * 2-6", + "*/53 * * * *", + "*/55 * ? * 2-6", + "*/55 0-12 ? * 2-6", + "*/55 11-23 ? * 2-6", + "*/55 12-0 ? * 2-6", + "*/55 21-11 ? * 2-6", + "*/55 21-9 ? * 2-6", + "*/55 4-18 ? * 2-6", + "*/55 5-17 ? * 2-6", + "*/55 6-18 ? 
* 2-6", + "*/59 * * * *", + "*/6 * * * *", + "*/7 * * * *", + "*/9 * * * *", + "/10 * * * *", + "0 * * * *", + "0 */1 * * *", + "0 */10 * * *", + "0 */12 * * *", + "0 */15 * * *", + "0 */19 * * *", + "0 */2 * * *", + "0 */3 * * *", + "0 */4 * * *", + "0 */5 * * *", + "0 */6 * * *", + "0 */8 * * *", + "0 0 * * *", + "0 1 * * *", + "0 1,7,13,19 * * *", + "0 10 * * *", + "0 11 * * *", + "0 14 * * *", + "0 16 * * *", + "0 16 */1 * *", + "0 17 * * *", + "0 18 * * *", + "0 2 * * *", + "0 2,14 * * *", + "0 20 * * *", + "0 3,15 * * *", + "0 4,16 * * *", + "0 5 * * *", + "0 6 * * *", + "0 7 * * *", + "0 8 * * *", + "0 8,17 * * *", + "0,10,20,30,40,50 * * * *", + "0,15,30,45 * * * *", + "0/1 * * * *", + "0 0 1 1 *", // Anually + "0 0 1 * *", // Monthly + "0 0 * * 1", // Weekly + "0 0 * * *", // Daily or Midnight + "0 * * * *", // hourly + }; + + String[] invalidInputCrons = { + "0 0 * * 0", // used as Weekly // not valid - fix DB? might be no instances + "0 0 * * 0", // not valid - found in DB + }; + + // TODO - contact teams using these and have them fix up the crons - in Quartz these are Sun-Thu but it looks + // like the intent may be Mon-Fri + // TODO - FIRST you must check that the UI validator maps 1-7 as Sun-Sat as documented. If it can't be restricted, add back the 0->1 mapping for dow,dom + String[] wrongCrons = { + "*/30 * ? * 1-5", + "*/30 11-23 ? * 1-5", + "*/30 12-0 ? * 1-5", + "*/30 21-11 ? * 1-5", + "*/30 21-9 ? * 1-5", + "*/30 4-18 ? * 1-5", + "*/30 5-17 ? * 1-5", + "*/30 6-18 ? * 1-5", + }; + @Test public void testConvertCronEntryToQuartzCronEntry() { - assertEquals(Cron.convertToQuartzCronEntry(" * 5-17 * * * "), "0 * 5-17 * * ?"); - assertEquals(Cron.convertToQuartzCronEntry("* 5-17 * * 1-5"), "0 * 5-17 * * 1-5"); + + assertEquals(Cron.convertToQuartzCronEntry("0 0 * * 0"), "0 0 0 ? * 0"); // TODO - not valid unless we restore 0->1 for dow,dom + assertEquals(Cron.convertToQuartzCronEntry("0 0 0 * 0"), "0 0 0 0 * 0"); // NOTE - not valid for quartz unless we restore 0->1 for dow,dom + assertEquals(Cron.convertToQuartzCronEntry(" * 5-17 * * * "), "0 * 5-17 ? * *"); + assertEquals(Cron.convertToQuartzCronEntry("* 5-17 * * 1-5"), "0 * 5-17 ? * 1-5"); + assertEquals(Cron.convertToQuartzCronEntry("0 0 * *"), "0 0 0 * * ?"); + assertEquals(Cron.convertToQuartzCronEntry("0 */30 * * *"), "0 0 */30 ? * *"); + assertEquals(Cron.convertToQuartzCronEntry("* 5-17 * * 1-5"), "0 * 5-17 ? * 1-5"); + assertEquals(Cron.convertToQuartzCronEntry("* 5-17 ? * ?"), "0 * 5-17 ? * *"); + assertEquals(Cron.convertToQuartzCronEntry("* 5-17 ? * *"), "0 * 5-17 ? * *"); + + // Special conversions + assertEquals(Cron.convertToQuartzCronEntry("0 0 1 1 *"), "0 0 0 1 1 ?"); // ANNUALLY or YEARLY + assertEquals(Cron.convertToQuartzCronEntry("0 0 1 * *"), "0 0 0 1 * ?"); // MONTHLY + assertEquals(Cron.convertToQuartzCronEntry("0 0 * * 1"), "0 0 0 ? * 1"); // WEEKLY + assertEquals(Cron.convertToQuartzCronEntry("0 0 * * *"), "0 0 0 ? * *"); // DAILY or MIDNIGHT + assertEquals(Cron.convertToQuartzCronEntry("0 * * * *"), "0 0 * ? * *"); // HOURLY + + // Convert Argus alert cron expressions known in Dec 2018. 
+ for (int i = 0 ; i < inputCrons.length; i++) + { + String inputCron = inputCrons[i]; + String quartzCron = ""; + boolean converted = false; + boolean isValid = false; + try + { + quartzCron = Cron.convertToQuartzCronEntry(inputCrons[i]); + converted = true; + isValid = Cron.isCronEntryValid(inputCron); +// if (!isValid) +// System.out.println(String.format("%d: '%s' -> '%s' is %sa valid quartz cron.", i, inputCron, quartzCron, isValid ? "": "NOT ")); + } + catch (Exception e) + { + System.out.println(String.format("%d: '%s' -> '%s' is invalid or can't be converted to a quartz cron: %s", i, inputCron, quartzCron, e.getMessage())); + } + assertTrue(converted); + assertTrue(isValid); + } + } + + @Test + public void testValidCronEntry() { + assertFalse(Cron.isCronEntryValid("0 0 * * 0")); // NOT Valid Quartz TODO - if UI can't validate we need to re-enable conversion of dow, dom 0->1 + assertFalse(Cron.isCronEntryValid("0 0 0 * 0")); // NOT Valid Quartz + assertFalse(Cron.isCronEntryValid("0 0 1 * 0")); // NOT Valid Quartz + assertFalse(Cron.isCronEntryValid("0 0 1 * 1")); // NOT Valid Quartz + assertTrue(Cron.isCronEntryValid(" * 5-17 * * * ")); + + assertTrue(Cron.isCronEntryValid("* 5-17 * * 1-5")); + assertTrue(Cron.isCronEntryValid("* 5-17 ? * 1-5")); + assertTrue(Cron.isCronEntryValid("* 5-17 ? * ?")); + assertTrue(Cron.isCronEntryValid("* 5-17 * * ?")); + assertTrue(Cron.isCronEntryValid("* 5-17 ? * *")); + + assertTrue(Cron.isCronEntryValid("0 0 * *")); + + // Special conversions + assertTrue(Cron.isCronEntryValid("0 0 1 1 *")); // ANNUALLY or YEARLY + assertTrue(Cron.isCronEntryValid("0 0 1 * *")); // MONTHLY + assertTrue(Cron.isCronEntryValid("0 0 * * 1")); // WEEKLY -> should become 0 0 0 ? * 1 + assertTrue(Cron.isCronEntryValid("0 0 * * *")); // DAILY or MIDNIGHT + assertTrue(Cron.isCronEntryValid("0 * * * *")); // HOURLY + + for (int i = 1; i < 24; i++) + { + String cron=String.format("0 */%d * * *", i); + assertTrue(Cron.isCronEntryValid(cron)); + } + + for (int i = 1; i < 60; i++) + { + String cron=String.format("*/%d 0 * * *", i); + assertTrue(Cron.isCronEntryValid(cron)); + } + + } + + /* + Old vs new cron translation test. + Iterate through all known expressions. + Iterate through some time period in 1 minute steps. + Compare old vs new implementation. + + */ + + // This is the original conversion function which we are using for comparison with the new one. + private static String convertToOldQuartzCronEntry(String cronEntry) { + // adding seconds field + cronEntry = "0 " + cronEntry.trim(); + + // if day of the week is not specified, substitute it with ?, so as to prevent conflict with month field + if(cronEntry.charAt(cronEntry.length() - 1) == '*') { + return cronEntry.substring(0, cronEntry.length() - 1) + "?"; + }else { + return cronEntry; + } + } + + private CronTrigger makeCronTrigger(String quartzExpr) + { + CronTrigger trigger = null; + try + { + trigger = TriggerBuilder.newTrigger(). + withSchedule(CronScheduleBuilder.cronSchedule(quartzExpr)). 
+ build(); + } + catch (Exception e) {} + return trigger; + } + + + private boolean compareTranslations(String cronExpr, int steps) + { + boolean success = true; + String oldQuartz = null; + String newQuartz = null; + CronTrigger oldTrigger = null; + CronTrigger newTrigger = null; + Date oldPrev = null; + Date newPrev = null; + Date oldWhen = null; + Date newWhen = null; + int i = 0; + + try + { + oldQuartz = convertToOldQuartzCronEntry(cronExpr); + newQuartz = Cron.convertToQuartzCronEntry(cronExpr); + + oldTrigger = makeCronTrigger(oldQuartz); + newTrigger = makeCronTrigger(newQuartz); + + if (oldTrigger == null) + { + System.out.println(String.format("Old Translation of Cron expr '%s' was NOT VALID. Skipping comparison...", cronExpr)); + } + else + { + assertNotNull(newTrigger); + + oldWhen = new Date(); + newWhen = (Date) oldWhen.clone(); + + for (i = 0; i < steps; i++) + { + oldPrev = (Date) oldWhen.clone(); + newPrev = (Date) newWhen.clone(); + + oldWhen = oldTrigger.getFireTimeAfter(oldWhen); + newWhen = newTrigger.getFireTimeAfter(newWhen); + + if (oldWhen == null || newWhen == null) + { + System.out.println("Reached maximum time, end of comparison."); + assertTrue( oldWhen == newWhen ); + break; + } + + boolean isSame = oldWhen.equals(newWhen); + + if (!isSame) + { +// System.out.println(String.format("Testing Cron '%s' -> old = '%s', new = '%s' --> Comparison failed at step %d", cronExpr, oldQuartz, newQuartz, i )); +// System.out.println(String.format("%s %s %s", i, +// oldWhen.toString(), isSame ? "==" : "!=", newWhen.toString())); + success = false; + break; + } + } + } + } + catch (RuntimeException e) + { + e.printStackTrace(); + success = false; + } + + return success; + } + + @Test + public void verifyCronTranslation() throws Exception + { + boolean allPassed = true; + + for (int i = 0; i < inputCrons.length; i++) { + boolean passed = compareTranslations(inputCrons[i], 10000); + allPassed = passed && allPassed; + } + + assertTrue(allPassed); } } -/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */ +/* Copyright (c) 2016-2019, Salesforce.com, Inc. All rights reserved. 
*/ diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/DiscoveryUtilsTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/DiscoveryUtilsTest.java new file mode 100644 index 000000000..7c901bd78 --- /dev/null +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/DiscoveryUtilsTest.java @@ -0,0 +1,44 @@ +package com.salesforce.dva.argus.util; + +import org.junit.Test; +import static org.junit.Assert.*; +import com.salesforce.dva.argus.service.DiscoveryService; +import com.salesforce.dva.argus.service.tsdb.MetricQuery; + +public class DiscoveryUtilsTest { + + private static final long MAX_DATAPOINTS_PER_RESPONSE = 5000000; + + @Test + public void testGetMaxTimeSeriesAllowed() { + MetricQuery query = new MetricQuery("test","test",null,0L,0L); + long relTime = System.currentTimeMillis()-10*60000; + query.setStartTimestamp(relTime+0L); + query.setEndTimestamp(relTime+60000l); + query.setDownsamplingPeriod(60000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE); + + query.setDownsamplingPeriod(30000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE/2); + + query.setDownsamplingPeriod(0l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE); + + query.setDownsamplingPeriod(120000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE); + + query.setStartTimestamp(relTime+0L); + query.setEndTimestamp(relTime+480000l); + query.setDownsamplingPeriod(60000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE/8); + + query.setDownsamplingPeriod(2*60000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE/4); + + long relTimeInSeconds = relTime/1000; + query.setStartTimestamp(relTimeInSeconds+0L); + query.setEndTimestamp(relTimeInSeconds+60l); + query.setDownsamplingPeriod(60000l); + assertEquals((long)DiscoveryService.maxTimeseriesAllowed(query, MAX_DATAPOINTS_PER_RESPONSE), MAX_DATAPOINTS_PER_RESPONSE); + } +} diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/QueryUtilsTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/QueryUtilsTest.java index fa5d70b89..194496a1b 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/QueryUtilsTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/QueryUtilsTest.java @@ -2,96 +2,296 @@ import static org.junit.Assert.*; +import java.util.List; + import org.junit.Test; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.service.metric.transform.TransformFactory.Function; +import com.salesforce.dva.argus.service.tsdb.AnnotationQuery; + +public class QueryUtilsTest { + + @Test + public void testGetQueryContextWithSingleExpression() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("-1h:argus.core:alerts.scheduled:zimsum:1m-sum", relativeTo); + assertNull(context.getTransform()); + assertNull(context.getConstants()); + assertEquals(context.getChildContexts().size(),0); + assertNotNull(context.getExpression()); + TSDBQueryExpression expression = context.getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), 
"alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + } + + @Test + public void testGetQueryWithSingleTransform() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("DOWNSAMPLE(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, #1h-sum#)", relativeTo); + assertEquals(context.getTransform(), Function.DOWNSAMPLE); + assertEquals(context.getConstants().size(), 1); + assertEquals(context.getConstants().get(0), "1h-sum"); + assertEquals(context.getChildContexts().size(),1); + assertNull(context.getExpression()); + assertNull(context.getChildContexts().get(0).getTransform()); + assertNotNull(context.getChildContexts().get(0).getExpression()); + TSDBQueryExpression expression = context.getChildContexts().get(0).getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + } + + @Test + public void testGetQueryWithNestedTransform() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("UNION(DOWNSAMPLE(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, #1h-sum#), SUM(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, -1h:argus.core:alerts.evaluated:zimsum:1m-sum))", relativeTo); + assertEquals(context.getTransform(), Function.UNION); + assertEquals(context.getConstants().size(), 0); + assertEquals(context.getChildContexts().size(),2); + assertNull(context.getExpression()); + + QueryContext context1 = context.getChildContexts().get(0); + assertEquals(context1.getTransform(), Function.DOWNSAMPLE); + assertEquals(context1.getConstants().size(), 1); + assertEquals(context1.getConstants().get(0), "1h-sum"); + assertEquals(context1.getChildContexts().size(),1); + assertNull(context1.getExpression()); + assertNotNull(context1.getChildContexts().get(0).getExpression()); + assertNull(context.getExpression()); + TSDBQueryExpression expression = context1.getChildContexts().get(0).getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + + QueryContext context2 = context.getChildContexts().get(1); + assertEquals(context2.getTransform(), Function.SUM); + assertEquals(context2.getConstants().size(), 0); + assertEquals(context2.getChildContexts().size(),2); + assertNull(context2.getExpression()); + + TSDBQueryExpression expression1 = context2.getChildContexts().get(0).getExpression(); + assertEquals(expression1.getScope(), "argus.core"); + 
assertEquals(expression1.getMetric(), "alerts.scheduled"); + assertEquals(expression1.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression1.getDownsampler().toString(), "SUM"); + assertEquals(expression1.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression1.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression1.getEndTimestamp(),new Long(relativeTo)); + + TSDBQueryExpression expression2 = context2.getChildContexts().get(1).getExpression(); + assertEquals(expression2.getScope(), "argus.core"); + assertEquals(expression2.getMetric(), "alerts.evaluated"); + assertEquals(expression2.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression2.getDownsampler().toString(), "SUM"); + assertEquals(expression2.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression2.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression2.getEndTimestamp(),new Long(relativeTo)); + } + + @Test + public void testGetQueryWithNestedTransformWithDivide1() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("DIVIDE(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, SUM(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, -1h:argus.core:alerts.evaluated:zimsum:1m-sum))", relativeTo); + assertEquals(context.getTransform(), Function.DIVIDE); + assertEquals(context.getConstants().size(), 0); + assertEquals(context.getChildContexts().size(),2); + assertNull(context.getExpression()); + + QueryContext context1 = context.getChildContexts().get(0); + assertEquals(context1.getTransform(), null); + assertNull(context1.getConstants()); + assertEquals(context1.getChildContexts().size(),0); + assertNotNull(context1.getExpression()); + TSDBQueryExpression expression = context1.getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + + QueryContext context2 = context.getChildContexts().get(1); + assertEquals(context2.getTransform(), Function.SUM); + assertEquals(context2.getConstants().size(), 0); + assertEquals(context2.getChildContexts().size(),2); + assertNull(context2.getExpression()); + + TSDBQueryExpression expression1 = context2.getChildContexts().get(0).getExpression(); + assertEquals(expression1.getScope(), "argus.core"); + assertEquals(expression1.getMetric(), "alerts.scheduled"); + assertEquals(expression1.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression1.getDownsampler().toString(), "SUM"); + assertEquals(expression1.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression1.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression1.getEndTimestamp(),new Long(relativeTo)); + + TSDBQueryExpression expression2 = context2.getChildContexts().get(1).getExpression(); + assertEquals(expression2.getScope(), "argus.core"); + assertEquals(expression2.getMetric(), "alerts.evaluated"); + assertEquals(expression2.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression2.getDownsampler().toString(), "SUM"); + assertEquals(expression2.getDownsamplingPeriod(), new Long(60000)); + 
assertEquals(expression2.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression2.getEndTimestamp(),new Long(relativeTo)); + } + + @Test + public void testGetQueryWithNestedTransformWithDivide2() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("DIVIDE(SUM(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, -1h:argus.core:alerts.evaluated:zimsum:1m-sum), -1h:argus.core:alerts.scheduled:zimsum:1m-sum)", relativeTo); + assertEquals(context.getTransform(), Function.DIVIDE); + assertEquals(context.getConstants().size(), 0); + assertEquals(context.getChildContexts().size(),2); + assertNull(context.getExpression()); + + QueryContext context1 = context.getChildContexts().get(1); + assertEquals(context1.getTransform(), null); + assertNull(context1.getConstants()); + assertEquals(context1.getChildContexts().size(),0); + assertNotNull(context1.getExpression()); + TSDBQueryExpression expression = context1.getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + + QueryContext context2 = context.getChildContexts().get(0); + assertEquals(context2.getTransform(), Function.SUM); + assertEquals(context2.getConstants().size(), 0); + assertEquals(context2.getChildContexts().size(),2); + assertNull(context2.getExpression()); + + TSDBQueryExpression expression1 = context2.getChildContexts().get(0).getExpression(); + assertEquals(expression1.getScope(), "argus.core"); + assertEquals(expression1.getMetric(), "alerts.scheduled"); + assertEquals(expression1.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression1.getDownsampler().toString(), "SUM"); + assertEquals(expression1.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression1.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression1.getEndTimestamp(),new Long(relativeTo)); + + TSDBQueryExpression expression2 = context2.getChildContexts().get(1).getExpression(); + assertEquals(expression2.getScope(), "argus.core"); + assertEquals(expression2.getMetric(), "alerts.evaluated"); + assertEquals(expression2.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression2.getDownsampler().toString(), "SUM"); + assertEquals(expression2.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression2.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression2.getEndTimestamp(),new Long(relativeTo)); + } + + + @Test + public void testEmptyFillTransform() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("FILL(#-1d#,#-0d#,#4h#,#0m#,#100#)", relativeTo); + assertEquals(context.getTransform(), Function.FILL); + assertEquals(context.getConstants().size(), 5); + assertEquals(context.getConstants().get(0), "-1d"); + assertEquals(context.getConstants().get(1), "-0d"); + assertEquals(context.getConstants().get(2), "4h"); + assertEquals(context.getConstants().get(3), "0m"); + assertEquals(context.getConstants().get(4), "100"); + assertEquals(context.getChildContexts().size(),0); + 
assertNull(context.getExpression()); + } + + @Test + public void testThreeLevelsNestedTransform() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = QueryUtils.getQueryContext("SUM(DOWNSAMPLE(UNION(-1h:argus.core:alerts.scheduled:zimsum:1m-sum,-1h:argus.core:alerts.evaluated:zimsum:1m-sum),#1m-avg#),#union#)", relativeTo); + assertEquals(context.getTransform(), Function.SUM); + assertEquals(context.getConstants().size(), 1); + assertEquals(context.getConstants().get(0), "union"); + assertEquals(context.getChildContexts().size(),1); + assertNull(context.getExpression()); + + QueryContext context1 = context.getChildContexts().get(0); + assertEquals(context1.getTransform(), Function.DOWNSAMPLE); + assertEquals(context1.getConstants().size(), 1); + assertEquals(context1.getConstants().get(0), "1m-avg"); + assertEquals(context1.getChildContexts().size(),1); + assertNull(context1.getExpression()); + + QueryContext context2 = context1.getChildContexts().get(0); + assertEquals(context2.getTransform(), Function.UNION); + assertEquals(context2.getConstants().size(), 0); + assertEquals(context2.getChildContexts().size(),2); + assertNull(context2.getExpression()); + + TSDBQueryExpression expression = context2.getChildContexts().get(0).getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.scheduled"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + + + TSDBQueryExpression expression1 = context2.getChildContexts().get(1).getExpression(); + assertEquals(expression1.getScope(), "argus.core"); + assertEquals(expression1.getMetric(), "alerts.evaluated"); + assertEquals(expression1.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression1.getDownsampler().toString(), "SUM"); + assertEquals(expression1.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression1.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression1.getEndTimestamp(),new Long(relativeTo)); + } + + @Test + public void testGetScopesFromExpression() { + List scopes = QueryUtils.getScopesFromExpression("SUM(DOWNSAMPLE(UNION(-1h:argus.core:alerts.scheduled:zimsum:1m-sum,-1h:argus.core:alerts.evaluated:zimsum:1m-sum),#1m-avg#),#union#)"); + assertEquals(scopes.size(),1); + assertEquals(scopes.get(0),"argus.core"); + + scopes = QueryUtils.getScopesFromExpression("FILL(#-1d#,#-0d#,#4h#,#0m#,#100#)"); + assertEquals(scopes.size(),0); + + scopes = QueryUtils.getScopesFromExpression("DIVIDE(SUM(-1h:argus.core1:alerts.scheduled:zimsum:1m-sum, -1h:argus.core2:alerts.evaluated:zimsum:1m-sum), -1h:argus.core3:alerts.scheduled:zimsum:1m-sum)"); + assertEquals(scopes.size(),3); + assertTrue(scopes.contains(new String("argus.core1"))); + assertTrue(scopes.contains(new String("argus.core2"))); + assertTrue(scopes.contains(new String("argus.core3"))); + scopes = QueryUtils.getScopesFromExpression("DIVIDE(SUM(-1h:system.*.NONE:alerts.scheduled:zimsum:1m-sum, -1h:argus.core2:alerts.evaluated:zimsum:1m-sum), -1h:argus.core3:alerts.scheduled:zimsum:1m-sum)"); + assertEquals(scopes.size(),3); + } + + @Test + public void testNotEqualsQuery() { + long relativeTo = System.currentTimeMillis(); + QueryContext context = 
QueryUtils.getQueryContext("-1h:argus.core:alerts.evaluated{host!=myhostname7.abc.com}:zimsum:1m-sum", relativeTo); + assertNull(context.getTransform()); + assertNull(context.getConstants()); + assertEquals(context.getChildContexts().size(),0); + assertNotNull(context.getExpression()); + TSDBQueryExpression expression = context.getExpression(); + assertEquals(expression.getScope(), "argus.core"); + assertEquals(expression.getMetric(), "alerts.evaluated"); + assertEquals(expression.getAggregator().toString(), "ZIMSUM"); + assertEquals(expression.getDownsampler().toString(), "SUM"); + assertEquals(expression.getTags().keySet().size(),1); + assertEquals(expression.getTags().get("host"),AnnotationQuery.TAG_NOT_EQUALS_INTERNAL_PREFIX+"myhostname7.abc.com"); + assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); + assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); + assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); + } -public class QueryUtilsTest extends AbstractTest { - - @Test - public void testGetQueryContextWithSingleExpression() { - long relativeTo = System.currentTimeMillis(); - QueryContext context = QueryUtils.getQueryContext("-1h:argus.core:alerts.scheduled:zimsum:1m-sum", relativeTo); - assertNull(context.getTransform()); - assertNull(context.getConstants()); - assertEquals(context.getChildContexts().size(),0); - assertEquals(context.getChildExpressions().size(),1); - TSDBQueryExpression expression = context.getChildExpressions().get(0); - assertEquals(expression.getScope(), "argus.core"); - assertEquals(expression.getMetric(), "alerts.scheduled"); - assertEquals(expression.getAggregator().toString(), "ZIMSUM"); - assertEquals(expression.getDownsampler().toString(), "SUM"); - assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); - assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); - assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); - } - - @Test - public void testGetQueryWithSingleTransform() { - long relativeTo = System.currentTimeMillis(); - QueryContext context = QueryUtils.getQueryContext("DOWNSAMPLE(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, #1h-sum#)", relativeTo); - assertEquals(context.getTransform(), Function.DOWNSAMPLE); - assertEquals(context.getConstants().size(), 1); - assertEquals(context.getConstants().get(0), "1h-sum"); - assertEquals(context.getChildContexts().size(),0); - assertEquals(context.getChildExpressions().size(),1); - TSDBQueryExpression expression = context.getChildExpressions().get(0); - assertEquals(expression.getScope(), "argus.core"); - assertEquals(expression.getMetric(), "alerts.scheduled"); - assertEquals(expression.getAggregator().toString(), "ZIMSUM"); - assertEquals(expression.getDownsampler().toString(), "SUM"); - assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); - assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); - assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); - } - - @Test - public void testGetQueryWithNestedTransform() { - long relativeTo = System.currentTimeMillis(); - QueryContext context = QueryUtils.getQueryContext("UNION(DOWNSAMPLE(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, #1h-sum#), SUM(-1h:argus.core:alerts.scheduled:zimsum:1m-sum, -1h:argus.core:alerts.evaluated:zimsum:1m-sum))", relativeTo); - assertEquals(context.getTransform(), Function.UNION); - assertEquals(context.getConstants().size(), 0); - 
assertEquals(context.getChildContexts().size(),2); - assertEquals(context.getChildExpressions().size(),0); - - QueryContext context1 = context.getChildContexts().get(0); - assertEquals(context1.getTransform(), Function.DOWNSAMPLE); - assertEquals(context1.getConstants().size(), 1); - assertEquals(context1.getConstants().get(0), "1h-sum"); - assertEquals(context1.getChildContexts().size(),0); - assertEquals(context1.getChildExpressions().size(),1); - TSDBQueryExpression expression = context1.getChildExpressions().get(0); - assertEquals(expression.getScope(), "argus.core"); - assertEquals(expression.getMetric(), "alerts.scheduled"); - assertEquals(expression.getAggregator().toString(), "ZIMSUM"); - assertEquals(expression.getDownsampler().toString(), "SUM"); - assertEquals(expression.getDownsamplingPeriod(), new Long(60000)); - assertEquals(expression.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); - assertEquals(expression.getEndTimestamp(),new Long(relativeTo)); - - QueryContext context2 = context.getChildContexts().get(1); - assertEquals(context2.getTransform(), Function.SUM); - assertEquals(context2.getConstants().size(), 0); - assertEquals(context2.getChildContexts().size(),0); - assertEquals(context2.getChildExpressions().size(),2); - - TSDBQueryExpression expression1 = context2.getChildExpressions().get(0); - assertEquals(expression1.getScope(), "argus.core"); - assertEquals(expression1.getMetric(), "alerts.scheduled"); - assertEquals(expression1.getAggregator().toString(), "ZIMSUM"); - assertEquals(expression1.getDownsampler().toString(), "SUM"); - assertEquals(expression1.getDownsamplingPeriod(), new Long(60000)); - assertEquals(expression1.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); - assertEquals(expression1.getEndTimestamp(),new Long(relativeTo)); - - TSDBQueryExpression expression2 = context2.getChildExpressions().get(1); - assertEquals(expression2.getScope(), "argus.core"); - assertEquals(expression2.getMetric(), "alerts.evaluated"); - assertEquals(expression2.getAggregator().toString(), "ZIMSUM"); - assertEquals(expression2.getDownsampler().toString(), "SUM"); - assertEquals(expression2.getDownsamplingPeriod(), new Long(60000)); - assertEquals(expression2.getStartTimestamp(),new Long(((relativeTo - 3600*1000)/1000)*1000)); - assertEquals(expression2.getEndTimestamp(),new Long(relativeTo)); - } } diff --git a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/TemplateReplacerTest.java b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/TemplateReplacerTest.java index c7f57d290..8ff85adf5 100644 --- a/ArgusCore/src/test/java/com/salesforce/dva/argus/util/TemplateReplacerTest.java +++ b/ArgusCore/src/test/java/com/salesforce/dva/argus/util/TemplateReplacerTest.java @@ -1,6 +1,5 @@ package com.salesforce.dva.argus.util; -import com.salesforce.dva.argus.AbstractTest; import com.salesforce.dva.argus.entity.Alert; import com.salesforce.dva.argus.entity.History; import com.salesforce.dva.argus.entity.Metric; @@ -9,6 +8,7 @@ import com.salesforce.dva.argus.service.AlertService; import com.salesforce.dva.argus.service.UserService; import com.salesforce.dva.argus.service.alert.DefaultAlertService; +import org.junit.ComparisonFailure; import org.junit.Test; import java.math.BigInteger; @@ -19,11 +19,33 @@ import static org.junit.Assert.*; -public class TemplateReplacerTest extends AbstractTest { +import org.junit.BeforeClass; +import org.junit.AfterClass; +import com.salesforce.dva.argus.system.SystemMain; +import 
com.salesforce.dva.argus.TestUtils; + + +public class TemplateReplacerTest { private static final String expression = "DIVIDE(-1h:argus.jvm:file.descriptor.open{host=unknown-host}:avg, -1h:argus.jvm:file.descriptor.max{host=unknown-host}:avg)"; + static private SystemMain system; + + @BeforeClass + static public void setUpClass() { + system = TestUtils.getInstance(); + system.start(); + } + + @AfterClass + static public void tearDownClass() { + if (system != null) { + system.getServiceFactory().getManagementService().cleanupRecords(); + system.stop(); + } + } + @Test public void testTemplateNaming() { UserService userService = system.getServiceFactory().getUserService(); @@ -114,7 +136,12 @@ public void testObjectTemplateReplacement() { @Test public void testConditionalOutput() { UserService userService = system.getServiceFactory().getUserService(); - Alert alert = new Alert(userService.findAdminUser(), userService.findAdminUser(), "alert_name", expression, "* * * * *"); + String alertName = "alert_name-" + TestUtils.createRandomName(); + Alert alert = new Alert(userService.findAdminUser(), + userService.findAdminUser(), + alertName, + expression, + "* * * * *"); Notification notification = new Notification("notification_name", alert, "notifier_name", new ArrayList(), 23); Trigger trigger = new Trigger(alert, Trigger.TriggerType.GREATER_THAN_OR_EQ, "trigger_name", 2D, 7.1D,5); @@ -175,6 +202,12 @@ public void testCornerCases() { String expectedOutput = customTemplate; assertEquals(expectedOutput, TemplateReplacer.applyTemplateChanges(context, customTemplate)); + + customTemplate = "Failed template evaluation: ${triggerTimeStamp}"; + assertEquals(customTemplate, TemplateReplacer.applyTemplateChanges(context, customTemplate)); + + customTemplate = "Success template evaluation: ${triggerTimestamp?datetime?iso('GMT')}"; + assertEquals("Success template evaluation: 2014-12-11T17:40:00Z", TemplateReplacer.applyTemplateChanges(context, customTemplate)); } -} \ No newline at end of file +} diff --git a/ArgusCore/src/test/resources/com/salesforce/dva/argus/service/metric/MetricReaderTest.testFunctionsWithConstant.properties b/ArgusCore/src/test/resources/com/salesforce/dva/argus/service/metric/MetricReaderTest.testFunctionsWithConstant.properties index e8e1f966b..9793b0843 100644 --- a/ArgusCore/src/test/resources/com/salesforce/dva/argus/service/metric/MetricReaderTest.testFunctionsWithConstant.properties +++ b/ArgusCore/src/test/resources/com/salesforce/dva/argus/service/metric/MetricReaderTest.testFunctionsWithConstant.properties @@ -1,6 +1,6 @@ 
func=FILL(SUM(DIFF(SUM(CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-ASG-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP3-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP4-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-DFW-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-DFW-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-FRF-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-LON-SP9-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PAR-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PHX-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PHX-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-TYO-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP3-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP4-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#)),S
UM(CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-ASG-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP3-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-CHI-SP4-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-DFW-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-DFW-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-FRF-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-LON-SP9-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PAR-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PHX-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-PHX-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-TYO-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP1-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP2-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP3-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#),CULL_BELOW(CULL_ABOVE(PROPAGATE(SCALE(-1h:ajna-auditor.SFZ.AGG.none:AJNA-WAS-SP4-SFZ.all_metrics.latency.max{device=ajna-mmcnsmr8-2-sfz}:max:20m-max,#0.001#),#1m#),#1800#,#value#),#0.001#,#value#))),#1#),#1m#,#0m#,#0#) 
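# Backslashes in the regex constants below are doubled because
# java.util.Properties consumes one level of backslash escaping on load:
# #(myhost\\.com)# is read back by the expression parser as #(myhost\.com)#.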
func-with-str-constant=AVERAGEBELOW(-1d:na1:app_record.count:avg, #abc#) -func-with-regex-constant=AVERAGEBELOW(-1d:na1:app_record.count:avg, #(app1-1\\.ops\\.sfdc\\.net)#) +func-with-regex-constant=AVERAGEBELOW(-1d:na1:app_record.count:avg, #(myhost\\.com)#) func-with-int-constant=AVERAGEBELOW(-1d:na1:app_record.count:avg, #200#) func-with-int-constant1=AVERAGEBELOW(-1d:na1:app_record.count:avg, #2#) func-with-float-constant=AVERAGEBELOW(-1d:na1:app_record.count:avg, #200.45#) diff --git a/ArgusDocker/README.md b/ArgusDocker/README.md deleted file mode 100644 index 20cd045b8..000000000 --- a/ArgusDocker/README.md +++ /dev/null @@ -1,45 +0,0 @@ -ArgusDocker -===== - -ArgusDocker contains docker-compose files to deploy a complete argus system. Follow these steps -to deploy: - - 1. Follow the build instructions for [`ArgusWeb`](https://github.com/salesforce/Argus/tree/develop/ArgusWeb) to install the NPM dependencies. - 1. Build the images using `mvn install` - 1. Bring up the environment using `docker-compose up -d` - -More information about docker can be found at - * docker: https://docs.docker.com/ - * Dockerfile reference: https://docs.docker.com/engine/reference/builder/ - * docker run reference: https://docs.docker.com/engine/reference/run/ - * docker-compose: https://docs.docker.com/compose/ - * docker-compose reference: https://docs.docker.com/compose/compose-file/ - -## simple -The simple deployment contains approximately the minimum services required for a full argus deployment: - * opentsdb - * The image chosen also includes HBase - * Available after deployment at http://localhost:4242/ - * grafana - * not actually required, but useful for troubleshooting the data in opentsdb - * Available after deployment at http://localhost:3000/ - * redis - * kafka - * The image chosen also includes zookeeper - * argus-web-services - * The REST api for argus, running inside a tomcat container - * Available after deployment at http://localhost:8081/argus/ - * The top level tomcat web.xml is overridden to include a CORS filter - * argus-web - * The web ui / frontend for argus - * Available after deployment at http://localhost:8082/app/ - * argus-metrics-client - * An instance of the argus-client configured to process metrics. The argus-web-services adds the - metrics to kafka, and the argus-metrics-client consumes them from kafka and stores them into - opentsdb. - -The simple deployment will use the images you have built locally with `mvn install`, or use images from -docker hub if you have not built anything locally. - -## persistent -This uses a postgres database and has persistent volumes for opentsdb, etc. 
diff --git a/ArgusDocker/persistent/conf/argus-web-services/tomcat-users.xml b/ArgusDocker/persistent/conf/argus-web-services/tomcat-users.xml
deleted file mode 100644
index 342da8964..000000000
--- a/ArgusDocker/persistent/conf/argus-web-services/tomcat-users.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-[43 deleted lines of tomcat-users.xml; the XML markup was lost to extraction, so no body is recoverable]
\ No newline at end of file
diff --git a/ArgusDocker/persistent/conf/argus-web-services/web.xml b/ArgusDocker/persistent/conf/argus-web-services/web.xml
deleted file mode 100644
index bcf75e6a5..000000000
--- a/ArgusDocker/persistent/conf/argus-web-services/web.xml
+++ /dev/null
@@ -1,4695 +0,0 @@
-[4,695 deleted lines: the stock Tomcat web.xml with its XML markup lost to extraction. Recoverable contents: the "default" servlet (org.apache.catalina.servlets.DefaultServlet; debug=0, listings=false, load-on-startup 1) mapped to /; the "jsp" servlet (org.apache.jasper.servlet.JspServlet; fork=false, xpoweredBy=false, load-on-startup 3) mapped to *.jsp and *.jspx; a CorsFilter (org.apache.catalina.filters.CorsFilter) with cors.allowed.methods=GET,POST,HEAD,OPTIONS,PUT,DELETE and cors.allowed.headers=Content-Type,X-Requested-With,accept,Origin,Access-Control-Request-Method,Access-Control-Request-Headers, mapped to /*; a 30-minute session timeout; the default extension-to-MIME-type table; and a welcome-file list of index.html, index.htm, index.jsp]
diff --git a/ArgusDocker/persistent/conf/argus.properties b/ArgusDocker/persistent/conf/argus.properties
deleted file mode 100644
index 430ca9e13..000000000
--- a/ArgusDocker/persistent/conf/argus.properties
+++ /dev/null
@@ -1,34 +0,0 @@
-
-system.property.jpa.javax.persistence.schema-generation.database.action=create
-system.property.jpa.javax.persistence.jdbc.driver=org.postgresql.Driver
-system.property.jpa.javax.persistence.jdbc.url=jdbc:postgresql://postgres:5432/argus_user
-system.property.jpa.javax.persistence.jdbc.user=argus_user
-system.property.jpa.javax.persistence.jdbc.password=password
-system.property.jpa.eclipselink.ddl-generation=drop-and-create-tables
-system.property.jpa.eclipselink.logging.level=SEVERE
-system.property.jpa.eclipselink.logging.parameters=true
-system.property.jpa.eclipselink.target-database=Auto
-system.property.jpa.eclipselink.canonicalmodel.subpackage=unit
-
-build.property.secure.cookies=false
-system.property.admin.email=argus-admin@mycompany.com
-system.property.log.level=DEBUG
-system.property.mail.enabled=false
-
-# skip ldap (any user can log in with any password)
-service.binding.auth=com.salesforce.dva.argus.service.auth.NoAuthService
-
-service.property.mail.alerturl.template=https\://localhost\:8443/argus/\#/alerts/$alertid$
-service.property.mail.metricurl.template=https\://localhost\:8443/argus/\#/viewmetrics?expression\=$expression$
-service.property.mail.smtp.auth=false
-service.property.mail.smtp.host=smtprelay.mycompany.com
-service.property.mail.smtp.starttls.enable=false
-service.property.tsdb.connection.count=2
-service.property.tsdb.endpoint.read=http://opentsdb:4242
-service.property.tsdb.endpoint.timeout=10000
-service.property.tsdb.endpoint.write=http://opentsdb:4242
-service.property.cache.redis.cluster=redis:6379
-
-# kafka
-service.property.mq.kafka.brokers=kafka:9092
-service.property.mq.zookeeper.connect=kafka:2181
\ No newline at end of file
diff --git a/ArgusDocker/persistent/conf/config.js b/ArgusDocker/persistent/conf/config.js
deleted file mode 100644
index 8f92af01c..000000000
--- a/ArgusDocker/persistent/conf/config.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/*! Copyright (c) 2016, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- *
- * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
- *
- * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software
- * without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
- * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
- * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
-angular.module("argus.urlConfig", [])
-.constant('CONFIG', {
-    version: '2.3.0-SNAPSHOT',
-    wsUrl: 'http://localhost:8081/argus/',
-    emailUrl: 'https://mail.google.com/mail/?view=cm&fs=1&tf=1&to=argus-dev@mycompany.com',
-    feedUrl: 'https://groups.google.com/a/mycompany.com/forum/?hl=en#!forum/argus-user',
-    wikiUrl: 'https://github.com/salesforce/Argus/wiki',
-    issueUrl: 'https://groups.google.com/a/mycompany.com/forum/?hl=en#!forum/argus-dev',
-    templatePath: '/app/views/argus_custom_directives/templates/'
-});
diff --git a/ArgusDocker/persistent/conf/opentsdb/opentsdb.conf b/ArgusDocker/persistent/conf/opentsdb/opentsdb.conf
deleted file mode 100644
index 4749e9e56..000000000
--- a/ArgusDocker/persistent/conf/opentsdb/opentsdb.conf
+++ /dev/null
@@ -1,5 +0,0 @@
-## additional
-tsd.http.request.enable_chunked = true
-tsd.http.request.max_chunk = 1048576
-tsd.http.query.allow_delete=true
-tsd.http.request.cors_domains=*
\ No newline at end of file
diff --git a/ArgusDocker/persistent/conf/opentsdb/start_opentsdb.sh b/ArgusDocker/persistent/conf/opentsdb/start_opentsdb.sh
deleted file mode 100644
index be7e104e8..000000000
--- a/ArgusDocker/persistent/conf/opentsdb/start_opentsdb.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-export TSDB_VERSION="2.2.0"
-echo "Sleeping for 30 seconds to give HBase time to warm up"
-sleep 30
-
-if [ ! -e /opt/opentsdb_tables_created.txt ]; then
-  echo "creating tsdb tables"
-  bash /opt/bin/create_tsdb_tables.sh
-  echo "created tsdb tables"
-fi
-
-echo "starting opentsdb"
-/opt/opentsdb/opentsdb-${TSDB_VERSION}/build/tsdb tsd --port=4242 --staticroot=/opt/opentsdb/opentsdb-${TSDB_VERSION}/build/staticroot --cachedir=/tmp --auto-metric --config=/opt/opentsdb/config/opentsdb.conf
diff --git a/ArgusDocker/persistent/docker-compose.yml b/ArgusDocker/persistent/docker-compose.yml
deleted file mode 100644
index 00f4be116..000000000
--- a/ArgusDocker/persistent/docker-compose.yml
+++ /dev/null
@@ -1,123 +0,0 @@
-version: '2'
-services:
-  opentsdb:
-    image: petergrace/opentsdb-docker:latest
-    hostname: opentsdb
-    container_name: opentsdb
-    ports:
-      - "4242:4242"
-    volumes:
-      - ./storage/opentsdb/data/hbase:/data/hbase
-      - ./storage/opentsdb/tmp:/tmp
-      - ./conf/opentsdb/start_opentsdb.sh:/opt/bin/start_opentsdb.sh
-      - ./conf/opentsdb/opentsdb.conf:/opt/opentsdb/config/opentsdb.conf:ro
-  postgres:
-    image: postgres:9.6.1
-    hostname: postgres
-    container_name: postgres
-    ports:
-      - "5432:5432"
-    environment:
-      - POSTGRES_USER=argus_user
-      - POSTGRES_PASSWORD=password
-    volumes:
-      - ./storage/postgres/var/lib/postgresql/data:/var/lib/postgresql/data
-  grafana:
-    image: grafana/grafana:4.0.0
-    hostname: grafana
-    container_name: grafana
-    ports:
-      - "3000:3000"
-    volumes:
-      - ./storage/grafana/var/lib/grafana:/var/lib/grafana
-  redis:
-    image: redis:3.2.5
-    hostname: redis
-    container_name: redis
-  kafka:
-    image: spotify/kafka:latest
-    hostname: kafka
-    container_name: kafka
-    ports:
-      - "2181:2181"
-      - "9092:9092"
-    environment:
-      - ADVERTISED_HOST=kafka
-      - ADVERTISED_PORT=9092
-  argus-web-services:
-    image: salesforce/argus-webservices
-    hostname: argus-web-services
-    container_name: argus-web-services
-    ports:
-      - "8081:8080"
-      - "5005:5005"
-    volumes:
-      - ./conf/argus.properties:/usr/argus/argus.properties:ro
-      - ./conf/argus-web-services/web.xml:/usr/local/tomcat/conf/web.xml:ro
-      - ./conf/argus-web-services/tomcat-users.xml:/usr/local/tomcat/conf/tomcat-users.xml:ro
-    environment:
-      - "CATALINA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dargus.config.public.location=/usr/argus/argus.properties"
-  argus-web:
-    image: salesforce/argus-web
-    hostname: argus-web
-    container_name: argus-web
-    ports:
-      - "8082:8000"
-    volumes:
-      - ./conf/config.js:/usr/argus/argusWeb/app/js/config.js:ro
-  argus-metrics-client:
-    image: salesforce/argus-client
-    hostname: argus-metrics-client
-    container_name: argus-metrics-client
-    ports:
-      - "5006:5000"
-    volumes:
-      - ./conf/argus.properties:/usr/argus/argusClient/argus.properties:ro
-    environment:
-      - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000"
-      - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties"
-    depends_on:
-      - "kafka"
-    command: [COMMIT_METRICS]
-  argus-alert-client:
-    image: salesforce/argus-client
-    hostname: argus-alert-client
-    container_name: argus-alert-client
-    ports:
-      - "5007:5000"
-    volumes:
-      - ./conf/argus.properties:/usr/argus/argusClient/argus.properties:ro
-    environment:
-      - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000"
-      - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties"
-    depends_on:
-      - "kafka"
-    command: [ALERT]
-  argus-annotations-client:
-    image: salesforce/argus-client
-    hostname: argus-annotations-client
-    container_name: argus-annotations-client
-    ports:
-      - "5008:5000"
-    volumes:
-      - ./conf/argus.properties:/usr/argus/argusClient/argus.properties:ro
-    environment:
-      - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000"
-      - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties"
-    depends_on:
-      - "kafka"
-    command: [COMMIT_ANNOTATIONS]
-  argus-processor-client:
-    image: salesforce/argus-client
-    hostname: argus-processor-client
-    container_name: argus-processor-client
-    ports:
-      - "5009:5000"
-    volumes:
-      - ./conf/argus.properties:/usr/argus/argusClient/argus.properties:ro
-    environment:
-      - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000"
-      - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties"
-    depends_on:
-      - "kafka"
-    command: [PROCESS_QUERIES]
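For reference, a short, hedged sketch of commands that would have exercised the compose file deleted above; the service names come from the file itself, and passing a ClientType as the run argument assumes the argus-client image's entrypoint accepts it the same way `command:` does.

```sh
# Usage sketch for the deleted ArgusDocker/persistent/docker-compose.yml (assumptions noted above).
cd ArgusDocker/persistent
docker-compose up -d                               # start every service in the file
docker-compose logs -f argus-metrics-client        # follow one client's logs
docker-compose run --rm argus-alert-client ALERT   # one-off client; the arg replaces `command:`
```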
diff --git a/ArgusDocker/persistent/storage/grafana/.gitignore b/ArgusDocker/persistent/storage/grafana/.gitignore
deleted file mode 100644
index 5e7d2734c..000000000
--- a/ArgusDocker/persistent/storage/grafana/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/ArgusDocker/persistent/storage/opentsdb/.gitignore b/ArgusDocker/persistent/storage/opentsdb/.gitignore
deleted file mode 100644
index 5e7d2734c..000000000
--- a/ArgusDocker/persistent/storage/opentsdb/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/ArgusDocker/persistent/storage/postgres/.gitignore b/ArgusDocker/persistent/storage/postgres/.gitignore
deleted file mode 100644
index 5e7d2734c..000000000
--- a/ArgusDocker/persistent/storage/postgres/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# Ignore everything in this directory
-*
-# Except this file
-!.gitignore
diff --git a/ArgusDocker/simple/argus-web-services/tomcat-users.xml b/ArgusDocker/simple/argus-web-services/tomcat-users.xml
deleted file mode 100644
index 342da8964..000000000
--- a/ArgusDocker/simple/argus-web-services/tomcat-users.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-[43 deleted lines of tomcat-users.xml, identical to the persistent copy above; the XML markup was lost to extraction]
\ No newline at end of file
diff --git a/ArgusDocker/simple/argus-web-services/web.xml b/ArgusDocker/simple/argus-web-services/web.xml
deleted file mode 100644
index bcf75e6a5..000000000
--- a/ArgusDocker/simple/argus-web-services/web.xml
+++ /dev/null
@@ -1,4695 +0,0 @@
-[4,695 deleted lines: the same stock Tomcat web.xml, CorsFilter configuration, MIME-type table, and welcome-file list as the persistent copy above, again with the XML markup lost to extraction]
image/ktx - - - ktz - application/vnd.kahootz - - - kwd - application/vnd.kde.kword - - - kwt - application/vnd.kde.kword - - - lasxml - application/vnd.las.las+xml - - - latex - application/x-latex - - - lbd - application/vnd.llamagraphics.life-balance.desktop - - - lbe - application/vnd.llamagraphics.life-balance.exchange+xml - - - les - application/vnd.hhe.lesson-player - - - lha - application/x-lzh-compressed - - - link66 - application/vnd.route66.link66+xml - - - list - text/plain - - - list3820 - application/vnd.ibm.modcap - - - listafp - application/vnd.ibm.modcap - - - lnk - application/x-ms-shortcut - - - log - text/plain - - - lostxml - application/lost+xml - - - lrf - application/octet-stream - - - lrm - application/vnd.ms-lrm - - - ltf - application/vnd.frogans.ltf - - - lvp - audio/vnd.lucent.voice - - - lwp - application/vnd.lotus-wordpro - - - lzh - application/x-lzh-compressed - - - m13 - application/x-msmediaview - - - m14 - application/x-msmediaview - - - m1v - video/mpeg - - - m21 - application/mp21 - - - m2a - audio/mpeg - - - m2v - video/mpeg - - - m3a - audio/mpeg - - - m3u - audio/x-mpegurl - - - m3u8 - application/vnd.apple.mpegurl - - - m4a - audio/mp4 - - - m4b - audio/mp4 - - - m4r - audio/mp4 - - - m4u - video/vnd.mpegurl - - - m4v - video/mp4 - - - ma - application/mathematica - - - mac - image/x-macpaint - - - mads - application/mads+xml - - - mag - application/vnd.ecowin.chart - - - maker - application/vnd.framemaker - - - man - text/troff - - - mar - application/octet-stream - - - mathml - application/mathml+xml - - - mb - application/mathematica - - - mbk - application/vnd.mobius.mbk - - - mbox - application/mbox - - - mc1 - application/vnd.medcalcdata - - - mcd - application/vnd.mcd - - - mcurl - text/vnd.curl.mcurl - - - mdb - application/x-msaccess - - - mdi - image/vnd.ms-modi - - - me - text/troff - - - mesh - model/mesh - - - meta4 - application/metalink4+xml - - - metalink - application/metalink+xml - - - mets - application/mets+xml - - - mfm - application/vnd.mfmp - - - mft - application/rpki-manifest - - - mgp - application/vnd.osgeo.mapguide.package - - - mgz - application/vnd.proteus.magazine - - - mid - audio/midi - - - midi - audio/midi - - - mie - application/x-mie - - - mif - application/x-mif - - - mime - message/rfc822 - - - mj2 - video/mj2 - - - mjp2 - video/mj2 - - - mk3d - video/x-matroska - - - mka - audio/x-matroska - - - mks - video/x-matroska - - - mkv - video/x-matroska - - - mlp - application/vnd.dolby.mlp - - - mmd - application/vnd.chipnuts.karaoke-mmd - - - mmf - application/vnd.smaf - - - mmr - image/vnd.fujixerox.edmics-mmr - - - mng - video/x-mng - - - mny - application/x-msmoney - - - mobi - application/x-mobipocket-ebook - - - mods - application/mods+xml - - - mov - video/quicktime - - - movie - video/x-sgi-movie - - - mp1 - audio/mpeg - - - mp2 - audio/mpeg - - - mp21 - application/mp21 - - - mp2a - audio/mpeg - - - mp3 - audio/mpeg - - - mp4 - video/mp4 - - - mp4a - audio/mp4 - - - mp4s - application/mp4 - - - mp4v - video/mp4 - - - mpa - audio/mpeg - - - mpc - application/vnd.mophun.certificate - - - mpe - video/mpeg - - - mpeg - video/mpeg - - - mpega - audio/x-mpeg - - - mpg - video/mpeg - - - mpg4 - video/mp4 - - - mpga - audio/mpeg - - - mpkg - application/vnd.apple.installer+xml - - - mpm - application/vnd.blueice.multipass - - - mpn - application/vnd.mophun.application - - - mpp - application/vnd.ms-project - - - mpt - application/vnd.ms-project - - - mpv2 - video/mpeg2 - - - mpy - application/vnd.ibm.minipay - - - 
mqy - application/vnd.mobius.mqy - - - mrc - application/marc - - - mrcx - application/marcxml+xml - - - ms - text/troff - - - mscml - application/mediaservercontrol+xml - - - mseed - application/vnd.fdsn.mseed - - - mseq - application/vnd.mseq - - - msf - application/vnd.epson.msf - - - msh - model/mesh - - - msi - application/x-msdownload - - - msl - application/vnd.mobius.msl - - - msty - application/vnd.muvee.style - - - mts - model/vnd.mts - - - mus - application/vnd.musician - - - musicxml - application/vnd.recordare.musicxml+xml - - - mvb - application/x-msmediaview - - - mwf - application/vnd.mfer - - - mxf - application/mxf - - - mxl - application/vnd.recordare.musicxml - - - mxml - application/xv+xml - - - mxs - application/vnd.triscape.mxs - - - mxu - video/vnd.mpegurl - - - n-gage - application/vnd.nokia.n-gage.symbian.install - - - n3 - text/n3 - - - nb - application/mathematica - - - nbp - application/vnd.wolfram.player - - - nc - application/x-netcdf - - - ncx - application/x-dtbncx+xml - - - nfo - text/x-nfo - - - ngdat - application/vnd.nokia.n-gage.data - - - nitf - application/vnd.nitf - - - nlu - application/vnd.neurolanguage.nlu - - - nml - application/vnd.enliven - - - nnd - application/vnd.noblenet-directory - - - nns - application/vnd.noblenet-sealer - - - nnw - application/vnd.noblenet-web - - - npx - image/vnd.net-fpx - - - nsc - application/x-conference - - - nsf - application/vnd.lotus-notes - - - ntf - application/vnd.nitf - - - nzb - application/x-nzb - - - oa2 - application/vnd.fujitsu.oasys2 - - - oa3 - application/vnd.fujitsu.oasys3 - - - oas - application/vnd.fujitsu.oasys - - - obd - application/x-msbinder - - - obj - application/x-tgif - - - oda - application/oda - - - - odb - application/vnd.oasis.opendocument.database - - - - odc - application/vnd.oasis.opendocument.chart - - - - odf - application/vnd.oasis.opendocument.formula - - - odft - application/vnd.oasis.opendocument.formula-template - - - - odg - application/vnd.oasis.opendocument.graphics - - - - odi - application/vnd.oasis.opendocument.image - - - - odm - application/vnd.oasis.opendocument.text-master - - - - odp - application/vnd.oasis.opendocument.presentation - - - - ods - application/vnd.oasis.opendocument.spreadsheet - - - - odt - application/vnd.oasis.opendocument.text - - - oga - audio/ogg - - - ogg - audio/ogg - - - ogv - video/ogg - - - - ogx - application/ogg - - - omdoc - application/omdoc+xml - - - onepkg - application/onenote - - - onetmp - application/onenote - - - onetoc - application/onenote - - - onetoc2 - application/onenote - - - opf - application/oebps-package+xml - - - opml - text/x-opml - - - oprc - application/vnd.palm - - - org - application/vnd.lotus-organizer - - - osf - application/vnd.yamaha.openscoreformat - - - osfpvg - application/vnd.yamaha.openscoreformat.osfpvg+xml - - - otc - application/vnd.oasis.opendocument.chart-template - - - otf - application/x-font-otf - - - - otg - application/vnd.oasis.opendocument.graphics-template - - - - oth - application/vnd.oasis.opendocument.text-web - - - oti - application/vnd.oasis.opendocument.image-template - - - - otp - application/vnd.oasis.opendocument.presentation-template - - - - ots - application/vnd.oasis.opendocument.spreadsheet-template - - - - ott - application/vnd.oasis.opendocument.text-template - - - oxps - application/oxps - - - oxt - application/vnd.openofficeorg.extension - - - p - text/x-pascal - - - p10 - application/pkcs10 - - - p12 - application/x-pkcs12 - - - p7b - application/x-pkcs7-certificates - - 
- p7c - application/pkcs7-mime - - - p7m - application/pkcs7-mime - - - p7r - application/x-pkcs7-certreqresp - - - p7s - application/pkcs7-signature - - - p8 - application/pkcs8 - - - pas - text/x-pascal - - - paw - application/vnd.pawaafile - - - pbd - application/vnd.powerbuilder6 - - - pbm - image/x-portable-bitmap - - - pcap - application/vnd.tcpdump.pcap - - - pcf - application/x-font-pcf - - - pcl - application/vnd.hp-pcl - - - pclxl - application/vnd.hp-pclxl - - - pct - image/pict - - - pcurl - application/vnd.curl.pcurl - - - pcx - image/x-pcx - - - pdb - application/vnd.palm - - - pdf - application/pdf - - - pfa - application/x-font-type1 - - - pfb - application/x-font-type1 - - - pfm - application/x-font-type1 - - - pfr - application/font-tdpfr - - - pfx - application/x-pkcs12 - - - pgm - image/x-portable-graymap - - - pgn - application/x-chess-pgn - - - pgp - application/pgp-encrypted - - - pic - image/pict - - - pict - image/pict - - - pkg - application/octet-stream - - - pki - application/pkixcmp - - - pkipath - application/pkix-pkipath - - - plb - application/vnd.3gpp.pic-bw-large - - - plc - application/vnd.mobius.plc - - - plf - application/vnd.pocketlearn - - - pls - audio/x-scpls - - - pml - application/vnd.ctc-posml - - - png - image/png - - - pnm - image/x-portable-anymap - - - pnt - image/x-macpaint - - - portpkg - application/vnd.macports.portpkg - - - pot - application/vnd.ms-powerpoint - - - potm - application/vnd.ms-powerpoint.template.macroenabled.12 - - - potx - application/vnd.openxmlformats-officedocument.presentationml.template - - - ppam - application/vnd.ms-powerpoint.addin.macroenabled.12 - - - ppd - application/vnd.cups-ppd - - - ppm - image/x-portable-pixmap - - - pps - application/vnd.ms-powerpoint - - - ppsm - application/vnd.ms-powerpoint.slideshow.macroenabled.12 - - - ppsx - application/vnd.openxmlformats-officedocument.presentationml.slideshow - - - ppt - application/vnd.ms-powerpoint - - - pptm - application/vnd.ms-powerpoint.presentation.macroenabled.12 - - - pptx - application/vnd.openxmlformats-officedocument.presentationml.presentation - - - pqa - application/vnd.palm - - - prc - application/x-mobipocket-ebook - - - pre - application/vnd.lotus-freelance - - - prf - application/pics-rules - - - ps - application/postscript - - - psb - application/vnd.3gpp.pic-bw-small - - - psd - image/vnd.adobe.photoshop - - - psf - application/x-font-linux-psf - - - pskcxml - application/pskc+xml - - - ptid - application/vnd.pvi.ptid1 - - - pub - application/x-mspublisher - - - pvb - application/vnd.3gpp.pic-bw-var - - - pwn - application/vnd.3m.post-it-notes - - - pya - audio/vnd.ms-playready.media.pya - - - pyv - video/vnd.ms-playready.media.pyv - - - qam - application/vnd.epson.quickanime - - - qbo - application/vnd.intu.qbo - - - qfx - application/vnd.intu.qfx - - - qps - application/vnd.publishare-delta-tree - - - qt - video/quicktime - - - qti - image/x-quicktime - - - qtif - image/x-quicktime - - - qwd - application/vnd.quark.quarkxpress - - - qwt - application/vnd.quark.quarkxpress - - - qxb - application/vnd.quark.quarkxpress - - - qxd - application/vnd.quark.quarkxpress - - - qxl - application/vnd.quark.quarkxpress - - - qxt - application/vnd.quark.quarkxpress - - - ra - audio/x-pn-realaudio - - - ram - audio/x-pn-realaudio - - - rar - application/x-rar-compressed - - - ras - image/x-cmu-raster - - - rcprofile - application/vnd.ipunplugged.rcprofile - - - rdf - application/rdf+xml - - - rdz - application/vnd.data-vision.rdz - - - rep - 
application/vnd.businessobjects - - - res - application/x-dtbresource+xml - - - rgb - image/x-rgb - - - rif - application/reginfo+xml - - - rip - audio/vnd.rip - - - ris - application/x-research-info-systems - - - rl - application/resource-lists+xml - - - rlc - image/vnd.fujixerox.edmics-rlc - - - rld - application/resource-lists-diff+xml - - - rm - application/vnd.rn-realmedia - - - rmi - audio/midi - - - rmp - audio/x-pn-realaudio-plugin - - - rms - application/vnd.jcp.javame.midlet-rms - - - rmvb - application/vnd.rn-realmedia-vbr - - - rnc - application/relax-ng-compact-syntax - - - roa - application/rpki-roa - - - roff - text/troff - - - rp9 - application/vnd.cloanto.rp9 - - - rpss - application/vnd.nokia.radio-presets - - - rpst - application/vnd.nokia.radio-preset - - - rq - application/sparql-query - - - rs - application/rls-services+xml - - - rsd - application/rsd+xml - - - rss - application/rss+xml - - - rtf - application/rtf - - - rtx - text/richtext - - - s - text/x-asm - - - s3m - audio/s3m - - - saf - application/vnd.yamaha.smaf-audio - - - sbml - application/sbml+xml - - - sc - application/vnd.ibm.secure-container - - - scd - application/x-msschedule - - - scm - application/vnd.lotus-screencam - - - scq - application/scvp-cv-request - - - scs - application/scvp-cv-response - - - scurl - text/vnd.curl.scurl - - - sda - application/vnd.stardivision.draw - - - sdc - application/vnd.stardivision.calc - - - sdd - application/vnd.stardivision.impress - - - sdkd - application/vnd.solent.sdkm+xml - - - sdkm - application/vnd.solent.sdkm+xml - - - sdp - application/sdp - - - sdw - application/vnd.stardivision.writer - - - see - application/vnd.seemail - - - seed - application/vnd.fdsn.seed - - - sema - application/vnd.sema - - - semd - application/vnd.semd - - - semf - application/vnd.semf - - - ser - application/java-serialized-object - - - setpay - application/set-payment-initiation - - - setreg - application/set-registration-initiation - - - sfd-hdstx - application/vnd.hydrostatix.sof-data - - - sfs - application/vnd.spotfire.sfs - - - sfv - text/x-sfv - - - sgi - image/sgi - - - sgl - application/vnd.stardivision.writer-global - - - sgm - text/sgml - - - sgml - text/sgml - - - sh - application/x-sh - - - shar - application/x-shar - - - shf - application/shf+xml - - - - sid - image/x-mrsid-image - - - sig - application/pgp-signature - - - sil - audio/silk - - - silo - model/mesh - - - sis - application/vnd.symbian.install - - - sisx - application/vnd.symbian.install - - - sit - application/x-stuffit - - - sitx - application/x-stuffitx - - - skd - application/vnd.koan - - - skm - application/vnd.koan - - - skp - application/vnd.koan - - - skt - application/vnd.koan - - - sldm - application/vnd.ms-powerpoint.slide.macroenabled.12 - - - sldx - application/vnd.openxmlformats-officedocument.presentationml.slide - - - slt - application/vnd.epson.salt - - - sm - application/vnd.stepmania.stepchart - - - smf - application/vnd.stardivision.math - - - smi - application/smil+xml - - - smil - application/smil+xml - - - smv - video/x-smv - - - smzip - application/vnd.stepmania.package - - - snd - audio/basic - - - snf - application/x-font-snf - - - so - application/octet-stream - - - spc - application/x-pkcs7-certificates - - - spf - application/vnd.yamaha.smaf-phrase - - - spl - application/x-futuresplash - - - spot - text/vnd.in3d.spot - - - spp - application/scvp-vp-response - - - spq - application/scvp-vp-request - - - spx - audio/ogg - - - sql - application/x-sql - - - src - 
application/x-wais-source - - - srt - application/x-subrip - - - sru - application/sru+xml - - - srx - application/sparql-results+xml - - - ssdl - application/ssdl+xml - - - sse - application/vnd.kodak-descriptor - - - ssf - application/vnd.epson.ssf - - - ssml - application/ssml+xml - - - st - application/vnd.sailingtracker.track - - - stc - application/vnd.sun.xml.calc.template - - - std - application/vnd.sun.xml.draw.template - - - stf - application/vnd.wt.stf - - - sti - application/vnd.sun.xml.impress.template - - - stk - application/hyperstudio - - - stl - application/vnd.ms-pki.stl - - - str - application/vnd.pg.format - - - stw - application/vnd.sun.xml.writer.template - - - sub - text/vnd.dvb.subtitle - - - sus - application/vnd.sus-calendar - - - susp - application/vnd.sus-calendar - - - sv4cpio - application/x-sv4cpio - - - sv4crc - application/x-sv4crc - - - svc - application/vnd.dvb.service - - - svd - application/vnd.svd - - - svg - image/svg+xml - - - svgz - image/svg+xml - - - swa - application/x-director - - - swf - application/x-shockwave-flash - - - swi - application/vnd.aristanetworks.swi - - - sxc - application/vnd.sun.xml.calc - - - sxd - application/vnd.sun.xml.draw - - - sxg - application/vnd.sun.xml.writer.global - - - sxi - application/vnd.sun.xml.impress - - - sxm - application/vnd.sun.xml.math - - - sxw - application/vnd.sun.xml.writer - - - t - text/troff - - - t3 - application/x-t3vm-image - - - taglet - application/vnd.mynfc - - - tao - application/vnd.tao.intent-module-archive - - - tar - application/x-tar - - - tcap - application/vnd.3gpp2.tcap - - - tcl - application/x-tcl - - - teacher - application/vnd.smart.teacher - - - tei - application/tei+xml - - - teicorpus - application/tei+xml - - - tex - application/x-tex - - - texi - application/x-texinfo - - - texinfo - application/x-texinfo - - - text - text/plain - - - tfi - application/thraud+xml - - - tfm - application/x-tex-tfm - - - tga - image/x-tga - - - thmx - application/vnd.ms-officetheme - - - tif - image/tiff - - - tiff - image/tiff - - - tmo - application/vnd.tmobile-livetv - - - torrent - application/x-bittorrent - - - tpl - application/vnd.groove-tool-template - - - tpt - application/vnd.trid.tpt - - - tr - text/troff - - - tra - application/vnd.trueapp - - - trm - application/x-msterminal - - - tsd - application/timestamped-data - - - tsv - text/tab-separated-values - - - ttc - application/x-font-ttf - - - ttf - application/x-font-ttf - - - ttl - text/turtle - - - twd - application/vnd.simtech-mindmapper - - - twds - application/vnd.simtech-mindmapper - - - txd - application/vnd.genomatix.tuxedo - - - txf - application/vnd.mobius.txf - - - txt - text/plain - - - u32 - application/x-authorware-bin - - - udeb - application/x-debian-package - - - ufd - application/vnd.ufdl - - - ufdl - application/vnd.ufdl - - - ulw - audio/basic - - - ulx - application/x-glulx - - - umj - application/vnd.umajin - - - unityweb - application/vnd.unity - - - uoml - application/vnd.uoml+xml - - - uri - text/uri-list - - - uris - text/uri-list - - - urls - text/uri-list - - - ustar - application/x-ustar - - - utz - application/vnd.uiq.theme - - - uu - text/x-uuencode - - - uva - audio/vnd.dece.audio - - - uvd - application/vnd.dece.data - - - uvf - application/vnd.dece.data - - - uvg - image/vnd.dece.graphic - - - uvh - video/vnd.dece.hd - - - uvi - image/vnd.dece.graphic - - - uvm - video/vnd.dece.mobile - - - uvp - video/vnd.dece.pd - - - uvs - video/vnd.dece.sd - - - uvt - application/vnd.dece.ttml+xml - - - uvu - 
video/vnd.uvvu.mp4 - - - uvv - video/vnd.dece.video - - - uvva - audio/vnd.dece.audio - - - uvvd - application/vnd.dece.data - - - uvvf - application/vnd.dece.data - - - uvvg - image/vnd.dece.graphic - - - uvvh - video/vnd.dece.hd - - - uvvi - image/vnd.dece.graphic - - - uvvm - video/vnd.dece.mobile - - - uvvp - video/vnd.dece.pd - - - uvvs - video/vnd.dece.sd - - - uvvt - application/vnd.dece.ttml+xml - - - uvvu - video/vnd.uvvu.mp4 - - - uvvv - video/vnd.dece.video - - - uvvx - application/vnd.dece.unspecified - - - uvvz - application/vnd.dece.zip - - - uvx - application/vnd.dece.unspecified - - - uvz - application/vnd.dece.zip - - - vcard - text/vcard - - - vcd - application/x-cdlink - - - vcf - text/x-vcard - - - vcg - application/vnd.groove-vcard - - - vcs - text/x-vcalendar - - - vcx - application/vnd.vcx - - - vis - application/vnd.visionary - - - viv - video/vnd.vivo - - - vob - video/x-ms-vob - - - vor - application/vnd.stardivision.writer - - - vox - application/x-authorware-bin - - - vrml - model/vrml - - - vsd - application/vnd.visio - - - vsf - application/vnd.vsf - - - vss - application/vnd.visio - - - vst - application/vnd.visio - - - vsw - application/vnd.visio - - - vtu - model/vnd.vtu - - - vxml - application/voicexml+xml - - - w3d - application/x-director - - - wad - application/x-doom - - - wav - audio/x-wav - - - wax - audio/x-ms-wax - - - - wbmp - image/vnd.wap.wbmp - - - wbs - application/vnd.criticaltools.wbs+xml - - - wbxml - application/vnd.wap.wbxml - - - wcm - application/vnd.ms-works - - - wdb - application/vnd.ms-works - - - wdp - image/vnd.ms-photo - - - weba - audio/webm - - - webm - video/webm - - - webp - image/webp - - - wg - application/vnd.pmi.widget - - - wgt - application/widget - - - wks - application/vnd.ms-works - - - wm - video/x-ms-wm - - - wma - audio/x-ms-wma - - - wmd - application/x-ms-wmd - - - wmf - application/x-msmetafile - - - - wml - text/vnd.wap.wml - - - - wmlc - application/vnd.wap.wmlc - - - - wmls - text/vnd.wap.wmlscript - - - - wmlsc - application/vnd.wap.wmlscriptc - - - wmv - video/x-ms-wmv - - - wmx - video/x-ms-wmx - - - wmz - application/x-msmetafile - - - woff - application/x-font-woff - - - wpd - application/vnd.wordperfect - - - wpl - application/vnd.ms-wpl - - - wps - application/vnd.ms-works - - - wqd - application/vnd.wqd - - - wri - application/x-mswrite - - - wrl - model/vrml - - - wsdl - application/wsdl+xml - - - wspolicy - application/wspolicy+xml - - - wtb - application/vnd.webturbo - - - wvx - video/x-ms-wvx - - - x32 - application/x-authorware-bin - - - x3d - model/x3d+xml - - - x3db - model/x3d+binary - - - x3dbz - model/x3d+binary - - - x3dv - model/x3d+vrml - - - x3dvz - model/x3d+vrml - - - x3dz - model/x3d+xml - - - xaml - application/xaml+xml - - - xap - application/x-silverlight-app - - - xar - application/vnd.xara - - - xbap - application/x-ms-xbap - - - xbd - application/vnd.fujixerox.docuworks.binder - - - xbm - image/x-xbitmap - - - xdf - application/xcap-diff+xml - - - xdm - application/vnd.syncml.dm+xml - - - xdp - application/vnd.adobe.xdp+xml - - - xdssc - application/dssc+xml - - - xdw - application/vnd.fujixerox.docuworks - - - xenc - application/xenc+xml - - - xer - application/patch-ops-error+xml - - - xfdf - application/vnd.adobe.xfdf - - - xfdl - application/vnd.xfdl - - - xht - application/xhtml+xml - - - xhtml - application/xhtml+xml - - - xhvml - application/xv+xml - - - xif - image/vnd.xiff - - - xla - application/vnd.ms-excel - - - xlam - application/vnd.ms-excel.addin.macroenabled.12 
- - - xlc - application/vnd.ms-excel - - - xlf - application/x-xliff+xml - - - xlm - application/vnd.ms-excel - - - xls - application/vnd.ms-excel - - - xlsb - application/vnd.ms-excel.sheet.binary.macroenabled.12 - - - xlsm - application/vnd.ms-excel.sheet.macroenabled.12 - - - xlsx - application/vnd.openxmlformats-officedocument.spreadsheetml.sheet - - - xlt - application/vnd.ms-excel - - - xltm - application/vnd.ms-excel.template.macroenabled.12 - - - xltx - application/vnd.openxmlformats-officedocument.spreadsheetml.template - - - xlw - application/vnd.ms-excel - - - xm - audio/xm - - - xml - application/xml - - - xo - application/vnd.olpc-sugar - - - xop - application/xop+xml - - - xpi - application/x-xpinstall - - - xpl - application/xproc+xml - - - xpm - image/x-xpixmap - - - xpr - application/vnd.is-xpr - - - xps - application/vnd.ms-xpsdocument - - - xpw - application/vnd.intercon.formnet - - - xpx - application/vnd.intercon.formnet - - - xsl - application/xml - - - xslt - application/xslt+xml - - - xsm - application/vnd.syncml+xml - - - xspf - application/xspf+xml - - - xul - application/vnd.mozilla.xul+xml - - - xvm - application/xv+xml - - - xvml - application/xv+xml - - - xwd - image/x-xwindowdump - - - xyz - chemical/x-xyz - - - xz - application/x-xz - - - yang - application/yang - - - yin - application/yin+xml - - - z - application/x-compress - - - Z - application/x-compress - - - z1 - application/x-zmachine - - - z2 - application/x-zmachine - - - z3 - application/x-zmachine - - - z4 - application/x-zmachine - - - z5 - application/x-zmachine - - - z6 - application/x-zmachine - - - z7 - application/x-zmachine - - - z8 - application/x-zmachine - - - zaz - application/vnd.zzazz.deck+xml - - - zip - application/zip - - - zir - application/vnd.zul - - - zirz - application/vnd.zul - - - zmm - application/vnd.handheld-entertainment+xml - - - - - - - - - - - - - - - - - - index.html - index.htm - index.jsp - - - diff --git a/ArgusDocker/simple/argus.properties b/ArgusDocker/simple/argus.properties deleted file mode 100644 index af3c12581..000000000 --- a/ArgusDocker/simple/argus.properties +++ /dev/null @@ -1,24 +0,0 @@ -# Default settings for unit and integration tests. 
-build.property.persistence.unit=org.eclipse.persistence.jpa.PersistenceProvider -build.property.secure.cookies=false -system.property.admin.email=argus-admin@mycompany.com -system.property.log.level=DEBUG -system.property.mail.enabled=false - -# skip ldap (any user can log in with any password) -service.binding.auth=com.salesforce.dva.argus.service.auth.NoAuthService - -service.property.mail.alerturl.template=https\://localhost\:8443/argus/\#/alerts/$alertid$ -service.property.mail.metricurl.template=https\://localhost\:8443/argus/\#/viewmetrics?expression\=$expression$ -service.property.mail.smtp.auth=false -service.property.mail.smtp.host=smtprelay.mycompany.com -service.property.mail.smtp.starttls.enable=false -service.property.tsdb.connection.count=2 -service.property.tsdb.endpoint.read=http://opentsdb:4242 -service.property.tsdb.endpoint.timeout=10000 -service.property.tsdb.endpoint.write=http://opentsdb:4242 -service.property.cache.redis.cluster=redis:6379 - -# kafka -service.property.mq.kafka.brokers=kafka:9092 -service.property.mq.zookeeper.connect=kafka:2181 \ No newline at end of file
diff --git a/ArgusDocker/simple/conf/opentsdb/opentsdb.conf b/ArgusDocker/simple/conf/opentsdb/opentsdb.conf deleted file mode 100644 index 4749e9e56..000000000 --- a/ArgusDocker/simple/conf/opentsdb/opentsdb.conf +++ /dev/null @@ -1,5 +0,0 @@ -## additional -tsd.http.request.enable_chunked = true -tsd.http.request.max_chunk = 1048576 -tsd.http.query.allow_delete=true -tsd.http.request.cors_domains=* \ No newline at end of file
diff --git a/ArgusDocker/simple/conf/opentsdb/start_opentsdb.sh b/ArgusDocker/simple/conf/opentsdb/start_opentsdb.sh deleted file mode 100755 index be7e104e8..000000000 --- a/ArgusDocker/simple/conf/opentsdb/start_opentsdb.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -export TSDB_VERSION="2.2.0" -echo "Sleeping for 30 seconds to give HBase time to warm up" -sleep 30 - -if [ ! -e /opt/opentsdb_tables_created.txt ]; then - echo "creating tsdb tables" - bash /opt/bin/create_tsdb_tables.sh - echo "created tsdb tables" -fi - -echo "starting opentsdb" -/opt/opentsdb/opentsdb-${TSDB_VERSION}/build/tsdb tsd --port=4242 --staticroot=/opt/opentsdb/opentsdb-${TSDB_VERSION}/build/staticroot --cachedir=/tmp --auto-metric --config=/opt/opentsdb/config/opentsdb.conf
diff --git a/ArgusDocker/simple/config.js b/ArgusDocker/simple/config.js deleted file mode 100644 index 8f92af01c..000000000 --- a/ArgusDocker/simple/config.js +++ /dev/null @@ -1,29 +0,0 @@ -/*! Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * - * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * - * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software - * without specific prior written permission.
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, - * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, - * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ -angular.module("argus.urlConfig", []) -.constant('CONFIG', { - version: '2.3.0-SNAPSHOT', - wsUrl: 'http://localhost:8081/argus/', - emailUrl: 'https://mail.google.com/mail/?view=cm&fs=1&tf=1&to=argus-dev@mycompany.com', - feedUrl: 'https://groups.google.com/a/mycompany.com/forum/?hl=en#!forum/argus-user', - wikiUrl: 'https://github.com/salesforce/Argus/wiki', - issueUrl: 'https://groups.google.com/a/mycompany.com/forum/?hl=en#!forum/argus-dev', - templatePath: '/app/views/argus_custom_directives/templates/' -}); diff --git a/ArgusDocker/simple/docker-compose.yml b/ArgusDocker/simple/docker-compose.yml deleted file mode 100644 index 1ccc6cd7f..000000000 --- a/ArgusDocker/simple/docker-compose.yml +++ /dev/null @@ -1,93 +0,0 @@ -version: '2' -services: - opentsdb: - image: petergrace/opentsdb-docker:latest - hostname: opentsdb - container_name: opentsdb - ports: - - "4242:4242" - volumes: - - ./conf/opentsdb/start_opentsdb.sh:/opt/bin/start_opentsdb.sh - - ./conf/opentsdb/opentsdb.conf:/opt/opentsdb/config/opentsdb.conf:ro - grafana: - image: grafana/grafana:4.0.0 - hostname: grafana - container_name: grafana - ports: - - "3000:3000" - redis: - image: redis:3.2.5 - hostname: redis - container_name: redis - kafka: - image: spotify/kafka:latest - hostname: kafka - container_name: kafka - ports: - - "2181:2181" - - "9092:9092" - environment: - - ADVERTISED_HOST=kafka - - ADVERTISED_PORT=9092 - argus-web-services: - image: salesforce/argus-webservices - hostname: argus-web-services - container_name: argus-web-services - ports: - - "8081:8080" - - "5005:5005" - volumes: - - ./argus.properties:/usr/argus/argus.properties:ro - - ./argus-web-services/web.xml:/usr/local/tomcat/conf/web.xml:ro - environment: - - "CATALINA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dargus.config.public.location=/usr/argus/argus.properties" - argus-web: - image: salesforce/argus-web - hostname: argus-web - container_name: argus-web - ports: - - "8082:8000" - volumes: - - ./config.js:/usr/argus/argusWeb/app/js/config.js:ro - argus-metrics-client: - image: salesforce/argus-client - hostname: argus-metrics-client - container_name: argus-metrics-client - ports: - - "5006:5000" - volumes: - - ./argus.properties:/usr/argus/argusClient/argus.properties:ro - environment: - - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000" - - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties" - depends_on: - - "kafka" - command: [COMMIT_METRICS] - argus-alert-client: - image: salesforce/argus-client - hostname: argus-alert-client - container_name: argus-alert-client - ports: - - "5007:5000" - volumes: - - ./argus.properties:/usr/argus/argusClient/argus.properties:ro - environment: - - 
"EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000" - - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties" - depends_on: - - "kafka" - command: [ALERT] - argus-annotations-client: - image: salesforce/argus-client - hostname: argus-annotations-client - container_name: argus-annotations-client - ports: - - "5008:5000" - volumes: - - ./argus.properties:/usr/argus/argusClient/argus.properties:ro - environment: - - "EXTRA_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5000" - - "ARGUSCLIENT_CFG=/usr/argus/argusClient/argus.properties" - depends_on: - - "kafka" - command: [COMMIT_ANNOTATIONS] diff --git a/ArgusSDK/pom.xml b/ArgusSDK/pom.xml index b2290e392..d3b5242d4 100644 --- a/ArgusSDK/pom.xml +++ b/ArgusSDK/pom.xml @@ -4,10 +4,11 @@ argus com.salesforce.argus - 2.22.3 + 4-SNAPSHOT .. argus-sdk + 4.75-SNAPSHOT jar ArgusSDK Java SDK for the Argus Production Monitoring Tool. @@ -36,7 +37,7 @@ ${project.artifactId}-${project.parent.version}.jar - package + package single @@ -113,22 +114,22 @@ - - ${project.groupId} - argus - ${project.version} - resources - zip - provided - - - ${project.groupId} - argus - ${project.version} - test-resources - zip - provided - + + ${project.groupId} + argus + ${project.parent.version} + resources + zip + provided + + + ${project.groupId} + argus + ${project.parent.version} + test-resources + zip + provided + junit junit @@ -159,8 +160,14 @@ org.mockito - mockito-all - 1.10.19 + mockito-core + 2.27.0 + test + + + org.powermock + powermock-api-mockito2 + 2.0.0 test diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/AlertService.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/AlertService.java index 1468d7bb4..74b7c6a0a 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/AlertService.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/AlertService.java @@ -282,6 +282,26 @@ public Alert updateAlert(BigInteger alertId, Alert alert) throws IOException, To return fromJson(response.getResult(), Alert.class); } + + /** + * Clones an existing alert. + * + * @param alertId The alert ID. + * @param newAlertName The name of the cloned alert. + * + * @return The cloned alert. + * + * @throws IOException If the server cannot be reached. + * @throws TokenExpiredException If the token sent along with the request has expired + */ + public Alert cloneAlert(BigInteger alertId, String newAlertName) throws IOException, TokenExpiredException { + String requestUrl = RESOURCE + "/" + alertId.toString() + "/clone?alertname=" + newAlertName; + ArgusResponse response = getClient().executeHttpRequest(ArgusHttpClient.RequestType.GET, requestUrl, null); + + assertValidResponse(response, requestUrl); + return fromJson(response.getResult(), Alert.class); + } + /** * Updates an existing notification. 
*
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Annotation.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Annotation.java index a65136e67..105c06e84 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Annotation.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Annotation.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.util.Map; import java.util.Objects; @@ -38,6 +40,7 @@ * * @author Tom Valine (tvaline@salesforce.com) */ +@JsonIgnoreProperties(ignoreUnknown = true) public class Annotation extends TSDBEntity { //~ Instance fields ****************************************
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Batch.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Batch.java index 659e7ee83..1b26d6bcc 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Batch.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Batch.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.util.List; import java.util.Objects; @@ -38,6 +40,7 @@ * * @author Tom Valine (tvaline@salesforce.com) */ +@JsonIgnoreProperties(ignoreUnknown = true) public class Batch { //~ Instance fields ****************************************
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Entity.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Entity.java index 3267b8e0a..b73d3ee69 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Entity.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Entity.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.io.Serializable; import java.math.BigInteger; import java.util.Date; @@ -41,6 +43,7 @@ * @author Tom Valine (tvaline@salesforce.com) */ @SuppressWarnings("serial") +@JsonIgnoreProperties(ignoreUnknown = true) abstract class Entity implements Serializable { //~ Instance fields ****************************************
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Metric.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Metric.java index 01f8e94b8..0a01530e0 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Metric.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/Metric.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.util.Map; import java.util.Objects; @@ -38,6 +40,7 @@ * * @author Bhinav Sura (bhinav.sura@salesforce.com) */ +@JsonIgnoreProperties(ignoreUnknown = true) public class Metric extends TSDBEntity { //~ Instance fields ****************************************
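The `@JsonIgnoreProperties(ignoreUnknown = true)` annotations being added across these SDK entity classes make Jackson deserialization tolerant of response fields the client does not model, so newer server payloads no longer break `fromJson`. A minimal, self-contained sketch of that behavior (standalone Jackson; the `Slim` class and the `extraField` key are hypothetical, not part of this change):

```java
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;

public class IgnoreUnknownDemo {

    // Hypothetical entity mirroring the annotation added in this diff.
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Slim {
        public int id;
    }

    public static void main(String[] args) throws Exception {
        // "extraField" is not declared on Slim. With ignoreUnknown = true it is
        // silently skipped; without the annotation, Jackson's default
        // FAIL_ON_UNKNOWN_PROPERTIES setting would throw UnrecognizedPropertyException.
        Slim s = new ObjectMapper().readValue("{\"id\":1,\"extraField\":\"x\"}", Slim.class);
        System.out.println(s.id); // prints 1
    }
}
```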
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryQuery.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryQuery.java index ae421af06..5fcb7627c 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryQuery.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryQuery.java @@ -1,8 +1,11 @@ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.io.Serializable; @SuppressWarnings("serial") +@JsonIgnoreProperties(ignoreUnknown = true) public class MetricDiscoveryQuery implements Serializable { private static int DEFAULT_LIMIT = 50;
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryResult.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryResult.java index ae57bdac7..c48586817 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryResult.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricDiscoveryResult.java @@ -5,6 +5,7 @@ import java.util.ArrayList; import java.util.List; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; @@ -13,6 +14,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; @SuppressWarnings("serial") +@JsonIgnoreProperties(ignoreUnknown = true) public class MetricDiscoveryResult implements Serializable { /* * When type is not null, the data is a list of strings; otherwise it is a list of MetricSchemaRecords.
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricSchemaRecord.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricSchemaRecord.java index e25fbec5c..9ec681b04 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricSchemaRecord.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/MetricSchemaRecord.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.util.Objects; /** @@ -39,6 +41,7 @@ * * @author Tom Valine (tvaline@salesforce.com) */ +@JsonIgnoreProperties(ignoreUnknown = true) public final class MetricSchemaRecord { //~ Instance fields ****************************************
diff --git a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/TSDBEntity.java b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/TSDBEntity.java index 1f8f59f16..820b5a6c0 100644 --- a/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/TSDBEntity.java +++ b/ArgusSDK/src/main/java/com/salesforce/dva/argus/sdk/entity/TSDBEntity.java @@ -30,6 +30,8 @@ */ package com.salesforce.dva.argus.sdk.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + import java.util.Map; import java.util.Objects; @@ -38,6 +40,7 @@ * * @author Bhinav Sura (bhinav.sura@salesforce.com) */ +@JsonIgnoreProperties(ignoreUnknown = true) public abstract class TSDBEntity { //~ Instance fields ****************************************
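A hedged usage sketch of the new `cloneAlert` SDK method added above (and exercised by the test diff that follows). How an authenticated `ArgusService` is obtained is outside this diff, so one is simply passed in; the alert ID and name here are illustrative:

```java
import java.math.BigInteger;

import com.salesforce.dva.argus.sdk.AlertService;
import com.salesforce.dva.argus.sdk.ArgusService;
import com.salesforce.dva.argus.sdk.entity.Alert;

public final class CloneAlertSketch {

    // Sketch only: clones alert 42 under a new name. Per the method body above,
    // this issues GET /alerts/42/clone?alertname=nightly-copy; note the SDK does
    // not URL-encode the new name, so keep it URL-safe.
    static Alert cloneIt(ArgusService argus) throws Exception {
        AlertService alerts = argus.getAlertService();
        return alerts.cloneAlert(BigInteger.valueOf(42), "nightly-copy");
    }
}
```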
diff --git a/ArgusSDK/src/test/java/com/salesforce/dva/argus/sdk/AlertServiceTest.java b/ArgusSDK/src/test/java/com/salesforce/dva/argus/sdk/AlertServiceTest.java index 43ce659c7..52cf44517 100644 --- a/ArgusSDK/src/test/java/com/salesforce/dva/argus/sdk/AlertServiceTest.java +++ b/ArgusSDK/src/test/java/com/salesforce/dva/argus/sdk/AlertServiceTest.java @@ -161,6 +161,19 @@ public void testUpdateAlert() throws IOException, TokenExpiredException { } } + @Test + public void testCloneAlert() throws IOException, TokenExpiredException { + try (ArgusService argusService = new ArgusService(getMockedClient("/AlertServiceTest.json"))) { + AlertService alertService = argusService.getAlertService(); + Alert alert = alertService.getAlert(BigInteger.ONE); + + Alert result = alertService.cloneAlert(BigInteger.ONE, "TestAlertClone"); + Alert expected = _constructClonedAlert(); + + assertEquals(expected, result); + } + } + @Test public void testUpdateNotification() throws IOException, TokenExpiredException { try (ArgusService argusService = new ArgusService(getMockedClient("/AlertServiceTest.json"))) { @@ -323,6 +336,14 @@ private Alert _constructUpdatedAlert() { return alert; } + private Alert _constructClonedAlert() { + Alert alert = _constructPersistedAlert(); + + alert.setId(BigInteger.ZERO); + alert.setName("TestAlertClone"); + return alert; + } + private Alert _constructPersistedAlert() { Alert alert = _constructUnpersistedAlert();
diff --git a/ArgusSDK/src/test/resources/AlertServiceTest.json b/ArgusSDK/src/test/resources/AlertServiceTest.json index b21044dd7..56635705a 100644 --- a/ArgusSDK/src/test/resources/AlertServiceTest.json +++ b/ArgusSDK/src/test/resources/AlertServiceTest.json @@ -33,6 +33,13 @@ "status": 200, "message": "update alert", "jsonOutput" : "{\"id\":1,\"createdById\":1,\"createdDate\":1472282830936,\"modifiedById\":1,\"modifiedDate\":1472282830936,\"name\":\"UpdatedAlert\",\"expression\":\"-1d:argus.jvm:open.file.descriptors:sum\",\"cronEntry\":\"* * * * *\",\"enabled\":false,\"missingDataNotificationEnabled\":false,\"notificationsIds\":[],\"triggersIds\":[],\"ownerName\":\"admin\",\"shared\":false}" +},{ + "type": "GET", + "endpoint": "/alerts/1/clone?alertname=TestAlertClone", + "jsonInput" :null, + "status": 200, + "message": "clone alert", + "jsonOutput" : "{\"id\":0,\"createdById\":1,\"createdDate\":1472282830936,\"modifiedById\":1,\"modifiedDate\":1472282830936,\"name\":\"TestAlertClone\",\"expression\":\"-1d:argus.jvm:open.file.descriptors:sum\",\"cronEntry\":\"* * * * *\",\"enabled\":false,\"missingDataNotificationEnabled\":false,\"notificationsIds\":[],\"triggersIds\":[],\"ownerName\":\"admin\",\"shared\":false}" },{ "type": "GET", "endpoint": "/alerts/1/notifications",
diff --git a/ArgusWeb/.gitignore b/ArgusWeb/.gitignore index 4be6a2968..6ef10eecf 100644 --- a/ArgusWeb/.gitignore +++ b/ArgusWeb/.gitignore @@ -89,4 +89,7 @@ bower_components/ tmp .DS_Store .idea -app/etc/ \ No newline at end of file +app/etc/ + +# This is generated by Grunt +app/js/config.js
diff --git a/ArgusWeb/README.md b/ArgusWeb/README.md index 9c365be5e..10aa0c00e 100644 --- a/ArgusWeb/README.md +++ b/ArgusWeb/README.md @@ -1,120 +1,82 @@ -ArgusWeb -= +# ArgusWeb -#### Build the Front-End ---- -> Install the following: +- [ArgusWeb](#argusweb) + - [Prerequisites](#prerequisites) + - [Install NodeJS & NPM](#install-nodejs--npm) + - [Install Grunt](#install-grunt) + - [Setup](#setup) + - [Developing locally](#developing-locally) + - [Building for Staging/Prod](#building-for-stagingprod) + - [Other commands](#other-commands) + +## Prerequisites +### Install NodeJS & NPM -- NodeJS & NPM -- Bower (required for versions prior to Argus 2.6.0) -- Grunt - -##### Install NodeJS -# -> **Recommended** - 
You can use Homebrew to install NodeJS. This will install the latest version. ```sh $ brew install node ``` -This will prevent NPM permission issues when installing bower & grunt globally. +This will prevent NPM permission issues when `npm install`ing anything else. -> OR, you can download and install the latest **stable** version of NodeJS - https://nodejs.org/en/download/ +**OR** - you can download and install the latest **stable** version of NodeJS - https://nodejs.org/en/download/ -# -###### To fix any NPM permission errors: -- http://stackoverflow.com/questions/16151018/npm-throws-error-without-sudo -- https://docs.npmjs.com/getting-started/fixing-npm-permissions To verify NodeJS installation: ```sh $ node -v ``` -##### Install Grunt globally -# +### Install Grunt ```sh $ npm install -g grunt ``` +You should not have to use `sudo` when running `npm install`. See resolving permission errors: +- http://stackoverflow.com/questions/16151018/npm-throws-error-without-sudo +- https://docs.npmjs.com/getting-started/fixing-npm-permissions -##### Install Bower globally (required for versions prior to Argus 2.6.0) -# -```sh -$ npm install -g bower -``` -To verify Bower installation: -```sh -$ bower -v -``` - -##### Build Commands -# -###### Run npm to build and install: -# +## Setup +This installs all dependencies, both at the project level (`ArgusWeb/`) and at the Angular app level (`app/`): ```sh $ npm install ``` -###### Run bower to install packages from bower.json (required for versions prior to Argus 2.6.0): -# -```sh -$ bower install -``` -###### Run grunt to build -# +## Developing locally +Run one of these commands according to which webservice environment you wish to use: ```sh -$ grunt +$ grunt replace:local +# OR +$ grunt replace:qa ``` -###### Use [Webpack](https://webpack.js.org/concepts/) to bundle non vendor Javascript files (optional) -# +**Note: you may have to modify `wsUrl` in `app/js/config.js` if webservices is running on a different port** ```sh -$ npm run bundle +# This automatically uses jscodeshift to replace all templateUrl references +# to `require` format for the webpack build process. The replacement is undone on Ctrl+C. +$ npm start ``` -*dist* folder will be generated by Webpack in *ArgusWeb* directory -# -> NOTE: You can update the following .json files to reflect the environment you want to build for. -./config/local.json +To run an [eslint](http://eslint.org/) check against the *app* folder (all the source code): ```sh -$ grunt replace:local +$ npm run lint ``` -./config/development.json +## Building for Staging/Prod ```sh -$ grunt replace:development +$ grunt replace:<environment> +# modify app/js/config.js as needed +$ npm run bundle ``` +This will bundle all files into a flat `dist/` folder, which can then be copied to the staging/prod host.
-./config/production.json +Example for copying the UI so it runs on Tomcat's `localhost:8080/argus/`: ```sh -$ grunt replace:production +$ cp -r dist $TOMCAT_HOME/webapps/argus ``` -# -##### Available npm scripts +## Other commands Remove all existing npm modules ```sh $ npm run clean ``` -Use [jscodeshift](https://github.com/facebook/jscodeshift) to replace all templateUrl references to `require` format for webpack build process -```sh -$ npm run transformTemplates -``` - -Use [webpack-dev-server](https://github.com/webpack/webpack-dev-server) for development (Must run `npm run transformTemplates` first to have proper templates) -```sh -$ npm start -``` - -The reverse of `npm run transformTemplates` (normally run this after done with webpack) -```sh -$ npm run undoTransformTemplates -``` - -Run [eslint](http://eslint.org/) check against *app* folder (all the source code) -```sh -$ npm run lint -```
diff --git a/ArgusWeb/app/css/main.css b/ArgusWeb/app/css/main.css index 34f229426..75897c431 100644 --- a/ArgusWeb/app/css/main.css +++ b/ArgusWeb/app/css/main.css @@ -179,7 +179,7 @@ h5.bcDesc {margin-bottom:20px; font-weight:normal;} .metricFields, .metricFields .form-control {margin:0 2em .5em 0;} /* main graph/chart - i.e. metrics */ -#graphContainer {margin:1.5em auto 0; min-width:310px; height:400px;} +.graphContainer {margin:1.5em auto 0; min-width:310px; height:600px;} /* autocomplete */ .autoComplete .category {padding:4px; border:1px solid #71ABCE; border-radius:5px; font-size:11px;} @@ -314,6 +314,7 @@ h5.bcDesc {margin-bottom:20px; font-weight:normal;} .toolbarMenu a.glyphicon {font-size:18px; color:#999;} .toolbarMenu a.glyphicon:hover {color:inherit;} .toolbarMenu li label {font-weight:400; font-size:13px;} +.toolbarMenu a .oi {font-size:18px; color:#999;} .dateRange {float:right; margin:7px 2em 0 0; font-size:12px; color:#999;} @@ -416,6 +417,7 @@ ag-dashboard table {width: 100%; table-layout:fixed;} .fullscreen { width: 100%; height: 100%; + background-color: white; } /* for ag-table */ @@ -484,7 +486,6 @@ a.definition-tip, a.definition-tip:hover, a.definition-tip:visited { position: fixed; top: 55px; left: 0; right: 0; bottom: 0; height: auto; - z-index: ; } .search-options input[type=text] { @@ -492,4 +493,34 @@ a.definition-tip, a.definition-tip:hover, a.definition-tip:visited { color: #555; border-radius: 3px; border: 1px solid #ccc; +} + +.glyphicon.spinning { + animation: spin 1s infinite linear; + -webkit-animation: spin 1s infinite linear; +} + +@keyframes spin { + from { + transform: scale(1) rotate(0deg); + } + to { + transform: scale(1) rotate(360deg); + } +} +@-webkit-keyframes spin { + from { + -webkit-transform: rotate(0deg); + } + to { + -webkit-transform: rotate(360deg); + } +} + +.fixed-dashboard-info { + left: 3%; + top: 35%; + min-width: 250px; + padding-right: 3%; + position: fixed; } \ No newline at end of file
diff --git a/ArgusWeb/app/index.html b/ArgusWeb/app/index.html index ee3d507b9..6b932a3f6 100644 --- a/ArgusWeb/app/index.html +++ b/ArgusWeb/app/index.html @@ -12,6 +12,7 @@ [one added tag; the HTML line itself was stripped, leaving only its + marker] @@ -39,7 +40,7 @@ [one tag replaced; the removed and added HTML lines were stripped, leaving only their -/+ markers]
diff --git a/ArgusWeb/app/js/argusConfig.js b/ArgusWeb/app/js/argusConfig.js index 9c14c44d4..16d77777e 100644 --- a/ArgusWeb/app/js/argusConfig.js +++ b/ArgusWeb/app/js/argusConfig.js @@ -61,12 +61,30 @@ angular.module('argus.config', []) reloadOnSearch: false }). when('/alerts', { + redirectTo: function() { + window.location.href = '/argusmvp/#/alerts'; + } + // templateUrl: 'js/templates/alert-list.html', + // controller: 'Alerts', + // label: 'Alerts List', + // activeTab: 'alerts' + }). + when('/alerts/:alertId', { + redirectTo: function(parameters) { + window.location.href = '/argusmvp/#/alerts/'+parameters.alertId; + } + // templateUrl: 'js/templates/alert-detail.html', + // controller: 'AlertsDetail', + // label: '{{alerts.alertId}}', + // activeTab: 'alerts' + }). + when('/alertsdeprecated', { templateUrl: 'js/templates/alert-list.html', controller: 'Alerts', label: 'Alerts List', activeTab: 'alerts' }). - when('/alerts/:alertId', { + when('/alertsdeprecated/:alertId',{ templateUrl: 'js/templates/alert-detail.html', controller: 'AlertsDetail', label: '{{alerts.alertId}}', diff --git a/ArgusWeb/app/js/config.js b/ArgusWeb/app/js/config.js deleted file mode 100644 index 67abf10c4..000000000 --- a/ArgusWeb/app/js/config.js +++ /dev/null @@ -1,38 +0,0 @@ -/*! Copyright (c) 2016, Salesforce.com, Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * - * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * - * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * - * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, - * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR - * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, - * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ -'use strict'; -/*global angular:false */ - -angular.module('argus.urlConfig', []) -.constant('CONFIG', { - version: '2.22', - wsUrl: 'http://localhost:8080/argusws/', - wsBetaUrl: 'http://localhost:8080/argusws/beta/', - emailUrl: 'https://mail.google.com/mail/?view=cm&fs=1&tf=1&to=argus-dev@mycompany.com', - feedUrl: 'https://groups.google.com/a/mycompany.com/forum/?hl=en#!forum/argus-user', - wikiUrl: 'https://github.com/salesforce/Argus/wiki', - docUrl: 'https://github.com/salesforce/Argus/wiki', - issueUrl: 'https://groups.google.com/a/salesforce.com/forum/?hl=en#!forum/argus-dev', - templatePath: 'app/js/templates', - acceptOAuthPath:'users/accept_oauth', - checkOAuthAccessPath : 'users/check_oauth_access', - oauthListPath: 'users/oauth_approved_apps', - oauthDeletePath: 'users/revoke_oauth_access' -}); diff --git a/ArgusWeb/app/js/controllers/alerts.js b/ArgusWeb/app/js/controllers/alerts.js index fb44392bd..7ce4ceae4 100644 --- a/ArgusWeb/app/js/controllers/alerts.js +++ b/ArgusWeb/app/js/controllers/alerts.js @@ -22,7 +22,6 @@ angular.module('argus.controllers.alerts', ['ngResource']) .controller('Alerts', ['Auth', '$scope', 'growl', 'Alerts', 'TableListService', 'Storage', function (Auth, $scope, growl, Alerts, TableListService, Storage) { - $scope.colName = { id:'ID', name:'Name', @@ -34,7 +33,8 @@ angular.module('argus.controllers.alerts', ['ngResource']) }; $scope.properties = { title: 'Alert', - type: 'alerts' + type: 'alerts', + path: 'alertsdeprecated' }; $scope.tabNames = { userPrivileged: Auth.isPrivileged(), diff --git a/ArgusWeb/app/js/controllers/dashboards.js b/ArgusWeb/app/js/controllers/dashboards.js index c3dd16d12..021f148cb 100644 --- a/ArgusWeb/app/js/controllers/dashboards.js +++ b/ArgusWeb/app/js/controllers/dashboards.js @@ -35,7 +35,8 @@ angular.module('argus.controllers.dashboards', ['ngResource', 'ui.codemirror']) }; $scope.properties = { title: 'Dashboard', - type: 'dashboards' + type: 'dashboards', + path: 'dashboards' }; $scope.tabNames = { userPrivileged: Auth.isPrivileged(), diff --git a/ArgusWeb/app/js/controllers/dashboardsDetail.js b/ArgusWeb/app/js/controllers/dashboardsDetail.js index 8d4435ddd..0984aa25a 100644 --- a/ArgusWeb/app/js/controllers/dashboardsDetail.js +++ b/ArgusWeb/app/js/controllers/dashboardsDetail.js @@ -5,24 +5,39 @@ angular.module('argus.controllers.dashboards.detail', ['ngResource', 'ui.codemir .controller('DashboardsDetail', ['Storage', '$scope','$http', '$routeParams', '$location', '$window', 'growl', 'Dashboards', 'History','$sessionStorage', 'Auth', function (Storage, $scope,$http, $routeParams, $location, $window, growl, Dashboards, History, $sessionStorage, Auth) { $scope.dashboardNotEditable = true; + $scope.needToRefreshView = false; + $scope.isSaving = false; + $scope.saveButtonText = 'Saved'; $scope.isDashboardDirty = function () { - return !angular.equals($scope.dashboard, $scope.unmodifiedDashboard); + if (angular.equals($scope.dashboard, $scope.unmodifiedDashboard)){ + $scope.saveButtonText = 'Saved'; + return false; + }else{ + $scope.saveButtonText = 'Save'; + return true; + } }; $scope.updateDashboard = function () { if ($scope.isDashboardDirty()) { var dashboard = $scope.dashboard; + $scope.isSaving = true; + $scope.saveButtonText = 'Saving'; Dashboards.update({dashboardId: dashboard.id}, dashboard, function () { + $scope.isSaving = false; $scope.unmodifiedDashboard = angular.copy(dashboard); growl.success(('Updated "') + dashboard.name + '"'); $scope.fetchHistory(); // remove existing session storage for 
update if ($sessionStorage.dashboards !== undefined) delete $sessionStorage.dashboards.cachedData;
- $window.location.reload();
+ // $window.location.reload();
}, function () {
growl.error('Failed to update "' + dashboard.name + '"');
+ $scope.isSaving = false;
+ $scope.saveButtonText = 'Save';
});
}
+ $scope.needToRefreshView = true;
};
$scope.resetDashboard = function () {
@@ -39,6 +54,15 @@ angular.module('argus.controllers.dashboards.detail', ['ngResource', 'ui.codemir
$scope.selectTab = function (tab) {
$scope.selectedTab = tab;
+ if(tab === 1 && $scope.needToRefreshView){
+ var url = $window.location.href;
+ var indexOfQuery = url.indexOf('?');
+ if (indexOfQuery > 0){
+ url = url.substring(0, indexOfQuery); //clear the query parameter
+ window.location.href = url;
+ }
+ window.location.reload();
+ }
};
$scope.fetchHistory = function() {
diff --git a/ArgusWeb/app/js/controllers/login.js b/ArgusWeb/app/js/controllers/login.js
index 273367db3..d72e6d322 100644
--- a/ArgusWeb/app/js/controllers/login.js
+++ b/ArgusWeb/app/js/controllers/login.js
@@ -21,7 +21,16 @@
/*global angular:false */
angular.module('argus.controllers.login', [])
-.controller('Login', ['$scope', function ($scope) {
+.controller('Login', ['$scope', 'growl', 'Storage', function ($scope, growl, Storage) {
$scope.username = null;
$scope.password = null;
+ try {
+ var loginError = Storage.get('loginError');
+ if (loginError) {
+ growl.error(loginError);
+ }
+ } catch(error) {
+ console.log(error);
+ }
+ Storage.clear('loginError');
}]);
diff --git a/ArgusWeb/app/js/controllers/viewMetrics.js b/ArgusWeb/app/js/controllers/viewMetrics.js
index 76edf4742..b3037423a 100644
--- a/ArgusWeb/app/js/controllers/viewMetrics.js
+++ b/ArgusWeb/app/js/controllers/viewMetrics.js
@@ -1,17 +1,15 @@
/*global angular:false, console:false */
'use strict';
-angular.module('argus.controllers.viewMetrics', ['ngResource'])
-.controller('ViewMetrics', ['$location', '$routeParams', '$scope', '$compile', 'growl', 'Metrics', 'Annotations', 'SearchService', 'Controls', 'ChartDataProcessingService', 'DateHandlerService', 'InputTracker',
- function ($location, $routeParams, $scope, $compile, growl, Metrics, Annotations, SearchService, Controls, ChartDataProcessingService, DateHandlerService, InputTracker) {
+angular.module('argus.controllers.viewMetrics', ['ngResource', 'ui.codemirror'])
+.controller('ViewMetrics', ['$location', '$routeParams', '$scope', '$compile', 'growl', 'Metrics', 'Annotations', 'SearchService', 'Controls', 'ChartDataProcessingService', 'DateHandlerService', 'UtilService',
+ function ($location, $routeParams, $scope, $compile, growl, Metrics, Annotations, SearchService, Controls, ChartDataProcessingService, DateHandlerService, UtilService) {
var lastParams;
var noMorePages = false;
$scope.annotationType = 'ALERT';
$scope.expression = $routeParams.expression ? $routeParams.expression : null;
- $scope.includeAnnotations = InputTracker.getDefaultValue('viewMetricsWithAnnotation', true);
- $scope.$watch('includeAnnotations', function (newValue) {
- InputTracker.updateDefaultValue('viewMetricsWithAnnotation', true, newValue);
- });
+ // Do not query annotations by default
+ $scope.includeAnnotations = false;
// sub-views: (1) single chart, (2) metric discovery
$scope.checkMetricExpression = function() {
if ($scope.expression) {
@@ -214,7 +212,7 @@ angular.module('argus.controllers.viewMetrics', ['ngResource'])
var agg_Str = '';
var namespace_Str = (n && n.length > 1) ?
':' + n : ''; - var defaultDownsampler; + var defaultDownsampler = ''; /* Add default settings for: start, aggregator full: -1h:scope:metric{tags}:avg:namespace @@ -238,15 +236,23 @@ angular.module('argus.controllers.viewMetrics', ['ngResource']) // } // ------------- - + $scope.updateChart = function (series, annotationInfo, expressions) { // if the metric expression is not empty + + //User configured chart type has higher priority + var chartType = $scope.chartType || ChartDataProcessingService.getChartTypeByExpressions(expressions); + var chartMarkup = ChartDataProcessingService.getChartMarkupByType(chartType); + if (series && series.length > 0) { var chartScope = $scope.$new(false); chartScope.chartConfig = { chartId: 'container', expressions: expressions, - chartType: 'line' + chartType: chartType, + chart: { + height: 500 + } }; chartScope.dateConfig = {}; chartScope.series = series; @@ -286,10 +292,7 @@ angular.module('argus.controllers.viewMetrics', ['ngResource']) annotationCount.tot--; if (annotationCount.tot === 0) { $scope.chartLoaded = true; - angular.element('#' + 'container').append($compile( - '
' + - '' + - '
')(chartScope) + angular.element('#' + 'container').append($compile(chartMarkup)(chartScope) ); } }, function (error) { @@ -297,24 +300,64 @@ angular.module('argus.controllers.viewMetrics', ['ngResource']) annotationCount.tot--; if (annotationCount.tot === 0) { $scope.chartLoaded = true; - angular.element('#' + 'container').append($compile( - '
' + - '' + - '
')(chartScope) + angular.element('#' + 'container').append($compile(chartMarkup)(chartScope) ); } }); } } else { $scope.chartLoaded = true; - angular.element('#' + 'container').append($compile( - '
' + - '' + - '
')(chartScope)
+ angular.element('#' + 'container').append($compile(chartMarkup)(chartScope)
);
}
}
};
- $scope.getMetricData(null);
+ $scope.resetChartType = function(type){
+ $scope.chartType = type;
+ $scope.getMetricData();
+ };
+
+ $scope.getMetricData();
+
+ $scope.editorShown = false;
+ $scope.treeText = '';
+ $scope.prettify = function() {
+ $scope.editorShown = true;
+ $scope.treeText = UtilService.prettifyExpression($scope.expression);
+ };
+ $scope.hide = function() {
+ $scope.editorShown = false;
+ };
+ $scope.textAreaOnChange = function() {
+ var tree = UtilService.getExpressionTree($scope.treeText);
+ $scope.expression = UtilService.flatTree(tree);
+ };
+ $scope.editorLoaded = function (editor) {
+ editor.setSize(null, 'auto');
+ editor.on('keydown', function(editor, event){
+ event.stopPropagation();
+ });
+ };
+
+ $scope.editorOptions = {
+ lineWrapping: true,
+ lineNumbers: true,
+ mode: 'julia',
+ viewportMargin: Infinity,
+ tabSize: 2,
+ foldGutter: {
+ rangeFinder: UtilService.rangeFinderParentheses,
+ },
+ gutters: ['CodeMirror-linenumbers', 'CodeMirror-foldgutter'],
+ extraKeys: { /* keyboard shortcuts in the editor */
+ 'Alt-Space': 'autocomplete',
+ 'Ctrl-Alt-F': function(editor) {
+ editor.setOption('fullScreen', !editor.getOption('fullScreen'));
+ },
+ 'Esc': function(editor) {
+ if (editor.getOption('fullScreen')) editor.setOption('fullScreen', false);
+ },
+ }
+ };
}]);
diff --git a/ArgusWeb/app/js/directives/charts/chart.js b/ArgusWeb/app/js/directives/charts/chart.js
index 8448ec5d9..8bd3e267a 100644
--- a/ArgusWeb/app/js/directives/charts/chart.js
+++ b/ArgusWeb/app/js/directives/charts/chart.js
@@ -183,7 +183,9 @@ angular.module('argus.directives.charts.chart', [])
}
// TODO: below functions 'should' be refactored to the chart services.
- function setupChart(scope, element, attributes, controls) {
+ function setupChart(scope, element, attributes, controls, type) {
+ // save the controls to chart scope
+ scope.controls = controls;
// remove/clear any previous chart rendering from DOM
var lastEl = element[0].querySelector('[id^=element_chart]');
var lastId = lastEl? lastEl.id: null;
@@ -192,7 +194,8 @@ angular.module('argus.directives.charts.chart', [])
// if the element has content previously, leave the id unchanged
var newChartId = lastId || 'element_' + VIEWELEMENT.chart + chartNameIndex++;
- var chartType = attributes.type ?
attributes.type : 'line'; + //UI configured type > html defined type > default type (line) + var chartType = type || attributes.type || 'line'; chartType = chartType.toLowerCase(); // TODO: make this a constant somewhere else var supportedChartTypes = ['line', 'area', 'scatter', 'stackarea', 'bar', 'stackbar', 'heatmap']; @@ -273,6 +276,10 @@ angular.module('argus.directives.charts.chart', []) element.on('$destroy', function(){ chartNameIndex = 1; }); + //scope functions + scope.resetChartType = function(type){ + setupChart(scope, element, attributes, scope.controls, type); + }; } }; } diff --git a/ArgusWeb/app/js/directives/charts/heatmap.js b/ArgusWeb/app/js/directives/charts/heatmap.js index 348067533..4812d74e4 100644 --- a/ArgusWeb/app/js/directives/charts/heatmap.js +++ b/ArgusWeb/app/js/directives/charts/heatmap.js @@ -263,6 +263,10 @@ angular.module('argus.directives.charts.heatmap', []) .style('display', displayProperty) .attr('displayProperty', displayProperty);//this is for recording the display property when circle is outside range } + // change chart type + $scope.changeChartType = function (type) { + $scope.$parent.resetChartType(type); + } }], // compile: function (iElement, iAttrs, transclude) {}, link: function (scope, element) { @@ -336,7 +340,7 @@ angular.module('argus.directives.charts.heatmap', []) // color scheme var z = ChartToolService.setColorScheme(scope.menuOption.colorPalette); // determine chart layout and dimensions - var containerHeight = isSmallChart ? 150 : 330; + var containerHeight = isSmallChart ? ChartElementService.smallChartHeight : ChartElementService.defaultChartHeight; var containerWidth = $('#' + chartId).width(); // remember the original size var defaultContainerWidth = -1; diff --git a/ArgusWeb/app/js/directives/charts/lineChart.js b/ArgusWeb/app/js/directives/charts/lineChart.js index 3d47d87e7..aec95d298 100644 --- a/ArgusWeb/app/js/directives/charts/lineChart.js +++ b/ArgusWeb/app/js/directives/charts/lineChart.js @@ -1,6 +1,5 @@ 'use strict'; /*global angular:false, d3:false, $:false, window:false, screen:false, console:false */ - angular.module('argus.directives.charts.lineChart', []) .directive('lineChart', ['$timeout', 'Storage', 'ChartToolService', 'ChartElementService', function($timeout, Storage, ChartToolService, ChartElementService) { //--------------------resize all charts------------------- @@ -273,6 +272,11 @@ angular.module('argus.directives.charts.lineChart', []) // .style('display', displayProperty) // .attr('displayProperty', displayProperty);//this is for recording the display property when circle is outside range } + + // change chart type + $scope.changeChartType = function (type) { + $scope.$parent.resetChartType(type); + } }], // compile: function (iElement, iAttrs, transclude) {}, link: function (scope, element) { @@ -345,7 +349,7 @@ angular.module('argus.directives.charts.lineChart', []) // color scheme var z = ChartToolService.setColorScheme(scope.menuOption.colorPalette); // determine chart layout and dimensions - var containerHeight = isSmallChart ? 175 : 330; + var containerHeight = isSmallChart ? 
ChartElementService.smallChartHeight : ChartElementService.defaultChartHeight; var containerWidth = $('#' + chartId).width(); // remember the original size var defaultContainerWidth = -1; diff --git a/ArgusWeb/app/js/directives/controls/compute.js b/ArgusWeb/app/js/directives/controls/compute.js index 5e2bd60da..e77ac961f 100644 --- a/ArgusWeb/app/js/directives/controls/compute.js +++ b/ArgusWeb/app/js/directives/controls/compute.js @@ -32,7 +32,7 @@ angular.module('argus.directives.controls.compute', []) element.hide(); scope.$on(dashboardCtrl.getControlChangeEventName(), function(evt, control) { - refreshControl(scope, element.text(), control, dashboardCtrl) + refreshControl(scope, element.text(), control, dashboardCtrl); }); refreshControl(scope, element.text(), {}, dashboardCtrl); diff --git a/ArgusWeb/app/js/directives/controls/select.js b/ArgusWeb/app/js/directives/controls/select.js index eacfbeb6e..39356ff6e 100644 --- a/ArgusWeb/app/js/directives/controls/select.js +++ b/ArgusWeb/app/js/directives/controls/select.js @@ -46,7 +46,7 @@ angular.module('argus.directives.controls.select', ['selectize']) //find all option tags in the ag-select tag, and add their contents element.find('ng-transclude option').each(function(){ selectize.addOption({ - text: this.innerText, + text: this.innerHTML, value: this.value }); optionSet.add(this.value); diff --git a/ArgusWeb/app/js/services/auth.js b/ArgusWeb/app/js/services/auth.js index e557c9792..cb89086ce 100644 --- a/ArgusWeb/app/js/services/auth.js +++ b/ArgusWeb/app/js/services/auth.js @@ -41,14 +41,9 @@ angular.module('argus.services.auth', []) }); }, logout: function () { - Storage.reset(); + Storage.reset(); //remove user info, token $resource(CONFIG.wsUrl + 'v2/auth/logout', {}, {}).get({}, function () { growl.info('You are now logged out'); - //-------Token Based Authentication---------- - //remove token - // Storage.clear('accessToken'); - // Storage.clear('refreshToken'); - $location.path('/login'); }, function () { growl.error('Logout failed'); @@ -92,7 +87,7 @@ angular.module('argus.services.auth', []) return $resource(CONFIG.wsUrl + refreshPath, {}, {}).save(creds, function(data){ Storage.set('accessToken', data.accessToken); }, function(error){ - growl.error(error); + console.log(error); }); } }; diff --git a/ArgusWeb/app/js/services/charts/chartElements.js b/ArgusWeb/app/js/services/charts/chartElements.js index ca1194ddc..0a8e8e62a 100644 --- a/ArgusWeb/app/js/services/charts/chartElements.js +++ b/ArgusWeb/app/js/services/charts/chartElements.js @@ -23,7 +23,8 @@ angular.module('argus.services.charts.elements', []) var annotationLabelFontSize = 14; var extraYAxisPadding = ChartToolService.extraYAxisPadding; this.customizedChartType = ['scatter', 'bar', 'stackbar']; - + this.defaultChartHeight = 330; + this.smallChartHeight = 150; var setGraphColorStyle = function (graph, color, chartType, opacity) { graph.style('stroke', color); diff --git a/ArgusWeb/app/js/services/charts/dataProcessing.js b/ArgusWeb/app/js/services/charts/dataProcessing.js index a03f02f42..365850bce 100644 --- a/ArgusWeb/app/js/services/charts/dataProcessing.js +++ b/ArgusWeb/app/js/services/charts/dataProcessing.js @@ -376,7 +376,28 @@ angular.module('argus.services.charts.dataProcessing', []) createSeriesName: createSeriesName, - copyFlagSeries: copyFlagSeries + copyFlagSeries: copyFlagSeries, + + getChartTypeByExpressions: function (expressions) { + var chartType = 'line'; + if(expressions.length > 0){ + // in view metrics page, there will only be one 
expression + if (expressions[0].includes(':histogram-buckets')){ + chartType = 'stackbar'; + } + } + return chartType; + }, + + getChartMarkupByType: function(chartType) { + return chartType === 'heatmap'? + '
' + + '' + + '
' : + '
' + + '' + + '
'; + }, }; return service; diff --git a/ArgusWeb/app/js/services/factories/dashboards.js b/ArgusWeb/app/js/services/factories/dashboards.js index 48260ea3b..634d65553 100644 --- a/ArgusWeb/app/js/services/factories/dashboards.js +++ b/ArgusWeb/app/js/services/factories/dashboards.js @@ -7,6 +7,6 @@ angular.module('argus.services.dashboards', []) query: {method: 'GET', params: {dashboardId: ''}, isArray: true}, update: {method: 'PUT'}, getMeta: {method: 'GET', url: CONFIG.wsUrl + 'dashboards/meta', isArray: true}, - getPersonalDashboards: {method: 'GET', url: CONFIG.wsUrl + 'dashboards/meta?owner=' + Auth.getUsername(), isArray: true} + getPersonalDashboards: {method: 'GET', url: CONFIG.wsUrl + 'dashboards/meta?shared=false&owner=' + Auth.getUsername(), isArray: true} }); }]); diff --git a/ArgusWeb/app/js/services/storage.js b/ArgusWeb/app/js/services/storage.js index 08425984d..c1e120092 100644 --- a/ArgusWeb/app/js/services/storage.js +++ b/ArgusWeb/app/js/services/storage.js @@ -2,12 +2,12 @@ /*global angular:false, LZString:false, Promise:false */ angular.module('argus.services.storage', []) -.factory('Storage', ['$rootScope', '$localStorage', '$sessionStorage','$injector', '$window', '$location', function ($rootScope, $localStorage, $sessionStorage, $injector, $window, $location) { +.factory('Storage', ['$rootScope', '$sessionStorage','$injector', '$window', '$location', function ($rootScope, $sessionStorage, $injector, $window, $location) { var storageKeyPrefix = 'ngStorage-'; var serializer = angular.toJson; var deserializer = angular.fromJson; var localStorage = $window.localStorage; - $rootScope.storage = $localStorage; + // $rootScope.storage = $localStorage; var warnModalCount = 0; //prevent user from clicking so many confirm modals function warn (ls) { if (warnModalCount > 0) return; @@ -60,9 +60,16 @@ angular.module('argus.services.storage', []) return { get : function (key) { - var result = localStorage.getItem(storageKeyPrefix + key); - if(result !== undefined) result = deserializer(result); - return angular.isDefined(result) ? result : null; + try { + var result = localStorage.getItem(storageKeyPrefix + key); + if(result !== undefined) { + result = deserializer(result); + } + return angular.isDefined(result) ? 
result : null; + } catch (e){ + console.log(e); + } + return null; }, set : function (key, value) { @@ -86,6 +93,9 @@ angular.module('argus.services.storage', []) //delete user info, but preserve the storage of preferences this.clear('user'); this.clear('target'); + //remove token + this.clear('accessToken'); + this.clear('refreshToken'); $sessionStorage.$reset(); }, resetAll : function () { diff --git a/ArgusWeb/app/js/services/tokenAuthInterceptor.js b/ArgusWeb/app/js/services/tokenAuthInterceptor.js index 074903f5a..3aa1949e0 100644 --- a/ArgusWeb/app/js/services/tokenAuthInterceptor.js +++ b/ArgusWeb/app/js/services/tokenAuthInterceptor.js @@ -9,6 +9,13 @@ angular.module('argus.services.tokenAuthInterceptor',[]) var refreshTokenRequest = null; var failRequestLimit = 50; var failRequestCounter = 0; + function redirectToLogin(){ + var target = Storage.get('target'); + //remove user info and other stuff + Storage.reset(); + Storage.set('target', target); // for redirect to previous url after relogin + $location.path('/login'); + } return { 'request' : function(config){ config.headers = config.headers || {}; @@ -26,20 +33,17 @@ angular.module('argus.services.tokenAuthInterceptor',[]) var path = $location.path(); var deferred = $q.defer(); - var target = Storage.get('target'); - if(response.status === 0){ - Storage.reset(); - Storage.set('target', target); - $location.path('/login'); - + redirectToLogin(); + }else if(response.status !== 401 && (response.config.url === CONFIG.wsUrl + refreshPath)){ + var message = 'Your refresh token is invalid'; + growl.error(response.data && response.data.message || message); //-------Token Based Authentication---------- + redirectToLogin(); }else if(response.status === 401){ - if(path === '/login' || - !Storage.get('accessToken')|| - !Storage.get('refreshToken') - ){ + if(path === '/login'){ //login fails, just return to login page - //no token found just return to login page + }else if(!Storage.get('accessToken')|| !Storage.get('refreshToken')){ + Storage.set('loginError', 'accessToken or refreshToken missing'); }else if(failRequestCounter > failRequestLimit){ //prevent infinite loop //this might happen when you can get refreshToken but keeps getting 401 with new requests @@ -49,11 +53,12 @@ angular.module('argus.services.tokenAuthInterceptor',[]) 'This might be caused by invalid webservice endpoint, please check your markup!
' );
- deferred.reject();
+ deferred.reject(response);
return deferred.promise;
}else if(response.config.url === CONFIG.wsUrl + refreshPath){
- growl.error('You refresh token has expired');//-------Token Based Authentication----------
+ message = 'Your refresh token has expired';
+ growl.error(response.data && response.data.message || message );//-------Token Based Authentication----------
}else{
//accessToken fails, refresh accessToken
failRequestCounter ++;
@@ -75,10 +80,7 @@
});
return deferred.promise;
}
- //remove token and other stuff
- Storage.reset();
- Storage.set('target', target);
- $location.path('/login');
+ redirectToLogin();
}
deferred.reject(response);
diff --git a/ArgusWeb/app/js/services/utilService.js b/ArgusWeb/app/js/services/utilService.js
index 3bec97910..3b55f613e 100644
--- a/ArgusWeb/app/js/services/utilService.js
+++ b/ArgusWeb/app/js/services/utilService.js
@@ -1,5 +1,6 @@
/*global angular:false, copyProperties:false */
'use strict';
+
angular.module('argus.services.utils', [])
.service('UtilService', ['$filter', function($filter) {
var options = {
@@ -94,6 +95,198 @@ angular.module('argus.services.utils', [])
target[i] = obj[i];
}
return target;
+ },
+
+ ExpressionNode: class {
+ constructor(type, text){
+ this.type = type
+ this.text = text
+ this.children = []
+ }
+ appendChild(node){
+ this.children.push(node)
+ }
+ },
+
+ getExpressionTree: function(expression){
+ expression = expression.trim()
+ const n = expression.length
+ const stack = []
+ let curT = undefined //current transform
+ let tmpText = ''
+ let tmpType = 'expression'
+
+ for(let i = 0; i < n; i ++ ) {
+ const c = expression[i]
+ if(c.match(/\s/)) continue
+ let node
+ switch (c) {
+ case '(':
+ if (curT) {
+ stack.push(curT)
+ }
+ curT = new this.ExpressionNode('transform', tmpText)
+ tmpText = ''
+ tmpType = 'expression'
+ continue
+ case ')':
+ if(tmpText !== ''){
+ node = new this.ExpressionNode(tmpType, tmpText)
+ curT.appendChild(node)
+ }
+ if (stack.length === 0){
+ //end of outermost expression
+ return curT
+ }
+ const lastT = stack.pop()
+ lastT.appendChild(curT) //add just ended transform to parent
+ curT = lastT
+ tmpText = ''
+ tmpType = 'expression'
+ continue
+ case ',':
+ if (tmpText === '') continue // xxx),xxx
+ if (tmpType === 'tag') { //do not take comma as separator
+ tmpText += c
+ continue
+ }
+ node = new this.ExpressionNode(tmpType, tmpText)
+ curT.appendChild(node)
+ tmpText = ''
+ tmpType = 'expression'
+ continue
+ case '{':
+ tmpText += c
+ tmpType = 'tag' //TODO: add tag children for expression
+ continue
+ case '}':
+ tmpText += c
+ tmpType = 'expression'
+ continue
+ case '#':
+ tmpText += c
+ tmpType = 'constant'
+ continue
+ default:
+ tmpText += c
+ }
+ }
+ if (tmpText !== '') {
+ //just a normal expression without transform
+ return new this.ExpressionNode(tmpType, tmpText)
+ }
+ return curT // if there is a transform, root should be returned in the loop
+ },
+
+ printTree: function(depth, isFirstChild, stringArr, previousNode, node) {
+ const indentation = ' '.repeat(depth * 2)
+ if (previousNode && previousNode.type === 'transform'){
+ stringArr.push(`\n${indentation}`)
+ }
+ if (isFirstChild){
+ stringArr.push(indentation) //indentation
+ }
+ if (node.type === 'transform'){
+ stringArr.push(`${node.text}(\n`)
+ let isFirstChild = true
+ let previousChild
+ for(let child of node.children){
+ if (!isFirstChild) {
+ stringArr.push(',')
+ }
+ this.printTree(depth + 1, isFirstChild, stringArr,
previousChild, child) + previousChild = child + if (isFirstChild) isFirstChild = false + } + stringArr.push(`\n${indentation})`) + } else { + stringArr.push(node.text) + } + }, + + printTreeFlat: function(stringArr, node) { + if (node.type === 'transform'){ + stringArr.push(`${node.text}(`) + let isFirstChild = true + for(let child of node.children){ + if (!isFirstChild) { + stringArr.push(',') + } + this.printTreeFlat(stringArr,child) + if (isFirstChild) isFirstChild = false + } + stringArr.push(`)`) + } else { + stringArr.push(node.text) + } + }, + + prettifyExpression: function(expression) { + const tree = this.getExpressionTree(expression) + const stringArr = [] + this.printTree(0, true, stringArr, undefined, tree) + return stringArr.join('') + }, + + flatTree: function(tree) { + const stringArr = [] + this.printTreeFlat(stringArr, tree) + return stringArr.join('') + }, + + typeOfNode: function(text) { + const firstChar = text.trim()[0] + if (firstChar === '#') return 'constant' + if (/[A-Z]/.test(firstChar)) return 'transform' + return 'expression' + }, + + rangeFinderParentheses: function(cm, start) { + var line = start.line, lineText = cm.getLine(line); + var tokenType; + + function findOpening(openCh) { + for (var at = start.ch, pass = 0;;) { + var found = at <= 0 ? -1 : lineText.lastIndexOf(openCh, at - 1); + if (found == -1) { + if (pass == 1) break; + pass = 1; + at = lineText.length; + continue; + } + if (pass == 1 && found < start.ch) break; + tokenType = cm.getTokenTypeAt(CodeMirror.Pos(line, found + 1)); + if (!/^(comment|string)/.test(tokenType)) return found + 1; + at = found - 1; + } + } + + var startToken = "(", endToken = ")", startCh = findOpening("("); + if (startCh == null) { + startToken = "[", endToken = "]"; + startCh = findOpening("["); + } + + if (startCh == null) return; + var count = 1, lastLine = cm.lastLine(), end, endCh; + outer: for (var i = line; i <= lastLine; ++i) { + var text = cm.getLine(i), pos = i == line ? startCh : 0; + for (;;) { + var nextOpen = text.indexOf(startToken, pos), nextClose = text.indexOf(endToken, pos); + if (nextOpen < 0) nextOpen = text.length; + if (nextClose < 0) nextClose = text.length; + pos = Math.min(nextOpen, nextClose); + if (pos == text.length) break; + if (cm.getTokenTypeAt(CodeMirror.Pos(i, pos + 1)) == tokenType) { + if (pos == nextOpen) ++count; + else if (!--count) { end = i; endCh = pos; break outer; } + } + ++pos; + } + } + if (end == null || line == end && endCh == startCh) return; + return {from: CodeMirror.Pos(line, startCh), + to: CodeMirror.Pos(end, endCh)}; } }; return options; diff --git a/ArgusWeb/app/js/templates/charts/topToolbar.html b/ArgusWeb/app/js/templates/charts/topToolbar.html index 1cc42744c..baa8478ab 100644 --- a/ArgusWeb/app/js/templates/charts/topToolbar.html +++ b/ArgusWeb/app/js/templates/charts/topToolbar.html @@ -138,6 +138,37 @@

[~31 added template lines lost in extraction (visible fragment: "{{chartConfig.subti"); judging from the chart directives above, this hunk appears to add the chart-type switcher menu that calls changeChartType to the chart's top toolbar]
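Taken together, the toolbar hunk above, `chart.js`, and `dataProcessing.js` give the chart type a clear precedence: a UI selection beats the `type` attribute in dashboard markup, which beats a heuristic on the metric expression, which falls back to `line`. A condensed sketch of that order; the wrapper function is illustrative, only the service call comes from this change:

```js
// Illustrative merge of the resolution order used in viewMetrics.js
// (UI choice > heuristic) and chart.js (UI choice > attribute > 'line').
function resolveChartType(uiType, attrType, expressions) {
    return uiType ||
        attrType ||
        ChartDataProcessingService.getChartTypeByExpressions(expressions);
}

// Histogram-bucket expressions now default to a stacked bar chart:
resolveChartType(null, null, ['-1h:scope:latency:histogram-buckets:avg']); // 'stackbar'
// Everything else keeps the old default:
resolveChartType(null, null, ['-1h:scope:latency:avg']); // 'line'
```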
diff --git a/ArgusWeb/app/js/templates/dashboard-detail.html b/ArgusWeb/app/js/templates/dashboard-detail.html index 4661843ce..d9964d259 100644 --- a/ArgusWeb/app/js/templates/dashboard-detail.html +++ b/ArgusWeb/app/js/templates/dashboard-detail.html @@ -16,7 +16,7 @@
{{dashboard.description}}
[one-line template change; markup lost in extraction]
@@ -34,10 +34,19 @@
{{dashboard.description}}
[net nine added template lines lost in extraction; based on the DashboardsDetail controller changes above, they appear to bind the Save button to saveButtonText and isSaving]
Markup (Fullscreen editor mode: Click on the editor and press Ctrl-Alt-F)
Need help for dashboard markup?
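Returning to the `UtilService` expression helpers added earlier, which back the new prettify/fold editor on the View Metrics page: a short usage sketch, with an assumed metric expression:

```js
// Parse an Argus transform expression, pretty-print it for the CodeMirror
// editor, then flatten it back to one line. The expression is an assumed example.
var expr = 'SUM(-1h:scope:metricA{device=*}:avg,-1h:scope:metricB{device=*}:avg,#union#)';

var tree = UtilService.getExpressionTree(expr);
console.log(tree.type, tree.text);   // 'transform' 'SUM'
console.log(tree.children.length);   // 3: two expressions and one constant
console.log(tree.children[2].type);  // 'constant' (the #union# argument)

console.log(UtilService.prettifyExpression(expr));
// SUM(
//   -1h:scope:metricA{device=*}:avg,-1h:scope:metricB{device=*}:avg,#union#
// )

// flatTree is the inverse used by textAreaOnChange:
console.log(UtilService.flatTree(tree) === expr); // true for already-compact input
```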
diff --git a/ArgusWeb/app/js/templates/headerMenu.html b/ArgusWeb/app/js/templates/headerMenu.html index 1fd809250..2dcc85aab 100644 --- a/ArgusWeb/app/js/templates/headerMenu.html +++ b/ArgusWeb/app/js/templates/headerMenu.html @@ -2,7 +2,7 @@