diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 61524b5b..1a11fcec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,8 @@ on: pull_request: branches: - "*" +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true jobs: Get-CI-Image-Tag: @@ -57,6 +59,7 @@ jobs: - 11 - 17 - 21 + - 23 name: Build and Test runs-on: windows-latest diff --git a/build.gradle b/build.gradle index 70bfd9ce..68524f55 100644 --- a/build.gradle +++ b/build.gradle @@ -6,7 +6,7 @@ buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "2.13.0-SNAPSHOT") + opensearch_version = System.getProperty("opensearch.version", "2.18.1-SNAPSHOT") isSnapshot = "true" == System.getProperty("build.snapshot", "true") buildVersionQualifier = System.getProperty("build.version_qualifier", "") kotlin_version = System.getProperty("kotlin.version", "1.8.21") diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7f93135c..a4b76b95 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 3999f7f3..7c553f64 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=3e1af3ae886920c3ac87f7a91f816c0c7c436f276a6eefdb3da152100fef72ae +distributionSha256Sum=31c55713e40233a8303827ceb42ca48a47267a0ad4bab9177123121e71524c26 diff --git a/gradlew b/gradlew index 1aa94a42..f5feea6d 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/gradlew.bat b/gradlew.bat index 6689b85b..9b42019c 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. 
@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## @@ -43,11 +45,11 @@ set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -57,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail diff --git a/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md new file mode 100644 index 00000000..8aef8153 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md @@ -0,0 +1,16 @@ +## Version 2.13.0.0 2024-03-21 + +Compatible with OpenSearch 2.13.0 + +### Maintenance +* Increment version to 2.13.0-SNAPSHOT ([#591](https://github.com/opensearch-project/common-utils/pull/591)) + +### Enhancement +* add queryFieldNames field in Doc Level Queries ([#582](https://github.com/opensearch-project/common-utils/pull/582)) ([#597](https://github.com/opensearch-project/common-utils/pull/597)) + +### Features +* fix findings API enhancements ([#611](https://github.com/opensearch-project/common-utils/pull/611)) ([#617](https://github.com/opensearch-project/common-utils/pull/617)) +* Feature findings enhancement ([#596](https://github.com/opensearch-project/common-utils/pull/596)) ([#606](https://github.com/opensearch-project/common-utils/pull/606)) + +### Documentation +* Added 2.13.0.0 release notes ([#622](https://github.com/opensearch-project/common-utils/pull/622)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md new file mode 100644 index 00000000..0dfd3028 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md @@ -0,0 +1,15 @@ +## Version 2.14.0.0 2024-04-30 + +Compatible with OpenSearch 2.14.0 + +### Maintenance +* Increment version to 2.14.0-SNAPSHOT ([#625](https://github.com/opensearch-project/common-utils/pull/625)) + +### Refactor +* Obfuscate ip addresses in alert error message ([#511](https://github.com/opensearch-project/common-utils/pull/511)) +* Change doc level query name validation ([#630](https://github.com/opensearch-project/common-utils/pull/630)) +* Added validation for the new clusters field. ([#633](https://github.com/opensearch-project/common-utils/pull/633)) +* Wrapped URI syntax exception in IllegalArgument exception. ([#645](https://github.com/opensearch-project/common-utils/pull/645)) + +### Documentation +* Added 2.14.0.0 release notes. 
([#648](https://github.com/opensearch-project/common-utils/pull/648)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md new file mode 100644 index 00000000..3a4e546f --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md @@ -0,0 +1,20 @@ +## Version 2.15.0.0 2024-06-10 + +Compatible with OpenSearch 2.15.0 + +### Maintenance +* Increment version to 2.15.0-SNAPSHOT ([#651](https://github.com/opensearch-project/common-utils/pull/651)) + + +### Features +* CorrelationAlert model added ([#631](https://github.com/opensearch-project/common-utils/pull/631), [#679](https://github.com/opensearch-project/common-utils/pull/679)) + +### Bug Fixes +* Bug fixes for correlation Alerts ([#670](https://github.com/opensearch-project/common-utils/pull/670), [#680](https://github.com/opensearch-project/common-utils/pull/680)) + +### Enhancements +* Add start_time and end_time filters to GetAlertsRequest. ([#655](https://github.com/opensearch-project/common-utils/pull/655)) +* Added new models for Alerting Comments ([#663](https://github.com/opensearch-project/common-utils/pull/663), [#671](https://github.com/opensearch-project/common-utils/pull/671), [#674](https://github.com/opensearch-project/common-utils/pull/674) [#678](https://github.com/opensearch-project/common-utils/pull/678)) + +### Documentation +* Added 2.15.0.0 release notes. ([#672](https://github.com/opensearch-project/common-utils/pull/672)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md new file mode 100644 index 00000000..6ae5b6b7 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md @@ -0,0 +1,12 @@ +## Version 2.16.0.0 2024-07-25 + +Compatible with OpenSearch 2.16.0 + +### Maintenance +* Increment version to 2.16.0-SNAPSHOT ([#688](https://github.com/opensearch-project/common-utils/pull/688)) + +### Enhancements +* [Backport 2.x] Add support for remote monitors ([#694](https://github.com/opensearch-project/common-utils/pull/694)) + +### Documentation +* Added 2.16.0.0 release notes. 
([#700](https://github.com/opensearch-project/common-utils/pull/700)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md new file mode 100644 index 00000000..ff24e040 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md @@ -0,0 +1,16 @@ +## Version 2.17.0.0 2024-09-03 + +Compatible with OpenSearch 2.17.0 + +### Maintenance +* Fixed Common-Utils CIs: ([#703](https://github.com/opensearch-project/common-utils/pull/703)) + +### Bug Fixes +* Added missing ctx variables ([#710](https://github.com/opensearch-project/common-utils/pull/710)) +* Changed the names of security actions for Alerting Comments feature ([#724](https://github.com/opensearch-project/common-utils/pull/724)) + +### Enhancements +* Updated pull request template to include API spec change in checklist ([#696](https://github.com/opensearch-project/common-utils/pull/696)) + +### Documentation +* Added 2.17.0.0 release notes ([#727](https://github.com/opensearch-project/common-utils/pull/727)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.18.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.18.0.0.md new file mode 100644 index 00000000..a39b7b31 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.18.0.0.md @@ -0,0 +1,13 @@ +## Version 2.18.0.0 2024-10-28 + +Compatible with OpenSearch 2.18.0 + +### Maintenance +* Increment version to 2.18.0-SNAPSHOT ([#729](https://github.com/opensearch-project/common-utils/pull/729)) +* Update Gradle to 8.10.2 ([#746](https://github.com/opensearch-project/common-utils/pull/746)) + +### Enhancements +* changes to support dynamic deletion of doc-level monitor query indices ([#734](https://github.com/opensearch-project/common-utils/pull/734)) + +### Documentation +* Added 2.18.0.0 release notes. 
([#750](https://github.com/opensearch-project/common-utils/pull/750)) \ No newline at end of file diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt index f2ada6a5..fcf98261 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt @@ -21,6 +21,9 @@ object AlertingActions { const val SUBSCRIBE_FINDINGS_ACTION_NAME = "cluster:admin/opensearch/alerting/findings/subscribe" const val GET_MONITOR_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/get" const val SEARCH_MONITORS_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/search" + const val INDEX_COMMENT_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/write" + const val SEARCH_COMMENTS_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/search" + const val DELETE_COMMENT_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/delete" @JvmField val INDEX_MONITOR_ACTION_TYPE = @@ -73,4 +76,16 @@ object AlertingActions { @JvmField val SEARCH_MONITORS_ACTION_TYPE = ActionType(SEARCH_MONITORS_ACTION_NAME, ::SearchResponse) + + @JvmField + val INDEX_COMMENT_ACTION_TYPE = + ActionType(INDEX_COMMENT_ACTION_NAME, ::IndexCommentResponse) + + @JvmField + val SEARCH_COMMENTS_ACTION_TYPE = + ActionType(SEARCH_COMMENTS_ACTION_NAME, ::SearchResponse) + + @JvmField + val DELETE_COMMENT_ACTION_TYPE = + ActionType(DELETE_COMMENT_ACTION_NAME, ::DeleteCommentResponse) } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt new file mode 100644 index 00000000..811dcd9e --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt @@ -0,0 +1,34 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class DeleteCommentRequest : ActionRequest { + val commentId: String + + constructor(commentId: String) : super() { + this.commentId = commentId + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + commentId = sin.readString() + ) + + override fun validate(): ActionRequestValidationException? 
{ + if (commentId.isBlank()) { + val exception = ActionRequestValidationException() + exception.addValidationError("comment id must not be blank") + return exception + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(commentId) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt new file mode 100644 index 00000000..f00fe266 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt @@ -0,0 +1,32 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder + +class DeleteCommentResponse : BaseResponse { + var commentId: String + + constructor( + id: String + ) : super() { + this.commentId = id + } + + constructor(sin: StreamInput) : this( + sin.readString() // commentId + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(commentId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(IndexUtils._ID, commentId) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt new file mode 100644 index 00000000..801edc47 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionType + +class DocLevelMonitorFanOutAction private constructor() : ActionType(NAME, ::DocLevelMonitorFanOutResponse) { + companion object { + val INSTANCE = DocLevelMonitorFanOutAction() + const val NAME = "cluster:admin/opensearch/alerting/monitor/doclevel/fanout" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt new file mode 100644 index 00000000..fe5cfe29 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt @@ -0,0 +1,101 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.IndexExecutionContext +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorMetadata +import org.opensearch.commons.alerting.model.WorkflowRunContext +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.index.shard.ShardId +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class 
DocLevelMonitorFanOutRequest : ActionRequest, ToXContentObject { + val monitor: Monitor + val dryRun: Boolean + val monitorMetadata: MonitorMetadata + val executionId: String + val indexExecutionContext: IndexExecutionContext? + val shardIds: List<ShardId> + val concreteIndicesSeenSoFar: List<String> + val workflowRunContext: WorkflowRunContext? + + constructor( + monitor: Monitor, + dryRun: Boolean, + monitorMetadata: MonitorMetadata, + executionId: String, + indexExecutionContext: IndexExecutionContext?, + shardIds: List<ShardId>, + concreteIndicesSeenSoFar: List<String>, + workflowRunContext: WorkflowRunContext? + ) : super() { + this.monitor = monitor + this.dryRun = dryRun + this.monitorMetadata = monitorMetadata + this.executionId = executionId + this.indexExecutionContext = indexExecutionContext + this.shardIds = shardIds + this.concreteIndicesSeenSoFar = concreteIndicesSeenSoFar + this.workflowRunContext = workflowRunContext + require(false == shardIds.isEmpty()) { } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + monitor = Monitor.readFrom(sin)!!, + dryRun = sin.readBoolean(), + monitorMetadata = MonitorMetadata.readFrom(sin), + executionId = sin.readString(), + shardIds = sin.readList(::ShardId), + concreteIndicesSeenSoFar = sin.readStringList(), + workflowRunContext = if (sin.readBoolean()) { + WorkflowRunContext(sin) + } else { null }, + indexExecutionContext = IndexExecutionContext(sin) + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + monitor.writeTo(out) + out.writeBoolean(dryRun) + monitorMetadata.writeTo(out) + out.writeString(executionId) + out.writeCollection(shardIds) + out.writeStringCollection(concreteIndicesSeenSoFar) + out.writeBoolean(workflowRunContext != null) + workflowRunContext?.writeTo(out) + indexExecutionContext?.writeTo(out) + } + + override fun validate(): ActionRequestValidationException? 
= null + if (shardIds.isEmpty()) { + actionValidationException = ActionRequestValidationException() + actionValidationException.addValidationError("shard_ids is null or empty") + } + return actionValidationException + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("monitor", monitor) + .field("dry_run", dryRun) + .field("execution_id", executionId) + .field("index_execution_context", indexExecutionContext) + .field("shard_ids", shardIds) + .field("concrete_indices", concreteIndicesSeenSoFar) + .field("workflow_run_context", workflowRunContext) + return builder.endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt new file mode 100644 index 00000000..6e5cde55 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt @@ -0,0 +1,92 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.DocumentLevelTriggerRunResult +import org.opensearch.commons.alerting.model.InputRunResults +import org.opensearch.commons.alerting.util.AlertingException +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class DocLevelMonitorFanOutResponse : ActionResponse, ToXContentObject { + val nodeId: String + val executionId: String + val monitorId: String + val lastRunContexts: MutableMap<String, Any> + val inputResults: InputRunResults + val triggerResults: Map<String, DocumentLevelTriggerRunResult> + val exception: AlertingException? + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + nodeId = sin.readString(), + executionId = sin.readString(), + monitorId = sin.readString(), + lastRunContexts = sin.readMap()!! as MutableMap<String, Any>, + inputResults = InputRunResults.readFrom(sin), + triggerResults = suppressWarning(sin.readMap(StreamInput::readString, DocumentLevelTriggerRunResult::readFrom)), + exception = sin.readException() + ) + + constructor( + nodeId: String, + executionId: String, + monitorId: String, + lastRunContexts: MutableMap<String, Any>, + inputResults: InputRunResults = InputRunResults(), // partial, + triggerResults: Map<String, DocumentLevelTriggerRunResult> = mapOf(), + exception: AlertingException? 
= null + ) : super() { + this.nodeId = nodeId + this.executionId = executionId + this.monitorId = monitorId + this.lastRunContexts = lastRunContexts + this.inputResults = inputResults + this.triggerResults = triggerResults + this.exception = exception + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(nodeId) + out.writeString(executionId) + out.writeString(monitorId) + out.writeMap(lastRunContexts) + inputResults.writeTo(out) + out.writeMap( + triggerResults, + StreamOutput::writeString, + { stream, stats -> stats.writeTo(stream) } + ) + out.writeException(exception) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("node_id", nodeId) + .field("execution_id", executionId) + .field("monitor_id", monitorId) + .field("last_run_contexts", lastRunContexts) + .field("input_results", inputResults) + .field("trigger_results", triggerResults) + .field("exception", exception) + .endObject() + return builder + } + + companion object { + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt index 8b8a5a1f..4592a9be 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt @@ -5,6 +5,7 @@ import org.opensearch.action.ActionRequestValidationException import org.opensearch.commons.alerting.model.Table import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.index.query.BoolQueryBuilder import java.io.IOException class GetAlertsRequest : ActionRequest { @@ -16,6 +17,7 @@ class GetAlertsRequest : ActionRequest { val monitorIds: List? val workflowIds: List? val alertIds: List? + val boolQueryBuilder: BoolQueryBuilder? constructor( table: Table, @@ -25,7 +27,8 @@ class GetAlertsRequest : ActionRequest { alertIndex: String?, monitorIds: List? = null, workflowIds: List? = null, - alertIds: List? = null + alertIds: List? = null, + boolQueryBuilder: BoolQueryBuilder? = null ) : super() { this.table = table this.severityLevel = severityLevel @@ -35,6 +38,7 @@ class GetAlertsRequest : ActionRequest { this.monitorIds = monitorIds this.workflowIds = workflowIds this.alertIds = alertIds + this.boolQueryBuilder = boolQueryBuilder } @Throws(IOException::class) @@ -46,7 +50,8 @@ class GetAlertsRequest : ActionRequest { alertIndex = sin.readOptionalString(), monitorIds = sin.readOptionalStringList(), workflowIds = sin.readOptionalStringList(), - alertIds = sin.readOptionalStringList() + alertIds = sin.readOptionalStringList(), + boolQueryBuilder = if (sin.readOptionalBoolean() == true) BoolQueryBuilder(sin) else null ) override fun validate(): ActionRequestValidationException? 
{ @@ -63,5 +68,11 @@ class GetAlertsRequest : ActionRequest { out.writeOptionalStringCollection(monitorIds) out.writeOptionalStringCollection(workflowIds) out.writeOptionalStringCollection(alertIds) + if (boolQueryBuilder != null) { + out.writeOptionalBoolean(true) + boolQueryBuilder.writeTo(out) + } else { + out.writeOptionalBoolean(false) + } } } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt new file mode 100644 index 00000000..3eb05f13 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt @@ -0,0 +1,78 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import java.io.IOException + +/** + * Request to index/create a Comment + * + * entityId: the entity that the Comment is attached to and therefore associated with (e.g. in Alerting, + * the entity is an Alert). This field is expected to be non-blank if the request is to create a new Comment. + * + * commentId: the ID of an existing Comment. This field is expected to be non-blank if the request is to + * update an existing Comment. + */ +class IndexCommentRequest : ActionRequest { + val entityId: String + val entityType: String + val commentId: String + val seqNo: Long + val primaryTerm: Long + val method: RestRequest.Method + var content: String + + constructor( + entityId: String, + entityType: String, + commentId: String, + seqNo: Long, + primaryTerm: Long, + method: RestRequest.Method, + content: String + ) : super() { + this.entityId = entityId + this.entityType = entityType + this.commentId = commentId + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.method = method + this.content = content + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + entityId = sin.readString(), + entityType = sin.readString(), + commentId = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + method = sin.readEnum(RestRequest.Method::class.java), + content = sin.readString() + ) + + override fun validate(): ActionRequestValidationException? 
{ + if (method == RestRequest.Method.POST && entityId.isBlank() || + method == RestRequest.Method.PUT && commentId.isBlank() + ) { + val exception = ActionRequestValidationException() + exception.addValidationError("id must not be blank") + return exception + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(entityId) + out.writeString(entityType) + out.writeString(commentId) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + out.writeEnum(method) + out.writeString(content) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt new file mode 100644 index 00000000..7c9bb9b7 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt @@ -0,0 +1,57 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Comment +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class IndexCommentResponse : BaseResponse { + var id: String + var seqNo: Long + var primaryTerm: Long + var comment: Comment + + constructor( + id: String, + seqNo: Long, + primaryTerm: Long, + comment: Comment + ) : super() { + this.id = id + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.comment = comment + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // seqNo + sin.readLong(), // primaryTerm + Comment.readFrom(sin) // comment + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + comment.writeTo(out) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(_ID, id) + .field(_SEQ_NO, seqNo) + .field(_PRIMARY_TERM, primaryTerm) + .field("comment", comment) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt new file mode 100644 index 00000000..e0d150d0 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt @@ -0,0 +1,33 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.search.SearchRequest +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class SearchCommentRequest : ActionRequest { + + val searchRequest: SearchRequest + + constructor( + searchRequest: SearchRequest + ) : super() { + this.searchRequest = searchRequest + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + searchRequest = SearchRequest(sin) + ) + + override fun validate(): 
ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + searchRequest.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt b/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt index 3959187b..28ef4a55 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt @@ -12,7 +12,10 @@ import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken import java.io.IOException import java.time.Instant -data class AlertError(val timestamp: Instant, val message: String) : Writeable, ToXContent { +data class AlertError(val timestamp: Instant, var message: String) : Writeable, ToXContent { + init { + this.message = obfuscateIPAddresses(message) + } @Throws(IOException::class) constructor(sin: StreamInput) : this( @@ -54,6 +57,12 @@ data class AlertError(val timestamp: Instant, val message: String) : Writeable, fun readFrom(sin: StreamInput): AlertError { return AlertError(sin) } + + fun obfuscateIPAddresses(exceptionMessage: String): String { + val ipAddressPattern = "\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}" + val obfuscatedMessage = exceptionMessage.replace(ipAddressPattern.toRegex(), "x.x.x.x") + return obfuscatedMessage + } } override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt new file mode 100644 index 00000000..dd9bd4dd --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt @@ -0,0 +1,208 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.lucene.uid.Versions +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +/** CorrelationAlert and Alert can extend the UnifiedAlert class to inherit the common fields and behavior + * of UnifiedAlert class. + */ +open class BaseAlert( + open val id: String = Alert.NO_ID, + open val version: Long = Alert.NO_VERSION, + open val schemaVersion: Int = NO_SCHEMA_VERSION, + open val user: User?, + open val triggerName: String, + + // State will be later moved to this Class (after `monitorBasedAlerts` extend this Class) + open val state: Alert.State, + open val startTime: Instant, + open val endTime: Instant? = null, + open val acknowledgedTime: Instant? = null, + open val errorMessage: String? 
= null, + open val severity: String, + open val actionExecutionResults: List +) : Writeable, ToXContent { + + init { + if (errorMessage != null) { + require((state == Alert.State.DELETED) || (state == Alert.State.ERROR) || (state == Alert.State.AUDIT)) { + "Attempt to create an alert with an error in state: $state" + } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + version = sin.readLong(), + schemaVersion = sin.readInt(), + user = if (sin.readBoolean()) { + User(sin) + } else { + null + }, + triggerName = sin.readString(), + state = sin.readEnum(Alert.State::class.java), + startTime = sin.readInstant(), + endTime = sin.readOptionalInstant(), + acknowledgedTime = sin.readOptionalInstant(), + errorMessage = sin.readOptionalString(), + severity = sin.readString(), + actionExecutionResults = sin.readList(::ActionExecutionResult) + ) + + fun isAcknowledged(): Boolean = (state == Alert.State.ACKNOWLEDGED) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeInt(schemaVersion) + out.writeBoolean(user != null) + user?.writeTo(out) + out.writeString(triggerName) + out.writeEnum(state) + out.writeInstant(startTime) + out.writeOptionalInstant(endTime) + out.writeOptionalInstant(acknowledgedTime) + out.writeOptionalString(errorMessage) + out.writeString(severity) + out.writeCollection(actionExecutionResults) + } + + companion object { + const val ALERT_ID_FIELD = "id" + const val SCHEMA_VERSION_FIELD = "schema_version" + const val ALERT_VERSION_FIELD = "version" + const val USER_FIELD = "user" + const val TRIGGER_NAME_FIELD = "trigger_name" + const val STATE_FIELD = "state" + const val START_TIME_FIELD = "start_time" + const val END_TIME_FIELD = "end_time" + const val ACKNOWLEDGED_TIME_FIELD = "acknowledged_time" + const val ERROR_MESSAGE_FIELD = "error_message" + const val SEVERITY_FIELD = "severity" + const val ACTION_EXECUTION_RESULTS_FIELD = "action_execution_results" + const val NO_ID = "" + const val NO_VERSION = Versions.NOT_FOUND + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, version: Long = NO_VERSION): BaseAlert { + lateinit var id: String + var schemaVersion = NO_SCHEMA_VERSION + var version: Long = Versions.NOT_FOUND + var user: User? = null + lateinit var triggerName: String + lateinit var state: Alert.State + lateinit var startTime: Instant + lateinit var severity: String + var endTime: Instant? = null + var acknowledgedTime: Instant? = null + var errorMessage: String? 
= null + val actionExecutionResults: MutableList = mutableListOf() + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + USER_FIELD -> user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + ALERT_ID_FIELD -> id = xcp.text() + ALERT_VERSION_FIELD -> version = xcp.longValue() + SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + TRIGGER_NAME_FIELD -> triggerName = xcp.text() + STATE_FIELD -> state = Alert.State.valueOf(xcp.text()) + ERROR_MESSAGE_FIELD -> errorMessage = xcp.textOrNull() + SEVERITY_FIELD -> severity = xcp.text() + ACTION_EXECUTION_RESULTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actionExecutionResults.add(ActionExecutionResult.parse(xcp)) + } + } + START_TIME_FIELD -> startTime = requireNotNull(xcp.instant()) + END_TIME_FIELD -> endTime = requireNotNull(xcp.instant()) + ACKNOWLEDGED_TIME_FIELD -> acknowledgedTime = xcp.instant() + } + } + + return BaseAlert( + id = id, + startTime = requireNotNull(startTime), + endTime = endTime, + state = requireNotNull(state), + version = version, + errorMessage = errorMessage, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + user = user, + triggerName = requireNotNull(triggerName), + severity = severity, + acknowledgedTime = acknowledgedTime + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Alert { + return Alert(sin) + } + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, true) + } + + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { + return createXContentBuilder(builder, false) + } + + fun createXContentBuilder(builder: XContentBuilder, secure: Boolean): XContentBuilder { + if (!secure) { + builder.optionalUserField(USER_FIELD, user) + } + builder + .field(ALERT_ID_FIELD, id) + .field(ALERT_VERSION_FIELD, version) + .field(SCHEMA_VERSION_FIELD, schemaVersion) + .field(TRIGGER_NAME_FIELD, triggerName) + .field(STATE_FIELD, state) + .field(ERROR_MESSAGE_FIELD, errorMessage) + .field(SEVERITY_FIELD, severity) + .field(ACTION_EXECUTION_RESULTS_FIELD, actionExecutionResults.toTypedArray()) + .field(START_TIME_FIELD, startTime) + .field(END_TIME_FIELD, endTime) + .field(ACKNOWLEDGED_TIME_FIELD, acknowledgedTime) + return builder + } + + open fun asTemplateArg(): Map { + return mapOf( + ACKNOWLEDGED_TIME_FIELD to acknowledgedTime?.toEpochMilli(), + ALERT_ID_FIELD to id, + ALERT_VERSION_FIELD to version, + END_TIME_FIELD to endTime?.toEpochMilli(), + ERROR_MESSAGE_FIELD to errorMessage, + SEVERITY_FIELD to severity, + START_TIME_FIELD to startTime.toEpochMilli(), + STATE_FIELD to state.toString(), + TRIGGER_NAME_FIELD to triggerName + ) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt index 59dbae7f..39ea4fbc 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt @@ -69,7 +69,13 @@ data class BucketLevelTrigger( NAME_FIELD to name, SEVERITY_FIELD to severity, ACTIONS_FIELD to actions.map { it.asTemplateArg() }, - PARENT_BUCKET_PATH to getParentBucketPath() 
+ PARENT_BUCKET_PATH to getParentBucketPath(), + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to bucketSelector.script.idOrCode, + LANG_FIELD to bucketSelector.script.lang + ) + ) ) } @@ -81,6 +87,9 @@ data class BucketLevelTrigger( const val BUCKET_LEVEL_TRIGGER_FIELD = "bucket_level_trigger" const val CONDITION_FIELD = "condition" const val PARENT_BUCKET_PATH = "parentBucketPath" + const val SCRIPT_FIELD = "script" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( Trigger::class.java, diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt new file mode 100644 index 00000000..34328ca2 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +data class BucketLevelTriggerRunResult( + override var triggerName: String, + override var error: Exception? = null, + var aggregationResultBuckets: Map, + var actionResultsMap: MutableMap> = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readException() as Exception?, // error + sin.readMap(StreamInput::readString, ::AggregationResultBucket), + sin.readMap() as MutableMap> + ) + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder + .field(AGG_RESULT_BUCKETS, aggregationResultBuckets) + .field(ACTIONS_RESULTS, actionResultsMap as Map) + } + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeMap(aggregationResultBuckets, StreamOutput::writeString) { + valueOut: StreamOutput, aggResultBucket: AggregationResultBucket -> + aggResultBucket.writeTo(valueOut) + } + out.writeMap(actionResultsMap as Map) + } + + companion object { + const val AGG_RESULT_BUCKETS = "agg_result_buckets" + const val ACTIONS_RESULTS = "action_results" + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return BucketLevelTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt new file mode 100644 index 00000000..015762cf --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt @@ -0,0 +1,69 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import 
java.time.Instant + +data class ChainedAlertTriggerRunResult( + override var triggerName: String, + var triggered: Boolean, + override var error: Exception?, + var actionResults: MutableMap = mutableMapOf(), + val associatedAlertIds: Set +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap, + associatedAlertIds = sin.readStringList().toSet() + ) + + override fun alertError(): AlertError? { + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggered", triggered) + .field("action_results", actionResults as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + out.writeStringCollection(associatedAlertIds) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return ChainedAlertTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt index f834b435..a11214e6 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt @@ -3,6 +3,7 @@ package org.opensearch.commons.alerting.model import org.apache.commons.validator.routines.UrlValidator import org.apache.http.client.utils.URIBuilder import org.opensearch.common.CheckedFunction +import org.opensearch.commons.utils.CLUSTER_NAME_REGEX import org.opensearch.core.ParseField import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.common.io.stream.StreamOutput @@ -58,6 +59,12 @@ data class ClusterMetricsInput( "Only port '$SUPPORTED_PORT' is supported." } + if (clusters.isNotEmpty()) { + require(clusters.all { CLUSTER_NAME_REGEX.matches(it) }) { + "Cluster names are not valid." + } + } + clusterMetricType = findApiType(constructedUri.path) this.parseEmptyFields() } @@ -151,14 +158,18 @@ data class ClusterMetricsInput( return if (url.isEmpty()) { constructUrlFromInputs() } else { - URIBuilder(url).build() + try { + URIBuilder(url).build() + } catch (e: URISyntaxException) { + throw IllegalArgumentException("Invalid URL syntax.") + } } } /** * Isolates just the path parameters from the [ClusterMetricsInput] URI. * @return The path parameters portion of the [ClusterMetricsInput] URI. - * @throws IllegalArgumentException if the [ClusterMetricType] requires path parameters, but none are supplied; + * @throws [IllegalArgumentException] if the [ClusterMetricType] requires path parameters, but none are supplied; * or when path parameters are provided for an [ClusterMetricType] that does not use path parameters. 
*/ fun parsePathParams(): String { @@ -199,7 +210,7 @@ data class ClusterMetricsInput( * Examines the path of a [ClusterMetricsInput] to determine which API is being called. * @param uriPath The path to examine. * @return The [ClusterMetricType] associated with the [ClusterMetricsInput] monitor. - * @throws IllegalArgumentException when the API to call cannot be determined from the URI. + * @throws [IllegalArgumentException] when the API to call cannot be determined from the URI. */ private fun findApiType(uriPath: String): ClusterMetricType { var apiType = ClusterMetricType.BLANK @@ -236,7 +247,11 @@ data class ClusterMetricsInput( .setHost(SUPPORTED_HOST) .setPort(SUPPORTED_PORT) .setPath(path + pathParams) - uriBuilder.build() + try { + uriBuilder.build() + } catch (e: URISyntaxException) { + throw IllegalArgumentException("Invalid URL syntax.") + } } } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt new file mode 100644 index 00000000..d3af9be3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt @@ -0,0 +1,110 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +data class ClusterMetricsTriggerRunResult( + override var triggerName: String, + override var triggered: Boolean, + override var error: Exception?, + override var actionResults: MutableMap = mutableMapOf(), + var clusterTriggerResults: List = listOf() +) : QueryLevelTriggerRunResult( + triggerName = triggerName, + error = error, + triggered = triggered, + actionResults = actionResults +) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap, + clusterTriggerResults = sin.readList((ClusterTriggerResult)::readFrom) + ) + + override fun alertError(): AlertError? 
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + builder + .field(TRIGGERED_FIELD, triggered) + .field(ACTION_RESULTS_FIELD, actionResults as Map) + .startArray(CLUSTER_RESULTS_FIELD) + clusterTriggerResults.forEach { it.toXContent(builder, params) } + return builder.endArray() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + clusterTriggerResults.forEach { it.writeTo(out) } + } + + companion object { + const val TRIGGERED_FIELD = "triggered" + const val ACTION_RESULTS_FIELD = "action_results" + const val CLUSTER_RESULTS_FIELD = "cluster_results" + } + + data class ClusterTriggerResult( + val cluster: String, + val triggered: Boolean + ) : ToXContentObject, Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + cluster = sin.readString(), + triggered = sin.readBoolean() + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .startObject(cluster) + .field(TRIGGERED_FIELD, triggered) + .endObject() + .endObject() + } + + override fun writeTo(out: StreamOutput) { + out.writeString(cluster) + out.writeBoolean(triggered) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ClusterTriggerResult { + return ClusterTriggerResult(sin) + } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt new file mode 100644 index 00000000..45c007e3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt @@ -0,0 +1,165 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.alerting.util.optionalUsernameField +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import java.io.IOException +import java.time.Instant + +data class Comment( + val id: String = NO_ID, + val entityId: String = NO_ID, + val entityType: String, + val content: String, + val createdTime: Instant, + val lastUpdatedTime: Instant?, + val user: User? 
+) : Writeable, ToXContent { + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + entityId = sin.readString(), + entityType = sin.readString(), + content = sin.readString(), + createdTime = sin.readInstant(), + lastUpdatedTime = sin.readOptionalInstant(), + user = if (sin.readBoolean()) User(sin) else null + ) + + constructor( + entityId: String, + entityType: String, + content: String, + createdTime: Instant, + user: User? + ) : this ( + entityId = entityId, + entityType = entityType, + content = content, + createdTime = createdTime, + lastUpdatedTime = null, + user = user + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(entityId) + out.writeString(entityType) + out.writeString(content) + out.writeInstant(createdTime) + out.writeOptionalInstant(lastUpdatedTime) + out.writeBoolean(user != null) + user?.writeTo(out) + } + + fun asTemplateArg(): Map { + return mapOf( + _ID to id, + ENTITY_ID_FIELD to entityId, + ENTITY_TYPE_FIELD to entityType, + COMMENT_CONTENT_FIELD to content, + COMMENT_CREATED_TIME_FIELD to createdTime, + COMMENT_LAST_UPDATED_TIME_FIELD to lastUpdatedTime, + COMMENT_USER_FIELD to user?.name + ) + } + + // used to create the Comment JSON object for an API response (displayed to user) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, false) + } + + // used to create the Comment JSON object for indexing a doc into an index (not displayed to user) + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { + return createXContentBuilder(builder, true) + } + + private fun createXContentBuilder(builder: XContentBuilder, includeFullUser: Boolean): XContentBuilder { + builder.startObject() + .field(ENTITY_ID_FIELD, entityId) + .field(ENTITY_TYPE_FIELD, entityType) + .field(COMMENT_CONTENT_FIELD, content) + .optionalTimeField(COMMENT_CREATED_TIME_FIELD, createdTime) + .optionalTimeField(COMMENT_LAST_UPDATED_TIME_FIELD, lastUpdatedTime) + + if (includeFullUser) { + // if we're storing a Comment into an internal index, include full User + builder.optionalUserField(COMMENT_USER_FIELD, user) + } else { + // if we're displaying the Comment as part of an API call response, only include username + builder.optionalUsernameField(COMMENT_USER_FIELD, user) + } + + builder.endObject() + return builder + } + + companion object { + const val ENTITY_ID_FIELD = "entity_id" + const val ENTITY_TYPE_FIELD = "entity_type" + const val COMMENT_CONTENT_FIELD = "content" + const val COMMENT_CREATED_TIME_FIELD = "created_time" + const val COMMENT_LAST_UPDATED_TIME_FIELD = "last_updated_time" + const val COMMENT_USER_FIELD = "user" + const val NO_ID = "" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID): Comment { + lateinit var entityId: String + lateinit var entityType: String + var content = "" + lateinit var createdTime: Instant + var lastUpdatedTime: Instant? = null + var user: User? 
= null + + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ENTITY_ID_FIELD -> entityId = xcp.text() + ENTITY_TYPE_FIELD -> entityType = xcp.text() + COMMENT_CONTENT_FIELD -> content = xcp.text() + COMMENT_CREATED_TIME_FIELD -> createdTime = requireNotNull(xcp.instant()) + COMMENT_LAST_UPDATED_TIME_FIELD -> lastUpdatedTime = xcp.instant() + COMMENT_USER_FIELD -> + user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + User.parse(xcp) + } + } + } + + return Comment( + id = id, + entityId = entityId, + entityType = entityType, + content = content, + createdTime = createdTime, + lastUpdatedTime = lastUpdatedTime, + user = user + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Comment { + return Comment(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt new file mode 100644 index 00000000..f0a56a86 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt @@ -0,0 +1,142 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +class CorrelationAlert : BaseAlert { + + // CorrelationAlert-specific properties + val correlatedFindingIds: List + val correlationRuleId: String + val correlationRuleName: String + + constructor( + correlatedFindingIds: List, + correlationRuleId: String, + correlationRuleName: String, + id: String, + version: Long, + schemaVersion: Int, + user: User?, + triggerName: String, + state: Alert.State, + startTime: Instant, + endTime: Instant?, + acknowledgedTime: Instant?, + errorMessage: String?, + severity: String, + actionExecutionResults: List + ) : super( + id = id, + version = version, + schemaVersion = schemaVersion, + user = user, + triggerName = triggerName, + state = state, + startTime = startTime, + endTime = endTime, + acknowledgedTime = acknowledgedTime, + errorMessage = errorMessage, + severity = severity, + actionExecutionResults = actionExecutionResults + ) { + this.correlatedFindingIds = correlatedFindingIds + this.correlationRuleId = correlationRuleId + this.correlationRuleName = correlationRuleName + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : super(sin) { + correlatedFindingIds = sin.readStringList() + correlationRuleId = sin.readString() + correlationRuleName = sin.readString() + } + + // Override to include CorrelationAlert specific fields + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startArray(CORRELATED_FINDING_IDS) + correlatedFindingIds.forEach { id -> + builder.value(id) + } + builder.endArray() + .field(CORRELATION_RULE_ID, correlationRuleId) + .field(CORRELATION_RULE_NAME, correlationRuleName) + super.toXContentWithUser(builder) + builder.endObject() + return builder + } + + @Throws(IOException::class) + override fun writeTo(out: 
StreamOutput) { + super.writeTo(out) + out.writeStringCollection(correlatedFindingIds) + out.writeString(correlationRuleId) + out.writeString(correlationRuleName) + } + override fun asTemplateArg(): Map { + val superTemplateArgs = super.asTemplateArg() + val correlationSpecificArgs = mapOf( + CORRELATED_FINDING_IDS to correlatedFindingIds, + CORRELATION_RULE_ID to correlationRuleId, + CORRELATION_RULE_NAME to correlationRuleName + ) + return superTemplateArgs + correlationSpecificArgs + } + companion object { + const val CORRELATED_FINDING_IDS = "correlated_finding_ids" + const val CORRELATION_RULE_ID = "correlation_rule_id" + const val CORRELATION_RULE_NAME = "correlation_rule_name" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): CorrelationAlert { + // Parse additional CorrelationAlert-specific fields + val correlatedFindingIds: MutableList = mutableListOf() + var correlationRuleId: String? = null + var correlationRuleName: String? = null + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + CORRELATED_FINDING_IDS -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + correlatedFindingIds.add(xcp.text()) + } + } + CORRELATION_RULE_ID -> correlationRuleId = xcp.text() + CORRELATION_RULE_NAME -> correlationRuleName = xcp.text() + } + } + + val unifiedAlert = parse(xcp, version) + return CorrelationAlert( + correlatedFindingIds = correlatedFindingIds, + correlationRuleId = requireNotNull(correlationRuleId), + correlationRuleName = requireNotNull(correlationRuleName), + id = requireNotNull(unifiedAlert.id), + version = requireNotNull(unifiedAlert.version), + schemaVersion = requireNotNull(unifiedAlert.schemaVersion), + user = unifiedAlert.user, + triggerName = requireNotNull(unifiedAlert.triggerName), + state = requireNotNull(unifiedAlert.state), + startTime = requireNotNull(unifiedAlert.startTime), + endTime = unifiedAlert.endTime, + acknowledgedTime = unifiedAlert.acknowledgedTime, + errorMessage = unifiedAlert.errorMessage, + severity = requireNotNull(unifiedAlert.severity), + actionExecutionResults = unifiedAlert.actionExecutionResults + ) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt index b922a706..7e995b53 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt @@ -31,6 +31,13 @@ data class DataSources( /** Configures a custom index pattern for alertHistoryIndex alias.*/ val alertsHistoryIndexPattern: String? = "<.opendistro-alerting-alert-history-{now/d}-1>", // AlertIndices.ALERT_HISTORY_INDEX_PATTERN + /** Configures a custom index alias to store comments associated with alerts.*/ + + val commentsIndex: String? = DEFAULT_COMMENTS_INDEX, // CommentsIndices.COMMENTS_HISTORY_WRITE_INDEX + + /** Configures a custom index pattern for commentsIndex alias.*/ + val commentsIndexPattern: String? = DEFAULT_COMMENTS_INDEX_PATTERN, // CommentsIndices.COMMENTS_HISTORY_INDEX_PATTERN + /** Configures custom mappings by field type for query index. 
* Custom query index mappings are configurable, only if a custom query index is configured too. */ val queryIndexMappingsByType: Map> = mapOf(), @@ -74,10 +81,34 @@ data class DataSources( alertsIndex = sin.readString(), alertsHistoryIndex = sin.readOptionalString(), alertsHistoryIndexPattern = sin.readOptionalString(), + commentsIndex = sin.readOptionalString(), + commentsIndexPattern = sin.readOptionalString(), queryIndexMappingsByType = sin.readMap() as Map>, findingsEnabled = sin.readOptionalBoolean() ) + constructor( + queryIndex: String, + findingsIndex: String, + findingsIndexPattern: String?, + alertsIndex: String, + alertsHistoryIndex: String?, + alertsHistoryIndexPattern: String?, + queryIndexMappingsByType: Map>, + findingsEnabled: Boolean? + ) : this( + queryIndex = queryIndex, + findingsIndex = findingsIndex, + findingsIndexPattern = findingsIndexPattern, + alertsIndex = alertsIndex, + alertsHistoryIndex = alertsHistoryIndex, + alertsHistoryIndexPattern = alertsHistoryIndexPattern, + commentsIndex = DEFAULT_COMMENTS_INDEX, + commentsIndexPattern = DEFAULT_COMMENTS_INDEX_PATTERN, + queryIndexMappingsByType = queryIndexMappingsByType, + findingsEnabled = findingsEnabled + ) + @Suppress("UNCHECKED_CAST") fun asTemplateArg(): Map { return mapOf( @@ -87,6 +118,8 @@ data class DataSources( ALERTS_INDEX_FIELD to alertsIndex, ALERTS_HISTORY_INDEX_FIELD to alertsHistoryIndex, ALERTS_HISTORY_INDEX_PATTERN_FIELD to alertsHistoryIndexPattern, + COMMENTS_INDEX_FIELD to commentsIndex, + COMMENTS_INDEX_PATTERN_FIELD to commentsIndexPattern, QUERY_INDEX_MAPPINGS_BY_TYPE to queryIndexMappingsByType, FINDINGS_ENABLED_FIELD to findingsEnabled ) @@ -100,6 +133,8 @@ data class DataSources( builder.field(ALERTS_INDEX_FIELD, alertsIndex) builder.field(ALERTS_HISTORY_INDEX_FIELD, alertsHistoryIndex) builder.field(ALERTS_HISTORY_INDEX_PATTERN_FIELD, alertsHistoryIndexPattern) + builder.field(COMMENTS_INDEX_FIELD, commentsIndex) + builder.field(COMMENTS_INDEX_PATTERN_FIELD, commentsIndexPattern) builder.field(QUERY_INDEX_MAPPINGS_BY_TYPE, queryIndexMappingsByType as Map) builder.field(FINDINGS_ENABLED_FIELD, findingsEnabled) builder.endObject() @@ -113,9 +148,14 @@ data class DataSources( const val ALERTS_INDEX_FIELD = "alerts_index" const val ALERTS_HISTORY_INDEX_FIELD = "alerts_history_index" const val ALERTS_HISTORY_INDEX_PATTERN_FIELD = "alerts_history_index_pattern" + const val COMMENTS_INDEX_FIELD = "comments_index" + const val COMMENTS_INDEX_PATTERN_FIELD = "comments_index_pattern" const val QUERY_INDEX_MAPPINGS_BY_TYPE = "query_index_mappings_by_type" const val FINDINGS_ENABLED_FIELD = "findings_enabled" + const val DEFAULT_COMMENTS_INDEX = ".opensearch-alerting-comments-history-write" + const val DEFAULT_COMMENTS_INDEX_PATTERN = "<.opensearch-alerting-comments-history-{now/d}-1>" + @JvmStatic @Throws(IOException::class) @Suppress("UNCHECKED_CAST") @@ -126,6 +166,8 @@ data class DataSources( var alertsIndex = "" var alertsHistoryIndex = "" var alertsHistoryIndexPattern = "" + var commentsIndex = "" + var commentsIndexPattern = "" var queryIndexMappingsByType: Map> = mapOf() var findingsEnabled = false @@ -141,6 +183,8 @@ data class DataSources( ALERTS_INDEX_FIELD -> alertsIndex = xcp.text() ALERTS_HISTORY_INDEX_FIELD -> alertsHistoryIndex = xcp.text() ALERTS_HISTORY_INDEX_PATTERN_FIELD -> alertsHistoryIndexPattern = xcp.text() + COMMENTS_INDEX_FIELD -> commentsIndex = xcp.text() + COMMENTS_INDEX_PATTERN_FIELD -> commentsIndexPattern = xcp.text() QUERY_INDEX_MAPPINGS_BY_TYPE -> 
queryIndexMappingsByType = xcp.map() as Map> FINDINGS_ENABLED_FIELD -> findingsEnabled = xcp.booleanValue() } @@ -152,6 +196,8 @@ data class DataSources( alertsIndex = alertsIndex, alertsHistoryIndex = alertsHistoryIndex, alertsHistoryIndexPattern = alertsHistoryIndexPattern, + commentsIndex = commentsIndex, + commentsIndexPattern = commentsIndexPattern, queryIndexMappingsByType = queryIndexMappingsByType, findingsEnabled = findingsEnabled ) @@ -166,6 +212,8 @@ data class DataSources( out.writeString(alertsIndex) out.writeOptionalString(alertsHistoryIndex) out.writeOptionalString(alertsHistoryIndexPattern) + out.writeOptionalString(commentsIndex) + out.writeOptionalString(commentsIndexPattern) out.writeMap(queryIndexMappingsByType as Map) out.writeOptionalBoolean(findingsEnabled) } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt index 4ed95cdb..3193ee57 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt @@ -24,7 +24,7 @@ data class DocLevelMonitorInput( sin.readList(::DocLevelQuery) // docLevelQueries ) - fun asTemplateArg(): Map { + override fun asTemplateArg(): Map { return mapOf( DESCRIPTION_FIELD to description, INDICES_FIELD to indices, diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt index 7c72b0ca..ebba6bf9 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt @@ -22,9 +22,9 @@ data class DocLevelQuery( init { // Ensure the name and tags have valid characters - validateQuery(name) + validateQueryName(name) for (tag in tags) { - validateQuery(tag) + validateQueryTag(tag) } } @@ -80,6 +80,7 @@ data class DocLevelQuery( const val QUERY_FIELD_NAMES_FIELD = "query_field_names" const val NO_ID = "" val INVALID_CHARACTERS: List = listOf(" ", "[", "]", "{", "}", "(", ")") + val QUERY_NAME_REGEX = "^.{1,256}$".toRegex() // regex to restrict string length between 1 - 256 chars @JvmStatic @Throws(IOException::class) @@ -100,7 +101,7 @@ data class DocLevelQuery( QUERY_ID_FIELD -> id = xcp.text() NAME_FIELD -> { name = xcp.text() - validateQuery(name) + validateQueryName(name) } QUERY_FIELD -> query = xcp.text() @@ -112,7 +113,7 @@ data class DocLevelQuery( ) while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { val tag = xcp.text() - validateQuery(tag) + validateQueryTag(tag) tags.add(tag) } } @@ -159,16 +160,20 @@ data class DocLevelQuery( return DocLevelQuery(sin) } - // TODO: add test for this - private fun validateQuery(stringVal: String) { + private fun validateQueryTag(stringVal: String) { for (inValidChar in INVALID_CHARACTERS) { if (stringVal.contains(inValidChar)) { throw IllegalArgumentException( - "They query name or tag, $stringVal, contains an invalid character: [' ','[',']','{','}','(',')']" + "The query tag, $stringVal, contains an invalid character: [' ','[',']','{','}','(',')']" ) } } } + private fun validateQueryName(stringVal: String) { + if (!stringVal.matches(QUERY_NAME_REGEX)) { + throw IllegalArgumentException("The query name, $stringVal, should be between 1 - 256 characters.") + } + } } // constructor for java plugins' convenience to optionally avoid passing empty list for 'fieldsBeingQueried' field diff --git 
a/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt index df584234..a1f8b617 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt @@ -60,7 +60,13 @@ data class DocumentLevelTrigger( ID_FIELD to id, NAME_FIELD to name, SEVERITY_FIELD to severity, - ACTIONS_FIELD to actions.map { it.asTemplateArg() } + ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to condition.idOrCode, + LANG_FIELD to condition.lang + ) + ) ) } @@ -78,6 +84,8 @@ data class DocumentLevelTrigger( const val CONDITION_FIELD = "condition" const val SCRIPT_FIELD = "script" const val QUERY_IDS_FIELD = "query_ids" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( Trigger::class.java, diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt new file mode 100644 index 00000000..1acb354b --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt @@ -0,0 +1,82 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException + +data class DocumentLevelTriggerRunResult( + override var triggerName: String, + var triggeredDocs: List, + override var error: Exception?, + var actionResultsMap: MutableMap> = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggeredDocs = sin.readStringList(), + actionResultsMap = readActionResults(sin) + ) + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggeredDocs", triggeredDocs as List) + .field("action_results", actionResultsMap as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeStringCollection(triggeredDocs) + out.writeInt(actionResultsMap.size) + actionResultsMap.forEach { (alert, actionResults) -> + out.writeString(alert) + out.writeInt(actionResults.size) + actionResults.forEach { (id, result) -> + out.writeString(id) + result.writeTo(out) + } + } + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return DocumentLevelTriggerRunResult(sin) + } + + @JvmStatic + fun readActionResults(sin: StreamInput): MutableMap> { + val actionResultsMapReconstruct: MutableMap> = mutableMapOf() + val size = sin.readInt() + var idx = 0 + while (idx < size) { + val alert = sin.readString() + val actionResultsSize = sin.readInt() + val actionRunResultElem = mutableMapOf() + var i = 0 + while (i < actionResultsSize) { + 
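+                    // each inner entry is an action id followed by its serialized ActionRunResult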
val actionId = sin.readString() + val actionResult = ActionRunResult.readFrom(sin) + actionRunResultElem[actionId] = actionResult + ++i + } + actionResultsMapReconstruct[alert] = actionRunResultElem + ++idx + } + return actionResultsMapReconstruct + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt new file mode 100644 index 00000000..8872b525 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +data class IndexExecutionContext( + val queries: List, + val lastRunContext: MutableMap, // previous execution + val updatedLastRunContext: MutableMap, // without sequence numbers + val indexName: String, + val concreteIndexName: String, + val updatedIndexNames: List, + val concreteIndexNames: List, + val conflictingFields: List, + val docIds: List? = emptyList() +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + queries = sin.readList { DocLevelQuery(sin) }, + lastRunContext = sin.readMap() as MutableMap, + updatedLastRunContext = sin.readMap() as MutableMap, + indexName = sin.readString(), + concreteIndexName = sin.readString(), + updatedIndexNames = sin.readStringList(), + concreteIndexNames = sin.readStringList(), + conflictingFields = sin.readStringList(), + docIds = sin.readOptionalStringList() + ) + + override fun writeTo(out: StreamOutput?) 
{ + out!!.writeCollection(queries) + out.writeMap(lastRunContext) + out.writeMap(updatedLastRunContext) + out.writeString(indexName) + out.writeString(concreteIndexName) + out.writeStringCollection(updatedIndexNames) + out.writeStringCollection(concreteIndexNames) + out.writeStringCollection(conflictingFields) + out.writeOptionalStringCollection(docIds) + } + + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + builder!!.startObject() + .field("queries", queries) + .field("last_run_context", lastRunContext) + .field("updated_last_run_context", updatedLastRunContext) + .field("index_name", indexName) + .field("concrete_index_name", concreteIndexName) + .field("udpated_index_names", updatedIndexNames) + .field("concrete_index_names", concreteIndexNames) + .field("conflicting_fields", conflictingFields) + .field("doc_ids", docIds) + .endObject() + return builder + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt index b3472f8a..3846cea6 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt @@ -3,6 +3,10 @@ package org.opensearch.commons.alerting.model import org.opensearch.commons.alerting.model.ClusterMetricsInput.Companion.URI_FIELD import org.opensearch.commons.alerting.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD import org.opensearch.commons.alerting.model.SearchInput.Companion.SEARCH_FIELD +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput.Companion.REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput.Companion.REMOTE_MONITOR_INPUT_FIELD import org.opensearch.commons.notifications.model.BaseModel import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.xcontent.XContentParser @@ -14,7 +18,9 @@ interface Input : BaseModel { enum class Type(val value: String) { DOCUMENT_LEVEL_INPUT(DOC_LEVEL_INPUT_FIELD), CLUSTER_METRICS_INPUT(URI_FIELD), - SEARCH_INPUT(SEARCH_FIELD); + SEARCH_INPUT(SEARCH_FIELD), + REMOTE_MONITOR_INPUT(REMOTE_MONITOR_INPUT_FIELD), + REMOTE_DOC_LEVEL_MONITOR_INPUT(REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD); override fun toString(): String { return value @@ -32,8 +38,12 @@ interface Input : BaseModel { SearchInput.parseInner(xcp) } else if (xcp.currentName() == Type.CLUSTER_METRICS_INPUT.value) { ClusterMetricsInput.parseInner(xcp) - } else { + } else if (xcp.currentName() == Type.DOCUMENT_LEVEL_INPUT.value) { DocLevelMonitorInput.parse(xcp) + } else if (xcp.currentName() == Type.REMOTE_MONITOR_INPUT.value) { + RemoteMonitorInput.parse(xcp) + } else { + RemoteDocLevelMonitorInput.parse(xcp) } XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) return input @@ -46,6 +56,8 @@ interface Input : BaseModel { Type.DOCUMENT_LEVEL_INPUT -> DocLevelMonitorInput(sin) Type.CLUSTER_METRICS_INPUT -> ClusterMetricsInput(sin) Type.SEARCH_INPUT -> SearchInput(sin) + Type.REMOTE_MONITOR_INPUT -> RemoteMonitorInput(sin) + Type.REMOTE_DOC_LEVEL_MONITOR_INPUT -> RemoteDocLevelMonitorInput(sin) // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns // enum can be null in Java else -> throw 
IllegalStateException("Unexpected input [$type] when reading Trigger") @@ -54,4 +66,7 @@ interface Input : BaseModel { } fun name(): String + + /** Returns a representation of the schedule suitable for passing into painless and mustache scripts. */ + fun asTemplateArg(): Map = emptyMap() } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt index b2099d93..bccfccfe 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt @@ -1,6 +1,7 @@ package org.opensearch.commons.alerting.model import org.opensearch.common.CheckedFunction +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger import org.opensearch.commons.alerting.util.IndexUtils.Companion.MONITOR_MAX_INPUTS import org.opensearch.commons.alerting.util.IndexUtils.Companion.MONITOR_MAX_TRIGGERS import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION @@ -22,7 +23,7 @@ import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException import java.time.Instant -import java.util.Locale +import java.util.regex.Pattern data class Monitor( override val id: String = NO_ID, @@ -34,13 +35,14 @@ data class Monitor( override val enabledTime: Instant?, // TODO: Check how this behaves during rolling upgrade/multi-version cluster // Can read/write and parsing break if it's done from an old -> new version of the plugin? - val monitorType: MonitorType, + val monitorType: String, val user: User?, val schemaVersion: Int = NO_SCHEMA_VERSION, val inputs: List, val triggers: List, val uiMetadata: Map, val dataSources: DataSources = DataSources(), + val deleteQueryIndexInEveryRun: Boolean? = false, val owner: String? = "alerting" ) : ScheduledJob { @@ -56,13 +58,13 @@ data class Monitor( require(triggerIds.add(trigger.id)) { "Duplicate trigger id: ${trigger.id}. Trigger ids must be unique." } // Verify Trigger type based on Monitor type when (monitorType) { - MonitorType.QUERY_LEVEL_MONITOR -> + MonitorType.QUERY_LEVEL_MONITOR.value -> require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.BUCKET_LEVEL_MONITOR -> + MonitorType.BUCKET_LEVEL_MONITOR.value -> require(trigger is BucketLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.CLUSTER_METRICS_MONITOR -> + MonitorType.CLUSTER_METRICS_MONITOR.value -> require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } - MonitorType.DOC_LEVEL_MONITOR -> + MonitorType.DOC_LEVEL_MONITOR.value -> require(trigger is DocumentLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } } } @@ -94,7 +96,7 @@ data class Monitor( schedule = Schedule.readFrom(sin), lastUpdateTime = sin.readInstant(), enabledTime = sin.readOptionalInstant(), - monitorType = sin.readEnum(MonitorType::class.java), + monitorType = sin.readString(), user = if (sin.readBoolean()) { User(sin) } else { @@ -109,6 +111,7 @@ data class Monitor( } else { DataSources() }, + deleteQueryIndexInEveryRun = sin.readOptionalBoolean(), owner = sin.readOptionalString() ) @@ -126,8 +129,18 @@ data class Monitor( } /** Returns a representation of the monitor suitable for passing into painless and mustache scripts. 
*/ - fun asTemplateArg(): Map { - return mapOf(_ID to id, _VERSION to version, NAME_FIELD to name, ENABLED_FIELD to enabled) + fun asTemplateArg(): Map { + return mapOf( + _ID to id, + _VERSION to version, + NAME_FIELD to name, + ENABLED_FIELD to enabled, + MONITOR_TYPE_FIELD to monitorType.toString(), + ENABLED_TIME_FIELD to enabledTime?.toEpochMilli(), + LAST_UPDATE_TIME_FIELD to lastUpdateTime.toEpochMilli(), + SCHEDULE_FIELD to schedule.asTemplateArg(), + INPUTS_FIELD to inputs.map { it.asTemplateArg() } + ) } fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { @@ -158,6 +171,7 @@ data class Monitor( .optionalTimeField(LAST_UPDATE_TIME_FIELD, lastUpdateTime) if (uiMetadata.isNotEmpty()) builder.field(UI_METADATA_FIELD, uiMetadata) builder.field(DATA_SOURCES_FIELD, dataSources) + builder.field(DELETE_QUERY_INDEX_IN_EVERY_RUN_FIELD, deleteQueryIndexInEveryRun) builder.field(OWNER_FIELD, owner) if (params.paramAsBoolean("with_type", false)) builder.endObject() return builder.endObject() @@ -179,7 +193,7 @@ data class Monitor( schedule.writeTo(out) out.writeInstant(lastUpdateTime) out.writeOptionalInstant(enabledTime) - out.writeEnum(monitorType) + out.writeString(monitorType) out.writeBoolean(user != null) user?.writeTo(out) out.writeInt(schemaVersion) @@ -188,8 +202,10 @@ data class Monitor( inputs.forEach { if (it is SearchInput) { out.writeEnum(Input.Type.SEARCH_INPUT) - } else { + } else if (it is DocLevelMonitorInput) { out.writeEnum(Input.Type.DOCUMENT_LEVEL_INPUT) + } else { + out.writeEnum(Input.Type.REMOTE_DOC_LEVEL_MONITOR_INPUT) } it.writeTo(out) } @@ -199,6 +215,7 @@ data class Monitor( when (it) { is BucketLevelTrigger -> out.writeEnum(Trigger.Type.BUCKET_LEVEL_TRIGGER) is DocumentLevelTrigger -> out.writeEnum(Trigger.Type.DOCUMENT_LEVEL_TRIGGER) + is RemoteMonitorTrigger -> out.writeEnum(Trigger.Type.REMOTE_MONITOR_TRIGGER) else -> out.writeEnum(Trigger.Type.QUERY_LEVEL_TRIGGER) } it.writeTo(out) @@ -206,6 +223,7 @@ data class Monitor( out.writeMap(uiMetadata) out.writeBoolean(dataSources != null) // for backward compatibility with pre-existing monitors which don't have datasources field dataSources.writeTo(out) + out.writeOptionalBoolean(deleteQueryIndexInEveryRun) out.writeOptionalString(owner) } @@ -226,7 +244,9 @@ data class Monitor( const val UI_METADATA_FIELD = "ui_metadata" const val DATA_SOURCES_FIELD = "data_sources" const val ENABLED_TIME_FIELD = "enabled_time" + const val DELETE_QUERY_INDEX_IN_EVERY_RUN_FIELD = "delete_query_index_in_every_run" const val OWNER_FIELD = "owner" + val MONITOR_TYPE_PATTERN = Pattern.compile("[a-zA-Z0-9_]{5,25}") // This is defined here instead of in ScheduledJob to avoid having the ScheduledJob class know about all // the different subclasses and creating circular dependencies @@ -253,6 +273,7 @@ data class Monitor( val triggers: MutableList = mutableListOf() val inputs: MutableList = mutableListOf() var dataSources = DataSources() + var deleteQueryIndexInEveryRun = false var owner = "alerting" XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) @@ -265,9 +286,10 @@ data class Monitor( NAME_FIELD -> name = xcp.text() MONITOR_TYPE_FIELD -> { monitorType = xcp.text() - val allowedTypes = MonitorType.values().map { it.value } - if (!allowedTypes.contains(monitorType)) { - throw IllegalStateException("Monitor type should be one of $allowedTypes") + val matcher = MONITOR_TYPE_PATTERN.matcher(monitorType) + val find = matcher.matches() + if (!find) { 
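+                            // reject monitor types that do not match MONITOR_TYPE_PATTERN ([a-zA-Z0-9_]{5,25})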
+ throw IllegalStateException("Monitor type should follow pattern ${MONITOR_TYPE_PATTERN.pattern()}") } } USER_FIELD -> user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) @@ -305,6 +327,11 @@ data class Monitor( } else { DataSources.parse(xcp) } + DELETE_QUERY_INDEX_IN_EVERY_RUN_FIELD -> deleteQueryIndexInEveryRun = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + deleteQueryIndexInEveryRun + } else { + xcp.booleanValue() + } OWNER_FIELD -> owner = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) owner else xcp.text() else -> { xcp.skipChildren() @@ -325,13 +352,14 @@ data class Monitor( requireNotNull(schedule) { "Monitor schedule is null" }, lastUpdateTime ?: Instant.now(), enabledTime, - MonitorType.valueOf(monitorType.uppercase(Locale.ROOT)), + monitorType, user, schemaVersion, inputs.toList(), triggers.toList(), uiMetadata, dataSources, + deleteQueryIndexInEveryRun, owner ) } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt new file mode 100644 index 00000000..a90f3cc3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt @@ -0,0 +1,197 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.model.Monitor.Companion.NO_ID +import org.opensearch.commons.alerting.util.instant +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.index.seqno.SequenceNumbers +import java.io.IOException +import java.time.Instant + +data class MonitorMetadata( + val id: String, + val seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, + val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + val monitorId: String, + val lastActionExecutionTimes: List, + val lastRunContext: Map, + // Maps (sourceIndex + monitorId) --> concreteQueryIndex + val sourceToQueryIndexMapping: MutableMap = mutableMapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + monitorId = sin.readString(), + lastActionExecutionTimes = sin.readList(ActionExecutionTime.Companion::readFrom), + lastRunContext = Monitor.suppressWarning(sin.readMap()), + sourceToQueryIndexMapping = sin.readMap() as MutableMap + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + out.writeString(monitorId) + out.writeCollection(lastActionExecutionTimes) + out.writeMap(lastRunContext) + out.writeMap(sourceToQueryIndexMapping as MutableMap) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(MONITOR_ID_FIELD, monitorId) + .field(LAST_ACTION_EXECUTION_FIELD, lastActionExecutionTimes.toTypedArray()) + if (lastRunContext.isNotEmpty()) builder.field(LAST_RUN_CONTEXT_FIELD, lastRunContext) + if 
(sourceToQueryIndexMapping.isNotEmpty()) { + builder.field(SOURCE_TO_QUERY_INDEX_MAP_FIELD, sourceToQueryIndexMapping as MutableMap) + } + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "metadata" + const val MONITOR_ID_FIELD = "monitor_id" + const val LAST_ACTION_EXECUTION_FIELD = "last_action_execution_times" + const val LAST_RUN_CONTEXT_FIELD = "last_run_context" + const val SOURCE_TO_QUERY_INDEX_MAP_FIELD = "source_to_query_index_mapping" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse( + xcp: XContentParser, + id: String = NO_ID, + seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ): MonitorMetadata { + lateinit var monitorId: String + val lastActionExecutionTimes = mutableListOf() + var lastRunContext: Map = mapOf() + var sourceToQueryIndexMapping: MutableMap = mutableMapOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + MONITOR_ID_FIELD -> monitorId = xcp.text() + LAST_ACTION_EXECUTION_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + lastActionExecutionTimes.add(ActionExecutionTime.parse(xcp)) + } + } + LAST_RUN_CONTEXT_FIELD -> lastRunContext = xcp.map() + SOURCE_TO_QUERY_INDEX_MAP_FIELD -> sourceToQueryIndexMapping = xcp.map() as MutableMap + } + } + + return MonitorMetadata( + if (id != NO_ID) id else "$monitorId-metadata", + seqNo = seqNo, + primaryTerm = primaryTerm, + monitorId = monitorId, + lastActionExecutionTimes = lastActionExecutionTimes, + lastRunContext = lastRunContext, + sourceToQueryIndexMapping = sourceToQueryIndexMapping + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): MonitorMetadata { + return MonitorMetadata(sin) + } + + /** workflowMetadataId is used as key for monitor metadata in the case when the workflow execution happens + so the monitor lastRunContext (in the case of doc level monitor) is not interfering with the monitor execution + WorkflowMetadataId will be either workflowId-metadata (when executing the workflow as it is scheduled) + or timestampWithUUID-metadata (when a workflow is executed in a dry-run mode) + In the case of temp workflow, doc level monitors must have lastRunContext created from scratch + That's why we are using workflowMetadataId - in order to ensure that the doc level monitor metadata is created from scratch + **/ + fun getId(monitor: Monitor, workflowMetadataId: String? = null): String { + return if (workflowMetadataId.isNullOrEmpty()) { "${monitor.id}-metadata" } + // WorkflowMetadataId already contains -metadata suffix + else { "$workflowMetadataId-${monitor.id}-metadata" } + } + } +} + +/** + * A value object containing action execution time. 
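+ * Pairs an action id with the Instant it last executed; stored in MonitorMetadata.lastActionExecutionTimes.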
+ */ +data class ActionExecutionTime( + val actionId: String, + val executionTime: Instant +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // actionId + sin.readInstant() // executionTime + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(ACTION_ID_FIELD, actionId) + .field(EXECUTION_TIME_FIELD, executionTime) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(actionId) + out.writeInstant(executionTime) + } + + companion object { + const val ACTION_ID_FIELD = "action_id" + const val EXECUTION_TIME_FIELD = "execution_time" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ActionExecutionTime { + lateinit var actionId: String + lateinit var executionTime: Instant + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ACTION_ID_FIELD -> actionId = xcp.text() + EXECUTION_TIME_FIELD -> executionTime = xcp.instant()!! + } + } + + return ActionExecutionTime( + actionId, + executionTime + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionExecutionTime { + return ActionExecutionTime(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt new file mode 100644 index 00000000..d403313b --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt @@ -0,0 +1,215 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchException +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +data class MonitorRunResult( + val monitorName: String, + val periodStart: Instant, + val periodEnd: Instant, + val error: Exception? 
= null, + val inputResults: InputRunResults = InputRunResults(), + val triggerResults: Map = mapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + sin.readString(), // monitorName + sin.readInstant(), // periodStart + sin.readInstant(), // periodEnd + sin.readException(), // error + InputRunResults.readFrom(sin), // inputResults + suppressWarning(sin.readMap()) as Map // triggerResults + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("monitor_name", monitorName) + .optionalTimeField("period_start", periodStart) + .optionalTimeField("period_end", periodEnd) + .field("error", error?.message) + .field("input_results", inputResults) + .field("trigger_results", triggerResults) + .endObject() + } + + /** Returns error information to store in the Alert. Currently it's just the stack trace but it can be more */ + fun alertError(): AlertError? { + if (error != null) { + return AlertError(Instant.now(), "Failed running monitor:\n${error.userErrorMessage()}") + } + + if (inputResults.error != null) { + return AlertError(Instant.now(), "Failed fetching inputs:\n${inputResults.error.userErrorMessage()}") + } + return null + } + + fun scriptContextError(trigger: Trigger): Exception? { + return error ?: inputResults.error ?: triggerResults[trigger.id]?.error + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): MonitorRunResult { + return MonitorRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorName) + out.writeInstant(periodStart) + out.writeInstant(periodEnd) + out.writeException(error) + inputResults.writeTo(out) + out.writeMap(triggerResults) + } +} + +data class InputRunResults( + val results: List> = listOf(), + val error: Exception? = null, + val aggTriggersAfterKey: MutableMap? = null +) : Writeable, ToXContent { + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("results", results) + .field("error", error?.message) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeVInt(results.size) + for (map in results) { + out.writeMap(map) + } + out.writeException(error) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): InputRunResults { + val count = sin.readVInt() // count + val list = mutableListOf>() + for (i in 0 until count) { + list.add(suppressWarning(sin.readMap())) // result(map) + } + val error = sin.readException() // error + return InputRunResults(list, error) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } + + fun afterKeysPresent(): Boolean { + aggTriggersAfterKey?.forEach { + if (it.value.afterKey != null && !it.value.lastPage) { + return true + } + } + return false + } +} + +data class TriggerAfterKey(val afterKey: Map?, val lastPage: Boolean) + +data class ActionRunResult( + val actionId: String, + val actionName: String, + val output: Map, + val throttled: Boolean = false, + val executionTime: Instant? = null, + val error: Exception? 
= null +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // actionId + sin.readString(), // actionName + suppressWarning(sin.readMap()), // output + sin.readBoolean(), // throttled + sin.readOptionalInstant(), // executionTime + sin.readException() // error + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("id", actionId) + .field("name", actionName) + .field("output", output) + .field("throttled", throttled) + .optionalTimeField("executionTime", executionTime) + .field("error", error?.message) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(actionId) + out.writeString(actionName) + out.writeMap(output) + out.writeBoolean(throttled) + out.writeOptionalInstant(executionTime) + out.writeException(error) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionRunResult { + return ActionRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + } +} + +private val logger = LogManager.getLogger(MonitorRunResult::class.java) + +/** Constructs an error message from an exception suitable for human consumption. */ +fun Throwable.userErrorMessage(): String { + return when { + this is ScriptException -> this.scriptStack.joinToString(separator = "\n", limit = 100) + this is OpenSearchException -> this.detailedMessage + this.message != null -> { + logger.info("Internal error: ${this.message}. See the opensearch.log for details", this) + this.message!! + } + else -> { + logger.info("Unknown Internal error. See the OpenSearch log for details.", this) + "Unknown Internal error. See the OpenSearch log for details." 
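+            // the two fallback branches above log the full exception and surface only a short, user-safe message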
+ } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt index 0be93671..a88ef9b6 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt @@ -60,7 +60,13 @@ data class QueryLevelTrigger( ID_FIELD to id, NAME_FIELD to name, SEVERITY_FIELD to severity, - ACTIONS_FIELD to actions.map { it.asTemplateArg() } + ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to condition.idOrCode, + LANG_FIELD to condition.lang + ) + ) ) } @@ -77,6 +83,8 @@ data class QueryLevelTrigger( const val QUERY_LEVEL_TRIGGER_FIELD = "query_level_trigger" const val CONDITION_FIELD = "condition" const val SCRIPT_FIELD = "script" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( Trigger::class.java, diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt new file mode 100644 index 00000000..101d0067 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +open class QueryLevelTriggerRunResult( + override var triggerName: String, + open var triggered: Boolean, + override var error: Exception?, + open var actionResults: MutableMap = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap + ) + + override fun alertError(): AlertError? 
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggered", triggered) + .field("action_results", actionResults as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return QueryLevelTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt index d82bc375..3d08c095 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt @@ -146,6 +146,9 @@ sealed class Schedule : BaseModel { abstract fun getPeriodEndingAt(endTime: Instant?): Pair abstract fun runningOnTime(lastExecutionTime: Instant?): Boolean + + /** Returns a representation of the schedule suitable for passing into painless and mustache scripts. */ + abstract fun asTemplateArg(): Map } /** @@ -257,6 +260,14 @@ data class CronSchedule( out.writeString(expression) out.writeZoneId(timezone) } + + override fun asTemplateArg(): Map = + mapOf( + CRON_FIELD to mapOf( + EXPRESSION_FIELD to expression, + TIMEZONE_FIELD to timezone.toString() + ) + ) } data class IntervalSchedule( @@ -354,4 +365,12 @@ data class IntervalSchedule( out.writeInt(interval) out.writeEnum(unit) } + + override fun asTemplateArg(): Map = + mapOf( + PERIOD_FIELD to mapOf( + INTERVAL_FIELD to interval, + UNIT_FIELD to unit.toString() + ) + ) } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt index 7579a10d..99a5cb8d 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt @@ -85,4 +85,12 @@ data class SearchInput(val indices: List, val query: SearchSourceBuilder return SearchInput(sin) } } + + override fun asTemplateArg(): Map = + mapOf( + SEARCH_FIELD to mapOf( + INDICES_FIELD to indices, + QUERY_FIELD to query.toString() + ) + ) } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt index 1834f3b7..7cfb9f41 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt @@ -1,6 +1,7 @@ package org.opensearch.commons.alerting.model import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger import org.opensearch.commons.notifications.model.BaseModel import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.xcontent.XContentParser @@ -14,7 +15,8 @@ interface Trigger : BaseModel { 
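+        // the string value of each trigger type doubles as its XContent field name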
QUERY_LEVEL_TRIGGER(QueryLevelTrigger.QUERY_LEVEL_TRIGGER_FIELD), BUCKET_LEVEL_TRIGGER(BucketLevelTrigger.BUCKET_LEVEL_TRIGGER_FIELD), NOOP_TRIGGER(NoOpTrigger.NOOP_TRIGGER_FIELD), - CHAINED_ALERT_TRIGGER(ChainedAlertTrigger.CHAINED_ALERT_TRIGGER_FIELD); + CHAINED_ALERT_TRIGGER(ChainedAlertTrigger.CHAINED_ALERT_TRIGGER_FIELD), + REMOTE_MONITOR_TRIGGER(RemoteMonitorTrigger.REMOTE_MONITOR_TRIGGER_FIELD); override fun toString(): String { return value @@ -55,6 +57,7 @@ interface Trigger : BaseModel { Type.BUCKET_LEVEL_TRIGGER -> BucketLevelTrigger(sin) Type.DOCUMENT_LEVEL_TRIGGER -> DocumentLevelTrigger(sin) Type.CHAINED_ALERT_TRIGGER -> ChainedAlertTrigger(sin) + Type.REMOTE_MONITOR_TRIGGER -> RemoteMonitorTrigger(sin) // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns // enum can be null in Java else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt new file mode 100644 index 00000000..84efde39 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt @@ -0,0 +1,55 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.time.Instant + +abstract class TriggerRunResult( + open var triggerName: String, + open var error: Exception? = null +) : Writeable, ToXContent { + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("name", triggerName) + + internalXContent(builder, params) + val msg = error?.message + + builder.field("error", msg) + .endObject() + return builder + } + + abstract fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder + + /** Returns error information to store in the Alert. Currently it's just the stack trace but it can be more */ + open fun alertError(): AlertError? 
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(triggerName) + out.writeException(error) + } + + companion object { + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt new file mode 100644 index 00000000..48deaed6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt @@ -0,0 +1,106 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +data class WorkflowMetadata( + val id: String, + val workflowId: String, + val monitorIds: List, + val latestRunTime: Instant, + val latestExecutionId: String +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + workflowId = sin.readString(), + monitorIds = sin.readStringList(), + latestRunTime = sin.readInstant(), + latestExecutionId = sin.readString() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(workflowId) + out.writeStringCollection(monitorIds) + out.writeInstant(latestRunTime) + out.writeString(latestExecutionId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(WORKFLOW_ID_FIELD, workflowId) + .field(MONITOR_IDS_FIELD, monitorIds) + .optionalTimeField(LATEST_RUN_TIME, latestRunTime) + .field(LATEST_EXECUTION_ID, latestExecutionId) + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "workflow_metadata" + const val WORKFLOW_ID_FIELD = "workflow_id" + const val MONITOR_IDS_FIELD = "monitor_ids" + const val LATEST_RUN_TIME = "latest_run_time" + const val LATEST_EXECUTION_ID = "latest_execution_id" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser): WorkflowMetadata { + lateinit var workflowId: String + var monitorIds = mutableListOf() + lateinit var latestRunTime: Instant + lateinit var latestExecutionId: String + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + WORKFLOW_ID_FIELD -> workflowId = xcp.text() + MONITOR_IDS_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != 
XContentParser.Token.END_ARRAY) { + monitorIds.add(xcp.text()) + } + } + LATEST_RUN_TIME -> latestRunTime = xcp.instant()!! + LATEST_EXECUTION_ID -> latestExecutionId = xcp.text() + } + } + return WorkflowMetadata( + id = "$workflowId-metadata", + workflowId = workflowId, + monitorIds = monitorIds, + latestRunTime = latestRunTime, + latestExecutionId = latestExecutionId + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowMetadata { + return WorkflowMetadata(sin) + } + + fun getId(workflowId: String? = null) = "$workflowId-metadata" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt new file mode 100644 index 00000000..d478315e --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt @@ -0,0 +1,55 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder + +data class WorkflowRunContext( + // In case of dry run it's random generated id, while in other cases it's workflowId + val workflowId: String, + val workflowMetadataId: String, + val chainedMonitorId: String?, + val matchingDocIdsPerIndex: Map>, + val auditDelegateMonitorAlerts: Boolean +) : Writeable, ToXContentObject { + companion object { + fun readFrom(sin: StreamInput): WorkflowRunContext { + return WorkflowRunContext(sin) + } + } + + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readString(), + sin.readOptionalString(), + sin.readMap() as Map>, + sin.readBoolean() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeString(workflowMetadataId) + out.writeOptionalString(chainedMonitorId) + out.writeMap(matchingDocIdsPerIndex) + out.writeBoolean(auditDelegateMonitorAlerts) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params?): XContentBuilder { + builder.startObject() + .field("workflow_id", workflowId) + .field("workflow_metadata_id", workflowMetadataId) + .field("chained_monitor_id", chainedMonitorId) + .field("matching_doc_ids_per_index", matchingDocIdsPerIndex) + .field("audit_delegate_monitor_alerts", auditDelegateMonitorAlerts) + .endObject() + return builder + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt new file mode 100644 index 00000000..1b5fe3d8 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt @@ -0,0 +1,82 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.lang.Exception +import java.time.Instant + +data class WorkflowRunResult( + val workflowId: String, + val 
workflowName: String, + val monitorRunResults: List> = mutableListOf(), + val executionStartTime: Instant, + var executionEndTime: Instant? = null, + val executionId: String, + val error: Exception? = null, + val triggerResults: Map = mapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + workflowId = sin.readString(), + workflowName = sin.readString(), + monitorRunResults = sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) }, + executionStartTime = sin.readInstant(), + executionEndTime = sin.readOptionalInstant(), + executionId = sin.readString(), + error = sin.readException(), + triggerResults = suppressWarning(sin.readMap()) as Map + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeString(workflowName) + out.writeList(monitorRunResults) + out.writeInstant(executionStartTime) + out.writeOptionalInstant(executionEndTime) + out.writeString(executionId) + out.writeException(error) + out.writeMap(triggerResults) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + builder.field("execution_id", executionId) + builder.field("workflow_name", workflowName) + builder.field("workflow_id", workflowId) + builder.field("trigger_results", triggerResults) + builder.startArray("monitor_run_results") + for (monitorResult in monitorRunResults) { + monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) + } + builder.endArray() + .field("execution_start_time", executionStartTime) + .field("execution_end_time", executionEndTime) + .field("error", error?.message) + .endObject() + return builder + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowRunResult { + return WorkflowRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt index 4fa0c514..88d15210 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt @@ -65,7 +65,12 @@ data class Action( } fun asTemplateArg(): Map { - return mapOf(NAME_FIELD to name) + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + DESTINATION_ID_FIELD to destinationId, + THROTTLE_ENABLED_FIELD to throttleEnabled + ) } @Throws(IOException::class) diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt new file mode 100644 index 00000000..1e6184f3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt @@ -0,0 +1,81 @@ +package org.opensearch.commons.alerting.model.remote.monitors + +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD +import org.opensearch.commons.alerting.model.Input +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import 
org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.nio.ByteBuffer + +data class RemoteDocLevelMonitorInput(val input: BytesReference, val docLevelMonitorInput: DocLevelMonitorInput) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readBytesReference(), + DocLevelMonitorInput.readFrom(sin) + ) + + override fun asTemplateArg(): Map { + val bytes = input.toBytesRef().bytes + return mapOf( + RemoteDocLevelMonitorInput.INPUT_SIZE to bytes.size, + RemoteDocLevelMonitorInput.INPUT_FIELD to bytes, + DOC_LEVEL_INPUT_FIELD to docLevelMonitorInput + ) + } + + override fun name(): String { + return REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeBytesReference(input) + docLevelMonitorInput.writeTo(out) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val bytes = input.toBytesRef().bytes + return builder.startObject() + .startObject(REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD) + .field(RemoteMonitorInput.INPUT_SIZE, bytes.size) + .field(RemoteMonitorInput.INPUT_FIELD, bytes) + .field(DOC_LEVEL_INPUT_FIELD, docLevelMonitorInput) + .endObject() + .endObject() + } + + companion object { + const val INPUT_FIELD = "input" + const val INPUT_SIZE = "size" + const val REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD = "remote_doc_level_monitor_input" + + fun parse(xcp: XContentParser): RemoteDocLevelMonitorInput { + var bytes: ByteArray? = null + var size: Int = 0 + var docLevelMonitorInput: DocLevelMonitorInput? = null + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + RemoteMonitorInput.INPUT_FIELD -> bytes = xcp.binaryValue() + RemoteMonitorInput.INPUT_SIZE -> size = xcp.intValue() + Input.Type.DOCUMENT_LEVEL_INPUT.value -> { + docLevelMonitorInput = DocLevelMonitorInput.parse(xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + } + } + } + val input = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size)) + return RemoteDocLevelMonitorInput(input, docLevelMonitorInput!!) 
+ } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt new file mode 100644 index 00000000..29a939ff --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt @@ -0,0 +1,70 @@ +package org.opensearch.commons.alerting.model.remote.monitors + +import org.opensearch.commons.alerting.model.Input +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.nio.ByteBuffer + +data class RemoteMonitorInput(val input: BytesReference) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readBytesReference() + ) + + override fun asTemplateArg(): Map { + val bytes = input.toBytesRef().bytes + return mapOf( + INPUT_SIZE to bytes.size, + INPUT_FIELD to bytes + ) + } + + override fun name(): String { + return REMOTE_MONITOR_INPUT_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeBytesReference(input) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val bytes = input.toBytesRef().bytes + return builder.startObject() + .startObject(REMOTE_MONITOR_INPUT_FIELD) + .field(INPUT_SIZE, bytes.size) + .field(INPUT_FIELD, bytes) + .endObject() + .endObject() + } + + companion object { + const val INPUT_FIELD = "input" + const val INPUT_SIZE = "size" + const val REMOTE_MONITOR_INPUT_FIELD = "remote_monitor_input" + + fun parse(xcp: XContentParser): RemoteMonitorInput { + var bytes: ByteArray? 
= null + var size: Int = 0 + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + INPUT_FIELD -> bytes = xcp.binaryValue() + INPUT_SIZE -> size = xcp.intValue() + } + } + val input = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size)) + return RemoteMonitorInput(input) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt new file mode 100644 index 00000000..0e89e5ba --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt @@ -0,0 +1,126 @@ +package org.opensearch.commons.alerting.model.remote.monitors + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.nio.ByteBuffer + +data class RemoteMonitorTrigger( + override val id: String, + override val name: String, + override val severity: String, + override val actions: List, + val trigger: BytesReference +) : Trigger { + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readString(), + sin.readString(), + sin.readList(::Action), + sin.readBytesReference() + ) + + fun asTemplateArg(): Map { + val bytes = trigger.toBytesRef().bytes + return mapOf( + Trigger.ID_FIELD to id, + Trigger.NAME_FIELD to name, + Trigger.SEVERITY_FIELD to severity, + Trigger.ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + TRIGGER_SIZE to bytes.size, + TRIGGER_FIELD to bytes + ) + } + + override fun name(): String { + return REMOTE_MONITOR_TRIGGER_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeString(severity) + out.writeCollection(actions) + out.writeBytesReference(trigger) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val bytes = trigger.toBytesRef().bytes + return builder.startObject() + .startObject(REMOTE_MONITOR_TRIGGER_FIELD) + .field(Trigger.ID_FIELD, id) + .field(Trigger.NAME_FIELD, name) + .field(Trigger.SEVERITY_FIELD, severity) + .field(Trigger.ACTIONS_FIELD, actions.toTypedArray()) + .field(TRIGGER_SIZE, bytes.size) + .field(TRIGGER_FIELD, bytes) + .endObject() + .endObject() + } + + companion object { + const val TRIGGER_FIELD = "trigger" + const val TRIGGER_SIZE = "size" + const val REMOTE_MONITOR_TRIGGER_FIELD = "remote_monitor_trigger" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(REMOTE_MONITOR_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + fun parseInner(xcp: XContentParser): RemoteMonitorTrigger { + var id = UUIDs.base64UUID() // assign a default triggerId 
if one is not specified + lateinit var name: String + lateinit var severity: String + val actions: MutableList = mutableListOf() + var bytes: ByteArray? = null + var size: Int = 0 + + if (xcp.currentToken() != XContentParser.Token.START_OBJECT && xcp.currentToken() != XContentParser.Token.FIELD_NAME) { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) + } + + // If the parser began on START_OBJECT, move to the next token so that the while loop enters on + // the fieldName (or END_OBJECT if it's empty). + if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken() + while (xcp.currentToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + Trigger.ID_FIELD -> id = xcp.text() + Trigger.NAME_FIELD -> name = xcp.text() + Trigger.SEVERITY_FIELD -> severity = xcp.text() + Trigger.ACTIONS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actions.add(Action.parse(xcp)) + } + } + TRIGGER_FIELD -> bytes = xcp.binaryValue() + TRIGGER_SIZE -> size = xcp.intValue() + } + xcp.nextToken() + } + val trigger = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size)) + return RemoteMonitorTrigger(id, name, severity, actions, trigger) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt b/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt new file mode 100644 index 00000000..312758f0 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt @@ -0,0 +1,89 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.util + +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchException +import org.opensearch.OpenSearchSecurityException +import org.opensearch.OpenSearchStatusException +import org.opensearch.core.common.Strings +import org.opensearch.core.rest.RestStatus +import org.opensearch.index.IndexNotFoundException +import org.opensearch.index.engine.VersionConflictEngineException +import org.opensearch.indices.InvalidIndexNameException + +private val log = LogManager.getLogger(AlertingException::class.java) + +/** + * Converts into a user friendly message. + */ +class AlertingException(message: String, val status: RestStatus, val ex: Exception) : OpenSearchException(message, ex) { + + override fun status(): RestStatus { + return status + } + + companion object { + @JvmStatic + fun wrap(ex: Exception): OpenSearchException { + log.error("Alerting error: $ex") + + var friendlyMsg = "Unknown error" + var status = RestStatus.INTERNAL_SERVER_ERROR + when (ex) { + is IndexNotFoundException -> { + status = ex.status() + friendlyMsg = "Configured indices are not found: ${ex.index}" + } + is OpenSearchSecurityException -> { + status = ex.status() + friendlyMsg = "User doesn't have permissions to execute this action. Contact administrator." 
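+ // Replace the raw security exception message with a generic, user-friendly permissions message.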
+ } + is OpenSearchStatusException -> { + status = ex.status() + friendlyMsg = ex.message as String + } + is IllegalArgumentException -> { + status = RestStatus.BAD_REQUEST + friendlyMsg = ex.message as String + } + is VersionConflictEngineException -> { + status = ex.status() + friendlyMsg = ex.message as String + } + is InvalidIndexNameException -> { + status = RestStatus.BAD_REQUEST + friendlyMsg = ex.message as String + } + else -> { + if (!Strings.isNullOrEmpty(ex.message)) { + friendlyMsg = ex.message as String + } + } + } + // Wrapping the origin exception as runtime to avoid it being formatted. + // Currently, alerting-kibana is using `error.root_cause.reason` as text in the toast message. + // Below logic is to set friendly message to error.root_cause.reason. + return AlertingException(friendlyMsg, status, Exception("${ex.javaClass.name}: ${ex.message}")) + } + + @JvmStatic + fun merge(vararg ex: AlertingException): AlertingException { + var friendlyMsg = "" + var unwrappedExceptionMsg = "" + ex.forEach { + if (friendlyMsg != "") { + friendlyMsg += ", ${it.message}" + unwrappedExceptionMsg += ", ${it.ex.message}" + } else { + friendlyMsg = it.message.orEmpty() + unwrappedExceptionMsg = "${it.ex.message}" + } + } + return AlertingException(friendlyMsg, ex.first().status, Exception(unwrappedExceptionMsg)) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt b/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt index 2dbda47b..887e8430 100644 --- a/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt +++ b/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt @@ -1,5 +1,6 @@ package org.opensearch.commons.alerting.util +import org.opensearch.commons.alerting.model.AggregationResultBucket import org.opensearch.commons.alerting.model.Monitor import org.opensearch.commons.alerting.settings.SupportedClusterMetricsSettings import org.opensearch.commons.authuser.User @@ -8,9 +9,28 @@ import org.opensearch.core.xcontent.XContentBuilder import org.opensearch.core.xcontent.XContentParser import org.opensearch.core.xcontent.XContentParserUtils import java.time.Instant +import java.util.Locale class IndexUtils { companion object { + /** + * This regex asserts that the string: + * The index does not start with an underscore _, hyphen -, or plus sign + + * The index does not contain two consecutive periods (e.g., `..`) + * The index does not contain any whitespace characters, commas, backslashes, forward slashes, asterisks, + * question marks, double quotes, less than or greater than signs, pipes, colons, or periods. 
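+ * The index does not contain hash signs (#)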
+ * The length of the index must be between 1 and 255 characters + */ + val VALID_INDEX_NAME_REGEX = Regex("""^(?![_\-\+])(?!.*\.\.)[^\s,\\\/\*\?"<>|#:\.]{1,255}$""") + + /** + * This regex asserts that the string: + * The index pattern can start with an optional period + * The index pattern can contain lowercase letters, digits, underscores, hyphens, asterisks, and periods + * The length of the index pattern must be between 1 and 255 characters + */ + val INDEX_PATTERN_REGEX = Regex("""^(?=.{1,255}$)\.?[a-z0-9_\-\*\.]+$""") + const val NO_SCHEMA_VERSION = 0 const val MONITOR_MAX_INPUTS = 1 @@ -28,7 +48,9 @@ class IndexUtils { } } -fun Monitor.isBucketLevelMonitor(): Boolean = this.monitorType == Monitor.MonitorType.BUCKET_LEVEL_MONITOR +fun Monitor.isBucketLevelMonitor(): Boolean = + isMonitorOfStandardType() && + Monitor.MonitorType.valueOf(this.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.BUCKET_LEVEL_MONITOR fun XContentBuilder.optionalUserField(name: String, user: User?): XContentBuilder { if (user == null) { @@ -37,6 +59,13 @@ fun XContentBuilder.optionalUserField(name: String, user: User?): XContentBuilde return this.field(name, user) } +fun XContentBuilder.optionalUsernameField(name: String, user: User?): XContentBuilder { + if (user == null) { + return nullField(name) + } + return this.field(name, user.name) +} + fun XContentBuilder.optionalTimeField(name: String, instant: Instant?): XContentBuilder { if (instant == null) { return nullField(name) @@ -60,3 +89,16 @@ fun XContentParser.instant(): Instant? { * Extension function for ES 6.3 and above that duplicates the ES 6.2 XContentBuilder.string() method. */ fun XContentBuilder.string(): String = BytesReference.bytes(this).utf8ToString() + +fun Monitor.isMonitorOfStandardType(): Boolean { + val standardMonitorTypes = Monitor.MonitorType.values().map { it.value.uppercase(Locale.ROOT) }.toSet() + return standardMonitorTypes.contains(this.monitorType.uppercase(Locale.ROOT)) +} + +fun getBucketKeysHash(bucketKeys: List): String = bucketKeys.joinToString(separator = "#") + +/** + * Since buckets can have multi-value keys, this converts the bucket key values to a string that can be used + * as the key for a HashMap to easily retrieve [AggregationResultBucket] based on the bucket key values. 
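+ * For example, bucket keys ["us-west-2", "5xx"] produce the hash key "us-west-2#5xx".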
+ */ +fun AggregationResultBucket.getBucketKeysHash(): String = getBucketKeysHash(this.bucketKeys) diff --git a/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt b/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt index ab9f7409..3bca2f9b 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt @@ -8,9 +8,30 @@ package org.opensearch.commons.utils import java.net.URL import java.util.regex.Pattern +/** + * This regex asserts that the string: + * Starts with a lowercase letter, or digit + * Contains a sequence of characters followed by an optional colon and another sequence of characters + * The sequences of characters can include lowercase letters, uppercase letters, digits, underscores, or hyphens + * The total length of the string can range from 1 to 255 characters + */ +val CLUSTER_NAME_REGEX = Regex("^(?=.{1,255}$)[a-z0-9]([a-zA-Z0-9_-]*:?[a-zA-Z0-9_-]*)$") + +/** + * This regex asserts that the string: + * Starts with a lowercase letter, digit, or asterisk + * Contains a sequence of characters followed by an optional colon and another sequence of characters + * The sequences of characters can include lowercase letters, uppercase letters, digits, underscores, asterisks, or hyphens + * The total length of the string can range from 1 to 255 characters + */ +val CLUSTER_PATTERN_REGEX = Regex("^(?=.{1,255}$)[a-z0-9*]([a-zA-Z0-9_*-]*:?[a-zA-Z0-9_*-]*)$") + // Valid ID characters = (All Base64 chars + "_-") to support UUID format and Base64 encoded IDs private val VALID_ID_CHARS: Set = (('a'..'z') + ('A'..'Z') + ('0'..'9') + '+' + '/' + '_' + '-').toSet() +// Invalid characters in a new name field: [* ? < > | #] +private val INVALID_NAME_CHARS = "^\\*\\?<>|#" + fun validateUrl(urlString: String) { require(isValidUrl(urlString)) { "Invalid URL or unsupported" } } @@ -53,3 +74,15 @@ fun validateIamRoleArn(roleArn: String) { val roleArnRegex = Pattern.compile("^arn:aws(-[^:]+)?:iam::([0-9]{12}):([a-zA-Z_0-9+=,.@\\-_/]+)$") require(roleArnRegex.matcher(roleArn).find()) { "Invalid AWS role ARN: $roleArn " } } + +fun isValidName(name: String): Boolean { + // Regex to restrict string so that it cannot start with [_, -, +], + // contain two consecutive periods or contain invalid chars + val regex = Regex("""^(?![_\-\+])(?!.*\.\.)[^$INVALID_NAME_CHARS]+$""") + + return name.matches(regex) +} + +fun getInvalidNameChars(): String { + return INVALID_NAME_CHARS +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt new file mode 100644 index 00000000..aa315aeb --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt @@ -0,0 +1,92 @@ +package org.opensearch.commons.alerting + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.CorrelationAlert +import org.opensearch.commons.utils.recreateObject +import java.time.temporal.ChronoUnit + +class CorrelationAlertTests { + + @Test + fun `test correlation alert as template args`() { + // Create sample data for CorrelationAlert + val correlationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + + // Generate template args using asTemplateArg() function + val templateArgs = 
createCorrelationAlertTemplateArgs(correlationAlert) + + assertEquals( + templateArgs["correlated_finding_ids"], + correlationAlert.correlatedFindingIds, + "Template args correlatedFindingIds does not match" + ) + assertEquals( + templateArgs["correlation_rule_id"], + correlationAlert.correlationRuleId, + "Template args correlationRuleId does not match" + ) + assertEquals( + templateArgs["correlation_rule_name"], + correlationAlert.correlationRuleName, + "Template args correlationRuleName does not match" + ) + + // Verify inherited properties from BaseAlert + assertEquals(templateArgs["id"], correlationAlert.id, "alertId1") + assertEquals(templateArgs["version"], correlationAlert.version, "Template args version does not match") + assertEquals(templateArgs["user"], correlationAlert.user, "Template args user does not match") + assertEquals( + templateArgs["trigger_name"], + correlationAlert.triggerName, + "Template args triggerName does not match" + ) + assertEquals(templateArgs["state"], correlationAlert.state, "Template args state does not match") + assertEquals(templateArgs["start_time"], correlationAlert.startTime, "Template args startTime does not match") + assertEquals(templateArgs["end_time"], correlationAlert.endTime, "Template args endTime does not match") + assertEquals( + templateArgs["acknowledged_time"], + correlationAlert.acknowledgedTime, + "Template args acknowledgedTime does not match" + ) + assertEquals( + templateArgs["error_message"], + correlationAlert.errorMessage, + "Template args errorMessage does not match" + ) + assertEquals(templateArgs["severity"], correlationAlert.severity, "Template args severity does not match") + assertEquals( + templateArgs["action_execution_results"], + correlationAlert.actionExecutionResults, + "Template args actionExecutionResults does not match" + ) + } + + @Test + fun `test alert acknowledged`() { + val ackCorrelationAlert = randomCorrelationAlert("alertId1", Alert.State.ACKNOWLEDGED) + Assertions.assertTrue(ackCorrelationAlert.isAcknowledged(), "Alert is not acknowledged") + + val activeCorrelationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + Assertions.assertFalse(activeCorrelationAlert.isAcknowledged(), "Alert is acknowledged") + } + + @Test + fun `Feature Correlation Alert serialize and deserialize should be equal`() { + val correlationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + val recreatedAlert = recreateObject(correlationAlert) { CorrelationAlert(it) } + assertEquals(correlationAlert.correlatedFindingIds, recreatedAlert.correlatedFindingIds) + assertEquals(correlationAlert.correlationRuleId, recreatedAlert.correlationRuleId) + assertEquals(correlationAlert.correlationRuleName, recreatedAlert.correlationRuleName) + assertEquals(correlationAlert.triggerName, recreatedAlert.triggerName) + assertEquals(correlationAlert.state, recreatedAlert.state) + val expectedStartTime = correlationAlert.startTime.truncatedTo(ChronoUnit.MILLIS) + val actualStartTime = recreatedAlert.startTime.truncatedTo(ChronoUnit.MILLIS) + assertEquals(expectedStartTime, actualStartTime) + assertEquals(correlationAlert.severity, recreatedAlert.severity) + assertEquals(correlationAlert.id, recreatedAlert.id) + assertEquals(correlationAlert.actionExecutionResults, recreatedAlert.actionExecutionResults) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt b/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt index ca193224..ccba0b47 100644 --- 
a/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt @@ -18,23 +18,31 @@ import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.ActionRunResult import org.opensearch.commons.alerting.model.AggregationResultBucket import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BaseAlert import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.BucketLevelTriggerRunResult import org.opensearch.commons.alerting.model.ChainedAlertTrigger import org.opensearch.commons.alerting.model.ChainedMonitorFindings import org.opensearch.commons.alerting.model.ClusterMetricsInput import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.CorrelationAlert import org.opensearch.commons.alerting.model.Delegate import org.opensearch.commons.alerting.model.DocLevelMonitorInput import org.opensearch.commons.alerting.model.DocLevelQuery import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.DocumentLevelTriggerRunResult import org.opensearch.commons.alerting.model.Finding import org.opensearch.commons.alerting.model.Input +import org.opensearch.commons.alerting.model.InputRunResults import org.opensearch.commons.alerting.model.IntervalSchedule import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorRunResult import org.opensearch.commons.alerting.model.NoOpTrigger import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.QueryLevelTriggerRunResult import org.opensearch.commons.alerting.model.Schedule import org.opensearch.commons.alerting.model.SearchInput import org.opensearch.commons.alerting.model.Sequence @@ -48,6 +56,8 @@ import org.opensearch.commons.alerting.model.action.AlertCategory import org.opensearch.commons.alerting.model.action.PerAlertActionScope import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.alerting.util.getBucketKeysHash import org.opensearch.commons.alerting.util.string import org.opensearch.commons.authuser.User import org.opensearch.core.xcontent.NamedXContentRegistry @@ -80,7 +90,7 @@ fun randomQueryLevelMonitor( withMetadata: Boolean = false ): Monitor { return Monitor( - name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, + name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() ) @@ -98,7 +108,7 @@ fun randomQueryLevelMonitorWithoutUser( withMetadata: Boolean = false ): Monitor { return Monitor( - name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, enabled = enabled, inputs = inputs, + name = name, monitorType = 
Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = null, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() ) @@ -122,7 +132,7 @@ fun randomBucketLevelMonitor( withMetadata: Boolean = false ): Monitor { return Monitor( - name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR, enabled = enabled, inputs = inputs, + name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() ) @@ -140,7 +150,7 @@ fun randomClusterMetricsMonitor( withMetadata: Boolean = false ): Monitor { return Monitor( - name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR, enabled = enabled, inputs = inputs, + name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR.value, enabled = enabled, inputs = inputs, schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() ) @@ -158,7 +168,7 @@ fun randomDocumentLevelMonitor( withMetadata: Boolean = false ): Monitor { return Monitor( - name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR, enabled = enabled, inputs = inputs, + name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() ) @@ -398,6 +408,13 @@ fun randomDocLevelMonitorInput( return DocLevelMonitorInput(description = description, indices = indices, queries = queries) } +fun randomSearchInput( + indices: List = listOf(1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { RandomStrings.randomAsciiLettersOfLength(Random(), 10) }, + query: SearchSourceBuilder = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) +): SearchInput { + return SearchInput(indices, query) +} + fun randomClusterMetricsInput( path: String = ClusterMetricsInput.ClusterMetricType.values() .filter { it.defaultPath.isNotBlank() && !it.requiresPathParams } @@ -505,6 +522,12 @@ fun parser(xc: String): XContentParser { return parser } +fun parser(xc: ByteArray): XContentParser { + val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) + parser.nextToken() + return parser +} + fun xContentRegistry(): NamedXContentRegistry { return NamedXContentRegistry( listOf( @@ -514,7 +537,8 @@ fun xContentRegistry(): NamedXContentRegistry { BucketLevelTrigger.XCONTENT_REGISTRY, DocumentLevelTrigger.XCONTENT_REGISTRY, ChainedAlertTrigger.XCONTENT_REGISTRY, - NoOpTrigger.XCONTENT_REGISTRY + NoOpTrigger.XCONTENT_REGISTRY, + RemoteMonitorTrigger.XCONTENT_REGISTRY ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents ) } @@ -601,3 +625,169 @@ fun randomFinding( timestamp = timestamp ) } + +fun randomCorrelationAlert( + id: String, + state: Alert.State +): CorrelationAlert { + val correlatedFindingIds = listOf("finding1", "finding2") + val correlationRuleId = "rule1" + val correlationRuleName = "Rule 1" + val id = id + val version = 1L + val schemaVersion = 1 + val user = randomUser() + val triggerName = "Trigger 1" 
+ val state = state + val startTime = Instant.now() + val endTime: Instant? = null + val acknowledgedTime: Instant? = null + val errorMessage: String? = null + val severity = "high" + val actionExecutionResults = listOf(randomActionExecutionResult()) + + return CorrelationAlert( + correlatedFindingIds, correlationRuleId, correlationRuleName, + id, version, schemaVersion, user, triggerName, state, + startTime, endTime, acknowledgedTime, errorMessage, severity, + actionExecutionResults + ) +} + +fun createUnifiedAlertTemplateArgs(unifiedAlert: BaseAlert): Map { + return mapOf( + BaseAlert.ALERT_ID_FIELD to unifiedAlert.id, + BaseAlert.ALERT_VERSION_FIELD to unifiedAlert.version, + BaseAlert.SCHEMA_VERSION_FIELD to unifiedAlert.schemaVersion, + BaseAlert.USER_FIELD to unifiedAlert.user, + BaseAlert.TRIGGER_NAME_FIELD to unifiedAlert.triggerName, + BaseAlert.STATE_FIELD to unifiedAlert.state, + BaseAlert.START_TIME_FIELD to unifiedAlert.startTime, + BaseAlert.END_TIME_FIELD to unifiedAlert.endTime, + BaseAlert.ACKNOWLEDGED_TIME_FIELD to unifiedAlert.acknowledgedTime, + BaseAlert.ERROR_MESSAGE_FIELD to unifiedAlert.errorMessage, + BaseAlert.SEVERITY_FIELD to unifiedAlert.severity, + BaseAlert.ACTION_EXECUTION_RESULTS_FIELD to unifiedAlert.actionExecutionResults + ) +} + +fun createCorrelationAlertTemplateArgs(correlationAlert: CorrelationAlert): Map { + val unifiedAlertTemplateArgs = createUnifiedAlertTemplateArgs(correlationAlert) + return unifiedAlertTemplateArgs + mapOf( + CorrelationAlert.CORRELATED_FINDING_IDS to correlationAlert.correlatedFindingIds, + CorrelationAlert.CORRELATION_RULE_ID to correlationAlert.correlationRuleId, + CorrelationAlert.CORRELATION_RULE_NAME to correlationAlert.correlationRuleName + ) +} + +fun randomInputRunResults(): InputRunResults { + return InputRunResults(listOf(), null) +} + +fun randomActionRunResult(): ActionRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", "val1")) + map.plus(Pair("key2", "val2")) + return ActionRunResult( + "1234", + "test-action", + map, + false, + Instant.now(), + null + ) +} + +fun randomDocumentLevelTriggerRunResult(): DocumentLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return DocumentLevelTriggerRunResult( + "trigger-name", + mutableListOf(UUIDs.randomBase64UUID().toString()), + null, + mutableMapOf(Pair("alertId", map)) + ) +} +fun randomDocumentLevelMonitorRunResult(): MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomDocumentLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} + +fun randomBucketLevelTriggerRunResult(): BucketLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + + val aggBucket1 = AggregationResultBucket( + "parent_bucket_path_1", + listOf("bucket_key_1"), + mapOf("k1" to "val1", "k2" to "val2") + ) + val aggBucket2 = AggregationResultBucket( + "parent_bucket_path_2", + listOf("bucket_key_2"), + mapOf("k1" to "val1", "k2" to "val2") + ) + + val actionResultsMap: MutableMap> = mutableMapOf() + actionResultsMap[aggBucket1.getBucketKeysHash()] = map + actionResultsMap[aggBucket2.getBucketKeysHash()] = map + + return BucketLevelTriggerRunResult( + "trigger-name", + null, + mapOf( + 
aggBucket1.getBucketKeysHash() to aggBucket1, + aggBucket2.getBucketKeysHash() to aggBucket2 + ), + actionResultsMap + ) +} + +fun randomBucketLevelMonitorRunResult(): MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomBucketLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} + +fun randomQueryLevelTriggerRunResult(): QueryLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return QueryLevelTriggerRunResult("trigger-name", true, null, map) +} + +fun randomQueryLevelMonitorRunResult(): MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomQueryLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt new file mode 100644 index 00000000..70a22953 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt @@ -0,0 +1,22 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteCommentRequestTests { + @Test + fun `test delete comment request writing and parsing`() { + val req = DeleteCommentRequest("1234") + assertNotNull(req) + assertEquals("1234", req.commentId) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = DeleteCommentRequest(sin) + assertEquals("1234", newReq.commentId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt new file mode 100644 index 00000000..f10067ac --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt @@ -0,0 +1,22 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteCommentResponseTests { + @Test + fun `test delete comment response writing and parsing`() { + val res = DeleteCommentResponse(id = "123") + assertNotNull(res) + assertEquals("123", res.commentId) + + val out = BytesStreamOutput() + res.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRes = DeleteCommentResponse(sin) + assertEquals("123", newRes.commentId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt new file mode 100644 index 00000000..dda45483 --- /dev/null +++ 
b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt @@ -0,0 +1,92 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.Assert.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.ActionExecutionTime +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.IndexExecutionContext +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorMetadata +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.WorkflowRunContext +import org.opensearch.commons.alerting.randomDocumentLevelMonitor +import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.index.shard.ShardId +import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.script.Script +import java.time.Instant +import java.time.temporal.ChronoUnit +import java.util.UUID + +class DocLevelMonitorFanOutRequestTests { + + @Test + fun `test doc level monitor fan out request as stream`() { + val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", fields = listOf(), name = "3") + val docLevelInput = DocLevelMonitorInput("description", listOf("test-index"), listOf(docQuery)) + + val trigger = randomDocumentLevelTrigger(condition = Script("return true")) + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + ) + val monitorMetadata = MonitorMetadata( + "test", + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + Monitor.NO_ID, + listOf(ActionExecutionTime("", Instant.now())), + mutableMapOf("index" to mutableMapOf("1" to "1")), + mutableMapOf("test-index" to ".opensearch-sap-test_windows-queries-000001") + ) + val indexExecutionContext = IndexExecutionContext( + listOf(docQuery), + mutableMapOf("index" to mutableMapOf("1" to "1")), + mutableMapOf("index" to mutableMapOf("1" to "1")), + "test-index", + "test-index", + listOf("test-index"), + listOf("test-index"), + listOf("test-field"), + listOf("1", "2") + ) + val workflowRunContext = WorkflowRunContext( + Workflow.NO_ID, + Workflow.NO_ID, + Monitor.NO_ID, + mutableMapOf("index" to listOf("1")), + true + ) + val docLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest( + monitor, + false, + monitorMetadata, + UUID.randomUUID().toString(), + indexExecutionContext, + listOf(ShardId("test-index", UUID.randomUUID().toString(), 0)), + listOf("test-index"), + workflowRunContext + ) + val out = BytesStreamOutput() + docLevelMonitorFanOutRequest.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDocLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest(sin) + assertEquals(docLevelMonitorFanOutRequest.monitor, newDocLevelMonitorFanOutRequest.monitor) + assertEquals(docLevelMonitorFanOutRequest.executionId, newDocLevelMonitorFanOutRequest.executionId) + assertEquals(docLevelMonitorFanOutRequest.monitorMetadata, newDocLevelMonitorFanOutRequest.monitorMetadata) + 
assertEquals(docLevelMonitorFanOutRequest.indexExecutionContext, newDocLevelMonitorFanOutRequest.indexExecutionContext) + assertEquals(docLevelMonitorFanOutRequest.shardIds, newDocLevelMonitorFanOutRequest.shardIds) + assertEquals(docLevelMonitorFanOutRequest.workflowRunContext, newDocLevelMonitorFanOutRequest.workflowRunContext) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt new file mode 100644 index 00000000..645b7d5c --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.Assert.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.InputRunResults +import org.opensearch.commons.alerting.randomDocumentLevelTriggerRunResult +import org.opensearch.core.common.io.stream.StreamInput + +class DocLevelMonitorFanOutResponseTests { + + @Test + fun `test doc level monitor fan out response with errors as stream`() { + val docLevelMonitorFanOutResponse = DocLevelMonitorFanOutResponse( + "nodeid", + "eid", + "monitorId", + mutableMapOf("index" to mutableMapOf("1" to "1")), + InputRunResults(error = null), + mapOf("1" to randomDocumentLevelTriggerRunResult(), "2" to randomDocumentLevelTriggerRunResult()) + ) + val out = BytesStreamOutput() + docLevelMonitorFanOutResponse.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDocLevelMonitorFanOutResponse = DocLevelMonitorFanOutResponse(sin) + assertEquals(docLevelMonitorFanOutResponse.nodeId, newDocLevelMonitorFanOutResponse.nodeId) + assertEquals(docLevelMonitorFanOutResponse.executionId, newDocLevelMonitorFanOutResponse.executionId) + assertEquals(docLevelMonitorFanOutResponse.monitorId, newDocLevelMonitorFanOutResponse.monitorId) + assertEquals(docLevelMonitorFanOutResponse.lastRunContexts, newDocLevelMonitorFanOutResponse.lastRunContexts) + assertEquals(docLevelMonitorFanOutResponse.inputResults, newDocLevelMonitorFanOutResponse.inputResults) + assertEquals(docLevelMonitorFanOutResponse.triggerResults, newDocLevelMonitorFanOutResponse.triggerResults) + } + + @Test + fun `test doc level monitor fan out response as stream`() { + val workflow = DocLevelMonitorFanOutResponse( + "nodeid", + "eid", + "monitorId", + mapOf("index" to mapOf("1" to "1")) as MutableMap, + InputRunResults(), + mapOf("1" to randomDocumentLevelTriggerRunResult(), "2" to randomDocumentLevelTriggerRunResult()) + ) + val out = BytesStreamOutput() + workflow.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newWorkflow = DocLevelMonitorFanOutResponse(sin) + assertEquals(workflow.nodeId, newWorkflow.nodeId) + assertEquals(workflow.executionId, newWorkflow.executionId) + assertEquals(workflow.monitorId, newWorkflow.monitorId) + assertEquals(workflow.lastRunContexts, newWorkflow.lastRunContexts) + assertEquals(workflow.inputResults, newWorkflow.inputResults) + assertEquals(workflow.triggerResults, newWorkflow.triggerResults) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt index d91c7471..eb3f08e4 100644 --- 
a/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt @@ -42,7 +42,7 @@ class GetMonitorResponseTests : OpenSearchTestCase() { schedule = cronSchedule, lastUpdateTime = Instant.now(), enabledTime = Instant.now(), - monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, user = randomUser(), schemaVersion = 0, inputs = mutableListOf(), diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt new file mode 100644 index 00000000..c9afb0d6 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt @@ -0,0 +1,44 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.rest.RestRequest + +class IndexCommentRequestTests { + @Test + fun `test index comment post request`() { + val req = IndexCommentRequest("123", "alert", "456", 1L, 2L, RestRequest.Method.POST, "comment") + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexCommentRequest(sin) + assertEquals("123", newReq.entityId) + assertEquals("alert", newReq.entityType) + assertEquals("456", newReq.commentId) + assertEquals(1L, newReq.seqNo) + assertEquals(2L, newReq.primaryTerm) + assertEquals(RestRequest.Method.POST, newReq.method) + assertEquals("comment", newReq.content) + } + + @Test + fun `test index comment put request`() { + val req = IndexCommentRequest("123", "alert", "456", 1L, 2L, RestRequest.Method.PUT, "comment") + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexCommentRequest(sin) + assertEquals("123", newReq.entityId) + assertEquals("alert", newReq.entityType) + assertEquals("456", newReq.commentId) + assertEquals(1L, newReq.seqNo) + assertEquals(2L, newReq.primaryTerm) + assertEquals(RestRequest.Method.PUT, newReq.method) + assertEquals("comment", newReq.content) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt new file mode 100644 index 00000000..57e4801b --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt @@ -0,0 +1,35 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.Comment +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import java.time.Instant + +class IndexCommentResponseTests { + @Test + fun `test index comment response with comment`() { + val comment = Comment( + "123", + "alert", + "456", + "comment", + Instant.now(), + Instant.now(), + randomUser() + ) + val req = IndexCommentResponse("1234", 1L, 2L, comment) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + 
req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexCommentResponse(sin) + Assertions.assertEquals("1234", newReq.id) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertNotNull(newReq.comment) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt index 2b5ee04d..ca3afa3e 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt @@ -26,7 +26,7 @@ class IndexMonitorResponseTests { schedule = cronSchedule, lastUpdateTime = Instant.now(), enabledTime = Instant.now(), - monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR, + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, user = randomUser(), schemaVersion = 0, inputs = mutableListOf(), diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt new file mode 100644 index 00000000..596d16c4 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt @@ -0,0 +1,27 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.search.SearchRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import org.opensearch.test.rest.OpenSearchRestTestCase +import java.util.concurrent.TimeUnit + +class SearchCommentRequestTests : OpenSearchTestCase() { + fun `test search comments request`() { + val searchSourceBuilder = SearchSourceBuilder().from(0).size(100).timeout(TimeValue(60, TimeUnit.SECONDS)) + val searchRequest = SearchRequest().indices(OpenSearchRestTestCase.randomAlphaOfLength(10)).source(searchSourceBuilder) + val searchCommentRequest = SearchCommentRequest(searchRequest) + assertNotNull(searchCommentRequest) + + val out = BytesStreamOutput() + searchCommentRequest.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = SearchCommentRequest(sin) + + assertNotNull(newReq.searchRequest) + assertEquals(1, newReq.searchRequest.indices().size) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt new file mode 100644 index 00000000..c5c6d439 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt @@ -0,0 +1,25 @@ +package org.opensearch.commons.alerting.alerts + +import org.junit.Assert +import org.junit.jupiter.api.Test +import java.time.Instant + +class AlertErrorTests { + + @Test + fun `test alertError obfuscates IP addresses in message`() { + val message = + "AlertingException[[5f32db4e2a4fa94f6778cb895dae7a24][10.212.77.91:9300][indices:admin/create]]; " + + "nested: Exception[org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895dae7a24][10.212.77.91:9300]" + + "[indices:admin/create]];; java.lang.Exception: org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895" + + "dae7a24][10.212.77.91:9300][indices:admin/create]" + val alertError = 
AlertError(Instant.now(), message = message) + Assert.assertEquals( + alertError.message, + "AlertingException[[5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300][indices:admin/create]]; " + + "nested: Exception[org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300]" + + "[indices:admin/create]];; java.lang.Exception: org.opensearch.transport.RemoteTransportException: " + + "[5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300][indices:admin/create]" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt new file mode 100644 index 00000000..1a9e3cbe --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt @@ -0,0 +1,54 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.CONDITION_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.PARENT_BUCKET_PATH +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomBucketLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class BucketLevelTriggerTests { + + @Test + fun `test BucketLevelTrigger asTemplateArgs`() { + val trigger = randomBucketLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + assertEquals( + trigger.getParentBucketPath(), + templateArgs[PARENT_BUCKET_PATH], + "Template arg field 'parentBucketPath' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? 
Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.bucketSelector.script.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.bucketSelector.script.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt index 9980d5db..6d1c1055 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt @@ -9,6 +9,26 @@ class ClusterMetricsInputTests { private var pathParams = "" private var url = "" + private val validClusters = listOf( + "cluster-name", + "cluster:name" + ) + + private val invalidClusters = listOf( + // Character length less than 1 should return FALSE + "", + + // Character length greater than 255 should return FALSE + (0..255).joinToString(separator = "") { "a" }, + + // Invalid characters should return FALSE + "cluster-#name", + "cluster:#name", + + // More than 1 `:` character should return FALSE + "bad:cluster:name" + ) + @Test fun `test valid ClusterMetricsInput creation using HTTP URI component fields`() { // GIVEN @@ -21,6 +41,7 @@ class ClusterMetricsInputTests { assertEquals(path, clusterMetricsInput.path) assertEquals(pathParams, clusterMetricsInput.pathParams) assertEquals(testUrl, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -34,6 +55,7 @@ class ClusterMetricsInputTests { // THEN assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -47,6 +69,7 @@ class ClusterMetricsInputTests { // THEN assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -84,6 +107,7 @@ class ClusterMetricsInputTests { assertEquals(pathParams, clusterMetricsInput.pathParams) assertEquals(url, clusterMetricsInput.url) assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -101,6 +125,7 @@ class ClusterMetricsInputTests { assertEquals(pathParams, clusterMetricsInput.pathParams) assertEquals(url, clusterMetricsInput.url) assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -200,6 +225,7 @@ class ClusterMetricsInputTests { // THEN assertEquals(pathParams, params) assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -216,6 +242,7 @@ class ClusterMetricsInputTests { // THEN assertEquals(pathParams, params) assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -232,6 +259,7 @@ class ClusterMetricsInputTests { // THEN assertEquals(testParams, params) assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -422,6 +450,7 @@ class ClusterMetricsInputTests { assertEquals(testPath, clusterMetricsInput.path) assertEquals(testPathParams, clusterMetricsInput.pathParams) assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) } @Test @@ -438,5 +467,128 @@ class 
ClusterMetricsInputTests { assertEquals(path, clusterMetricsInput.path) assertEquals(pathParams, clusterMetricsInput.pathParams) assertEquals(testUrl, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test a single valid cluster`() { + validClusters.forEach { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = listOf(it) + + // WHEN + val clusterMetricsInput = ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(clusters, clusterMetricsInput.clusters) + } + } + + @Test + fun `test multiple valid clusters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = validClusters + + // WHEN + val clusterMetricsInput = ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(clusters, clusterMetricsInput.clusters) + } + + @Test + fun `test a single invalid cluster`() { + invalidClusters.forEach { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = listOf(it) + + // WHEN + THEN + assertFailsWith<IllegalArgumentException>("The API could not be determined from the provided URI.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + } + } + } + + @Test + fun `test multiple invalid clusters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = invalidClusters + + // WHEN + THEN + assertFailsWith<IllegalArgumentException>("The API could not be determined from the provided URI.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + } + } + + @Test + fun `test url field contains invalid characters`() { + // GIVEN + path = "" + pathParams = "" + url = "http://localhost:9200/${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString("")}" + + // WHEN + THEN + assertFailsWith<IllegalArgumentException>("Invalid URL syntax.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = listOf() + ) + } + } + + @Test + fun `test URI fields provided and url contains invalid characters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200/${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString("")}" + + // WHEN + THEN + assertFailsWith<IllegalArgumentException>("Invalid URL syntax.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = listOf() + ) + } + } } diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt new file mode 100644 index 00000000..5f67f2a3 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt @@ -0,0 +1,36 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DataSourcesTests { + @Test + fun `Test DataSources construction with no comments indices`() {
val dataSources = DataSources( + ScheduledJob.DOC_LEVEL_QUERIES_INDEX, + ".opensearch-alerting-finding-history-write", + "<.opensearch-alerting-finding-history-{now/d}-1>", + ".opendistro-alerting-alerts", + ".opendistro-alerting-alert-history-write", + "<.opendistro-alerting-alert-history-{now/d}-1>", + mapOf(), + false + ) + Assertions.assertNotNull(dataSources) + + val out = BytesStreamOutput() + dataSources.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDataSources = DataSources(sin) + Assertions.assertEquals(ScheduledJob.DOC_LEVEL_QUERIES_INDEX, newDataSources.queryIndex) + Assertions.assertEquals(".opensearch-alerting-finding-history-write", newDataSources.findingsIndex) + Assertions.assertEquals("<.opensearch-alerting-finding-history-{now/d}-1>", newDataSources.findingsIndexPattern) + Assertions.assertEquals(".opendistro-alerting-alerts", newDataSources.alertsIndex) + Assertions.assertEquals(".opendistro-alerting-alert-history-write", newDataSources.alertsHistoryIndex) + Assertions.assertEquals("<.opendistro-alerting-alert-history-{now/d}-1>", newDataSources.alertsHistoryIndexPattern) + Assertions.assertEquals(mapOf<String, Map<String, String>>(), newDataSources.queryIndexMappingsByType) + Assertions.assertEquals(false, newDataSources.findingsEnabled) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt index 7110d925..e99dc3c8 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt @@ -43,14 +43,33 @@ class DocLevelMonitorInputTests { } @Test - fun `test create Doc Level Query with invalid characters for name`() { - val badString = "query with space" + fun `test create Doc Level Query with invalid name length`() { + val stringBuilder = StringBuilder() + + // test empty string + val emptyString = stringBuilder.toString() + try { + randomDocLevelQuery(name = emptyString) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + Assertions.assertEquals( + "The query name, $emptyString, should be between 1 - 256 characters.", + e.message + ) + } + + // test string with 257 chars + repeat(257) { + stringBuilder.append("a") + } + val badString = stringBuilder.toString() + try { randomDocLevelQuery(name = badString) Assertions.fail("Expecting an illegal argument exception") } catch (e: IllegalArgumentException) { Assertions.assertEquals( - "They query name or tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", + "The query name, $badString, should be between 1 - 256 characters.", e.message ) } @@ -65,7 +84,7 @@ class DocLevelMonitorInputTests { Assertions.fail("Expecting an illegal argument exception") } catch (e: IllegalArgumentException) { Assertions.assertEquals( - "They query name or tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", + "The query tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", e.message ) } diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt new file mode 100644 index 00000000..7375223c --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt @@ -0,0 +1,48 @@ +package
org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.CONDITION_FIELD +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class DocumentLevelTriggerTests { + + @Test + fun `test DocumentLevelTrigger asTemplateArgs`() { + val trigger = randomDocumentLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.condition.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.condition.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt new file mode 100644 index 00000000..9529e44d --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt @@ -0,0 +1,49 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.alerting.util.IndexUtils +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class MonitorsTests { + + @Test + fun `test monitor asTemplateArgs`() { + val monitor = randomQueryLevelMonitor(enabled = true) + + val templateArgs = monitor.asTemplateArg() + + assertEquals(monitor.id, templateArgs[IndexUtils._ID], "Template arg field 'id' doesn't match") + assertEquals( + monitor.version, + templateArgs[IndexUtils._VERSION], + "Template arg field 'version' doesn't match" + ) + assertEquals(monitor.name, templateArgs[Monitor.NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals( + monitor.enabled, + templateArgs[Monitor.ENABLED_FIELD], + "Template arg field 'enabled' doesn't match" + ) + assertEquals( + monitor.monitorType.toString(), + templateArgs[Monitor.MONITOR_TYPE_FIELD], + "Template arg field 'monitoryType' doesn't match" + ) + assertEquals( + monitor.enabledTime?.toEpochMilli(), + templateArgs[Monitor.ENABLED_TIME_FIELD], + "Template arg field 'enabledTime' doesn't 
match" + ) + assertEquals( + monitor.lastUpdateTime.toEpochMilli(), + templateArgs[Monitor.LAST_UPDATE_TIME_FIELD], + "Template arg field 'lastUpdateTime' doesn't match" + ) + assertNotNull(templateArgs[Monitor.SCHEDULE_FIELD], "Template arg field 'schedule' not set") + val inputs = templateArgs[Monitor.INPUTS_FIELD] as? List<*> + assertNotNull(inputs, "Template arg field 'inputs' not set") + assertEquals(1, inputs.size, "Template arg field 'inputs' is not populated") + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt new file mode 100644 index 00000000..824e1b1e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt @@ -0,0 +1,48 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.CONDITION_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomQueryLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class QueryLevelTriggerTests { + + @Test + fun `test QueryLevelTrigger asTemplateArgs`() { + val trigger = randomQueryLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? 
Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.condition.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.condition.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt index 590dda04..e0423d56 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt @@ -1,6 +1,12 @@ package org.opensearch.commons.alerting.model import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Schedule.Companion.CRON_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.EXPRESSION_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.INTERVAL_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.PERIOD_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.TIMEZONE_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.UNIT_FIELD import org.opensearch.commons.alerting.util.string import org.opensearch.core.xcontent.ToXContent import java.time.Instant @@ -67,7 +73,8 @@ class ScheduleTest : XContentTestBase { val cronSchedule = CronSchedule(cronExpression, ZoneId.of("America/Los_Angeles")) // The nextTimeToExecute should be the minute after the previous execution time instance, not enabledTimeInstance - val nextTimeToExecute = cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) + val nextTimeToExecute = + cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) assertNotNull(nextTimeToExecute, "There should be next execute time") assertEquals( previousExecutionTimeInstance.plusSeconds(2L), @@ -107,7 +114,8 @@ class ScheduleTest : XContentTestBase { val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) // The nextTimeToExecute should be the minute after the previous execution time instance - val nextTimeToExecute = intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) + val nextTimeToExecute = + intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) assertNotNull(nextTimeToExecute, "There should be next execute time") assertEquals( previousExecutionTimeInstance.plusSeconds(60L), @@ -165,12 +173,19 @@ class ScheduleTest : XContentTestBase { @Test fun `test invalid type`() { val scheduleString = "{\"foobarzzz\":{\"expression\":\"0 * * * *\",\"timezone\":\"+++9\"}}" - assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { Schedule.parse(parser(scheduleString)) } + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse( + parser( + scheduleString + ) + ) + } } @Test fun `test two types`() { - val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Asia/Tokyo\"}, \"period\":{\"interval\":\"1\",\"unit\":\"Minutes\"}}" + val scheduleString = + "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Asia/Tokyo\"}, \"period\":{\"interval\":\"1\",\"unit\":\"Minutes\"}}" assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { Schedule.parse(parser(scheduleString)) } @@ -335,4 +350,44 @@ 
class ScheduleTest : XContentTestBase { IntervalSchedule(-1, ChronoUnit.MINUTES) } } + + @Test + fun `test IntervalSchedule as asTemplateArgs`() { + val schedule = createTestIntervalSchedule() + + val templateArgs = schedule.asTemplateArg() + + val period = templateArgs[PERIOD_FIELD] as? Map<*, *> + assertNotNull(period, "Template arg field 'period' is empty") + assertEquals( + schedule.interval, + period[INTERVAL_FIELD], + "Template arg field 'interval' doesn't match" + ) + assertEquals( + schedule.unit.toString(), + period[UNIT_FIELD], + "Template arg field 'unit' doesn't match" + ) + } + + @Test + fun `test CronSchedule as asTemplateArgs`() { + val schedule = createTestCronSchedule() + + val templateArgs = schedule.asTemplateArg() + + val cron = templateArgs[CRON_FIELD] as? Map<*, *> + assertNotNull(cron, "Template arg field 'cron' is empty") + assertEquals( + schedule.expression, + cron[EXPRESSION_FIELD], + "Template arg field 'expression' doesn't match" + ) + assertEquals( + schedule.timezone.toString(), + cron[TIMEZONE_FIELD], + "Template arg field 'timezone' doesn't match" + ) + } } diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt new file mode 100644 index 00000000..0fc0f656 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt @@ -0,0 +1,32 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.SearchInput.Companion.INDICES_FIELD +import org.opensearch.commons.alerting.model.SearchInput.Companion.QUERY_FIELD +import org.opensearch.commons.alerting.model.SearchInput.Companion.SEARCH_FIELD +import org.opensearch.commons.alerting.randomSearchInput +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class SearchInputTests { + + @Test + fun `test SearchInput asTemplateArgs`() { + val searchInput = randomSearchInput() + + val templateArgs = searchInput.asTemplateArg() + + val search = templateArgs[SEARCH_FIELD] as? 
Map<*, *> + assertNotNull(search, "Template arg field 'search' is empty") + assertEquals( + searchInput.indices, + search[INDICES_FIELD], + "Template arg field 'indices' doesn't match" + ) + assertEquals( + searchInput.query.toString(), + search[QUERY_FIELD], + "Template arg field 'query' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt index e81e59bd..170317b2 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt @@ -1,25 +1,43 @@ package org.opensearch.commons.alerting.model +import org.junit.Assert import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test +import org.opensearch.common.UUIDs import org.opensearch.common.io.stream.BytesStreamOutput import org.opensearch.commons.alerting.model.action.Action import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger import org.opensearch.commons.alerting.randomAction import org.opensearch.commons.alerting.randomActionExecutionPolicy +import org.opensearch.commons.alerting.randomBucketLevelMonitorRunResult import org.opensearch.commons.alerting.randomBucketLevelTrigger +import org.opensearch.commons.alerting.randomBucketLevelTriggerRunResult import org.opensearch.commons.alerting.randomChainedAlertTrigger import org.opensearch.commons.alerting.randomDocLevelQuery +import org.opensearch.commons.alerting.randomDocumentLevelMonitorRunResult import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import org.opensearch.commons.alerting.randomInputRunResults import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.alerting.randomQueryLevelMonitorRunResult import org.opensearch.commons.alerting.randomQueryLevelTrigger +import org.opensearch.commons.alerting.randomQueryLevelTriggerRunResult import org.opensearch.commons.alerting.randomThrottle import org.opensearch.commons.alerting.randomUser import org.opensearch.commons.alerting.randomUserEmpty +import org.opensearch.commons.alerting.util.IndexUtils import org.opensearch.commons.authuser.User import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import java.io.IOException +import java.time.Instant +import java.time.temporal.ChronoUnit import kotlin.test.assertTrue class WriteableTests { @@ -190,4 +208,281 @@ class WriteableTests { "Round tripping ActionExecutionPolicy doesn't work" ) } + + @Test + fun `test Comment object`() { + val user = randomUser() + val createdTime = Instant.now() + val comment = Comment( + "123", + "456", + "alert", + "content", + createdTime, + null, + user + ) + Assertions.assertNotNull(comment) + val out = BytesStreamOutput() + comment.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newComment = Comment(sin) + Assertions.assertEquals("123", newComment.id) + Assertions.assertEquals("456", 
newComment.entityId) + Assertions.assertEquals("alert", newComment.entityType) + Assertions.assertEquals("content", newComment.content) + Assertions.assertEquals(createdTime, newComment.createdTime) + Assertions.assertEquals(user, newComment.user) + } + + @Test + fun `test actionrunresult as stream`() { + val actionRunResult = randomActionRunResult() + val out = BytesStreamOutput() + actionRunResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newActionRunResult = ActionRunResult(sin) + OpenSearchTestCase.assertEquals( + "Round tripping ActionRunResult doesn't work", + actionRunResult, + newActionRunResult + ) + } + + @Test + fun `test query-level triggerrunresult as stream`() { + val runResult = randomQueryLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = QueryLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals(runResult.triggerName, newRunResult.triggerName) + OpenSearchTestCase.assertEquals(runResult.triggered, newRunResult.triggered) + OpenSearchTestCase.assertEquals(runResult.error, newRunResult.error) + OpenSearchTestCase.assertEquals(runResult.actionResults, newRunResult.actionResults) + } + + @Test + fun `test bucket-level triggerrunresult as stream`() { + val runResult = randomBucketLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = BucketLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals("Round tripping ActionRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test doc-level triggerrunresult as stream`() { + val runResult = randomDocumentLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = DocumentLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals("Round tripping ActionRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test inputrunresult as stream`() { + val runResult = randomInputRunResults() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = InputRunResults.readFrom(sin) + OpenSearchTestCase.assertEquals("Round tripping InputRunResults doesn't work", runResult, newRunResult) + } + + @Test + fun `test query-level monitorrunresult as stream`() { + val runResult = randomQueryLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = MonitorRunResult<QueryLevelTriggerRunResult>(sin) + OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test bucket-level monitorrunresult as stream`() { + val runResult = randomBucketLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = MonitorRunResult<BucketLevelTriggerRunResult>(sin) + OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test doc-level monitorrunresult as stream`() { + val runResult = randomDocumentLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = MonitorRunResult<DocumentLevelTriggerRunResult>(sin) +
OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test DocumentLevelTriggerRunResult as stream`() { + val workflow = randomDocumentLevelTriggerRunResult() + val out = BytesStreamOutput() + workflow.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newWorkflow = DocumentLevelTriggerRunResult(sin) + Assert.assertEquals("Round tripping dltrr failed", newWorkflow, workflow) + } + + @Test + fun `test RemoteMonitorInput as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val remoteMonitorInput = RemoteMonitorInput(myObjOut.bytes()) + + val out = BytesStreamOutput() + remoteMonitorInput.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteMonitorInput = RemoteMonitorInput(sin) + val newMyMonitorInput = MyMonitorInput(StreamInput.wrap(newRemoteMonitorInput.input.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorInput failed", newMyMonitorInput, myMonitorInput) + } + + @Test + fun `test RemoteMonitorTrigger as stream`() { + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorTrigger.writeTo(myObjOut) + val remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val out = BytesStreamOutput() + remoteMonitorTrigger.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteMonitorTrigger = RemoteMonitorTrigger(sin) + val newMyMonitorTrigger = MyMonitorTrigger(StreamInput.wrap(newRemoteMonitorTrigger.trigger.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorTrigger failed", newMyMonitorTrigger, myMonitorTrigger) + } + + @Test + fun `test RemoteDocLevelMonitorInput as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val out = BytesStreamOutput() + remoteDocLevelMonitorInput.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(sin) + val newMyMonitorInput = MyMonitorInput(StreamInput.wrap(newRemoteDocLevelMonitorInput.input.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorInput failed", newMyMonitorInput, myMonitorInput) + val newDocLevelMonitorInput = newRemoteDocLevelMonitorInput.docLevelMonitorInput + Assert.assertEquals("Round tripping DocLevelMonitorInput failed", newDocLevelMonitorInput, docLevelMonitorInput) + } + + @Test + fun `test RemoteMonitor as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + var myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + myObjOut = BytesStreamOutput() + myMonitorTrigger.writeTo(myObjOut) + val 
remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val monitor = Monitor( + Monitor.NO_ID, + Monitor.NO_VERSION, + "hello", + true, + IntervalSchedule(1, ChronoUnit.MINUTES), + Instant.now(), + Instant.now(), + "remote_doc_level_monitor", + null, + IndexUtils.NO_SCHEMA_VERSION, + listOf(remoteDocLevelMonitorInput), + listOf(remoteMonitorTrigger), + mapOf() + ) + + val out = BytesStreamOutput() + monitor.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newMonitor = Monitor(sin) + Assert.assertEquals("Round tripping RemoteMonitor failed", monitor, newMonitor) + } + + fun randomDocumentLevelTriggerRunResult(): DocumentLevelTriggerRunResult { + val map = mutableMapOf<String, ActionRunResult>() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return DocumentLevelTriggerRunResult( + "trigger-name", + mutableListOf(UUIDs.randomBase64UUID().toString()), + null, + mutableMapOf(Pair("alertId", map)) + ) + } + + fun randomActionRunResult(): ActionRunResult { + val map = mutableMapOf<String, String>() + map.plus(Pair("key1", "val1")) + map.plus(Pair("key2", "val2")) + return ActionRunResult( + "1234", + "test-action", + map, + false, + Instant.now(), + null + ) + } +} + +data class MyMonitorInput(val a: Int, val b: String, val c: MyMonitorInput?) : Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInt(), + sin.readString(), + sin.readOptionalWriteable { MyMonitorInput(it) } + ) + + override fun writeTo(out: StreamOutput) { + out.writeInt(a) + out.writeString(b) + out.writeOptionalWriteable(c) + } +} + +data class MyMonitorTrigger(val a: Int, val b: String, val c: MyMonitorTrigger?) : Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInt(), + sin.readString(), + sin.readOptionalWriteable { MyMonitorTrigger(it) } + ) + + override fun writeTo(out: StreamOutput) { + out.writeInt(a) + out.writeString(b) + out.writeOptionalWriteable(c) + } } diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt index 065191fb..42e5ab53 100644 --- a/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt @@ -3,11 +3,17 @@ package org.opensearch.commons.alerting.model import org.junit.Assert.assertEquals import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.json.JsonXContent import org.opensearch.commons.alerting.builder import org.opensearch.commons.alerting.model.action.Action import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy import org.opensearch.commons.alerting.model.action.PerExecutionActionScope import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger import org.opensearch.commons.alerting.parser import org.opensearch.commons.alerting.randomAction import org.opensearch.commons.alerting.randomActionExecutionPolicy @@ -16,6 +22,7 @@ import org.opensearch.commons.alerting.randomActionWithPolicy import
org.opensearch.commons.alerting.randomAlert import org.opensearch.commons.alerting.randomBucketLevelMonitor import org.opensearch.commons.alerting.randomBucketLevelTrigger +import org.opensearch.commons.alerting.randomDocLevelQuery import org.opensearch.commons.alerting.randomQueryLevelMonitor import org.opensearch.commons.alerting.randomQueryLevelMonitorWithoutUser import org.opensearch.commons.alerting.randomQueryLevelTrigger @@ -27,6 +34,7 @@ import org.opensearch.commons.alerting.toJsonString import org.opensearch.commons.alerting.toJsonStringWithUser import org.opensearch.commons.alerting.util.string import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput import org.opensearch.core.xcontent.ToXContent import org.opensearch.index.query.QueryBuilders import org.opensearch.search.builder.SearchSourceBuilder @@ -367,7 +375,7 @@ class XContentTests { """.trimIndent() val parsedMonitor = Monitor.parse(parser(monitorString)) Assertions.assertEquals( - Monitor.MonitorType.QUERY_LEVEL_MONITOR, + Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, parsedMonitor.monitorType, "Incorrect monitor type" ) @@ -506,4 +514,101 @@ class XContentTests { assertEquals("Round tripping alert doesn't work", actionExecutionResult, parsedActionExecutionResultString) } + + @Test + fun `test DataSources parsing`() { + val dataSources = DataSources( + ScheduledJob.DOC_LEVEL_QUERIES_INDEX, + ".opensearch-alerting-finding-history-write", + "<.opensearch-alerting-finding-history-{now/d}-1>", + ".opendistro-alerting-alerts", + ".opendistro-alerting-alert-history-write", + "<.opendistro-alerting-alert-history-{now/d}-1>", + mapOf(), + false + ) + Assertions.assertNotNull(dataSources) + + val dataSourcesString = dataSources.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedDataSources = DataSources.parse(parser(dataSourcesString)) + Assertions.assertEquals(dataSources, parsedDataSources, "Round tripping DataSources doesn't work") + } + + @Test + fun `test Comment parsing`() { + val comment = Comment( + "123", + "456", + "alert", + "content", + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + randomUser() + ) + Assertions.assertNotNull(comment) + + val commentString = comment.toXContentWithUser(builder()).string() + val parsedComment = Comment.parse(parser(commentString), "123") + Assertions.assertEquals(comment, parsedComment, "Round tripping Comment doesn't work") + } + + @Test + fun `test MonitorMetadata`() { + val monitorMetadata = MonitorMetadata( + id = "monitorId-metadata", + monitorId = "monitorId", + lastActionExecutionTimes = emptyList(), + lastRunContext = emptyMap(), + sourceToQueryIndexMapping = mutableMapOf() + ) + val monitorMetadataString = monitorMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedMonitorMetadata = MonitorMetadata.parse(parser(monitorMetadataString)) + assertEquals("Round tripping MonitorMetadata doesn't work", monitorMetadata, parsedMonitorMetadata) + } + + @Test + fun `test RemoteMonitorInput`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val remoteMonitorInput = RemoteMonitorInput(myObjOut.bytes()) + + val xContent = remoteMonitorInput.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteMonitorInput = RemoteMonitorInput.parse(parser(xContent)) + val parsedMyMonitorInput = 
MyMonitorInput(StreamInput.wrap(parsedRemoteMonitorInput.input.toBytesRef().bytes)) + assertEquals("Round tripping RemoteMonitorInput doesn't work", myMonitorInput, parsedMyMonitorInput) + } + + @Test + fun `test RemoteMonitorTrigger`() { + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorTrigger.writeTo(myObjOut) + val remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val xContent = remoteMonitorTrigger.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteMonitorTrigger = Trigger.parse(parser(xContent)) as RemoteMonitorTrigger + val parsedMyMonitorTrigger = MyMonitorTrigger(StreamInput.wrap(parsedRemoteMonitorTrigger.trigger.toBytesRef().bytes)) + assertEquals("Round tripping RemoteMonitorTrigger doesn't work", myMonitorTrigger, parsedMyMonitorTrigger) + } + + @Test + fun `test RemoteDocLevelMonitorInput`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val xContent = remoteDocLevelMonitorInput.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteDocLevelMonitorInput = RemoteDocLevelMonitorInput.parse(parser(xContent)) + val parsedMyMonitorInput = MyMonitorInput(StreamInput.wrap(parsedRemoteDocLevelMonitorInput.input.toBytesRef().bytes)) + assertEquals("Round tripping RemoteDocLevelMonitorInput doesn't work", myMonitorInput, parsedMyMonitorInput) + val parsedDocLevelMonitorInput = parsedRemoteDocLevelMonitorInput.docLevelMonitorInput + assertEquals("Round tripping RemoteDocLevelMonitorInput doesn't work", docLevelMonitorInput, parsedDocLevelMonitorInput) + } } diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt new file mode 100644 index 00000000..dcbf8998 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt @@ -0,0 +1,40 @@ +package org.opensearch.commons.alerting.model.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.action.Action.Companion.DESTINATION_ID_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.THROTTLE_ENABLED_FIELD +import org.opensearch.commons.alerting.randomAction + +class ActionTests { + + @Test + fun `test action asTemplateArgs`() { + val action = randomAction() + + val templateArgs = action.asTemplateArg() + + assertEquals( + action.id, + templateArgs[ID_FIELD], + "Template arg field 'id' doesn't match" + ) + assertEquals( + action.name, + templateArgs[NAME_FIELD], + "Template arg field 'name' doesn't match" + ) + assertEquals( + action.destinationId, + templateArgs[DESTINATION_ID_FIELD], + "Template arg field 'destinationId' doesn't match" + ) + assertEquals( + action.throttleEnabled, + templateArgs[THROTTLE_ENABLED_FIELD], + "Template arg field 'throttleEnabled' doesn't match" + ) + } +}
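The new stream-serialization tests in this patch all exercise the same Writeable round-trip pattern: write the object to a BytesStreamOutput, wrap the resulting bytes in a StreamInput, rebuild the object from the stream, and assert equality with the original. A minimal self-contained sketch of that pattern follows, reusing the MyMonitorInput helper added in WriteableTests.kt above; the test class name below is illustrative only and is not part of the patch.

import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.opensearch.common.io.stream.BytesStreamOutput
import org.opensearch.core.common.io.stream.StreamInput

class WriteableRoundTripSketch {

    @Test
    fun `round trip a nested MyMonitorInput`() {
        // GIVEN a nested Writeable (MyMonitorInput is defined in WriteableTests.kt in this change)
        val original = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null))

        // WHEN it is written to a stream and read back from the serialized bytes
        val out = BytesStreamOutput()
        original.writeTo(out)
        val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes)
        val roundTripped = MyMonitorInput(sin)

        // THEN the reconstructed object equals the original
        Assertions.assertEquals(original, roundTripped, "Round tripping MyMonitorInput doesn't work")
    }
}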