diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 00000000..e711a04d --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,12 @@ +codecov: + require_ci_to_pass: yes + +coverage: + precision: 2 + round: down + range: "75...100" + status: + project: + default: + target: 75% # the required coverage value + threshold: 1% # the leniency in hitting the target \ No newline at end of file diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..b57ed5c8 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +# This should match the owning team set up in https://github.com/orgs/opensearch-project/teams +* @opensearch-project/common-utils \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 8af6ebb5..29eddb95 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,33 +1,31 @@ --- name: 🐛 Bug report about: Create a report to help us improve -title: "[BUG]" -labels: 'bug, untriaged, Beta' +title: '[BUG]' +labels: 'bug, untriaged' assignees: '' --- -**Describe the bug** -A clear and concise description of what the bug is. +**What is the bug?** +A clear and concise description of the bug. -**To Reproduce** +**How can one reproduce the bug?** Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error -**Expected behavior** +**What is the expected behavior?** A clear and concise description of what you expected to happen. -**Plugins** -Please list all plugins currently enabled. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Host/Environment (please complete the following information):** +**What is your host/environment?** - OS: [e.g. iOS] - Version [e.g. 22] + - Plugins + +**Do you have any screenshots?** +If applicable, add screenshots to help explain your problem. -**Additional context** -Add any other context about the problem here. \ No newline at end of file +**Do you have any additional context?** +Add any other context about the problem. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..a8199a10 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,7 @@ +contact_links: + - name: OpenSearch Community Support + url: https://discuss.opendistrocommunity.dev/ + about: Please ask and answer questions here. + - name: AWS/Amazon Security + url: https://aws.amazon.com/security/vulnerability-reporting/ + about: Please report security vulnerabilities here. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 2791b808..6198f338 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,19 +1,18 @@ --- name: 🎆 Feature request -about: Suggest an idea for this project -title: '' -labels: enhancement +about: Request a feature in this project +title: '[FEATURE]' +labels: 'enhancement, untriaged' assignees: '' --- +**Is your feature request related to a problem?** +A clear and concise description of what the problem is, e.g. _I'm always frustrated when [...]_ -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** +**What solution would you like?** A clear and concise description of what you want to happen. 
-**Describe alternatives you've considered** +**What alternatives have you considered?** A clear and concise description of any alternative solutions or features you've considered. -**Additional context** +**Do you have any additional context?** Add any other context or screenshots about the feature request here. \ No newline at end of file diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 00000000..4453d652 --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,29 @@ +name: Releases + +on: + push: + tags: + - '*' + +jobs: + + build: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + - name: Get tag + id: tag + uses: dawidd6/action-get-tag@v1 + - uses: actions/checkout@v2 + - uses: ncipollo/release-action@v1 + with: + github_token: ${{ steps.github_app_token.outputs.token }} + bodyFile: release-notes/opensearch-common-utils.release-notes-${{steps.tag.outputs.tag}}.md \ No newline at end of file diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 00000000..56fef507 --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,31 @@ + +name: Backport +on: + pull_request_target: + types: + - closed + - labeled + +jobs: + backport: + runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: write + name: Backport + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + + - name: Backport + uses: VachaShah/backport@v2.2.0 + with: + github_token: ${{ steps.github_app_token.outputs.token }} + branch_name: backport/backport-${{ github.event.number }} + labels_template: "<%= JSON.stringify([...labels, 'autocut']) %>" + failure_labels: "failed backport" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f49c13b9..c0b1353f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,15 +6,31 @@ on: pull_request: branches: - "*" +env: + ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true jobs: - build: + Get-CI-Image-Tag: + uses: opensearch-project/opensearch-build/.github/workflows/get-ci-image-tag.yml@main + with: + product: opensearch + + build-linux: + needs: Get-CI-Image-Tag strategy: matrix: - java: [14] - + java: + - 11 + - 17 + - 21 name: Build and Test runs-on: ubuntu-latest + container: + # using the same image which is used by opensearch-build team to build the OpenSearch Distribution + # this image tag is subject to change as more dependencies and updates will arrive over time + image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }} + # need to switch to root so that github actions can install runner binary on container without permission issues. 
+ options: --user root steps: - name: Checkout @@ -25,29 +41,46 @@ jobs: with: java-version: ${{ matrix.java }} - # dependencies: OpenSearch - - name: Checkout OpenSearch + - name: Build and Test + run: | + chown -R 1000:1000 `pwd` + su `id -un 1000` -c "./gradlew build && + ./gradlew publishToMavenLocal" + + - name: Upload Coverage Report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + build-windows: + strategy: + matrix: + java: + - 11 + - 17 + - 21 + name: Build and Test + runs-on: windows-latest + + steps: + - name: Checkout uses: actions/checkout@v2 + + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v1 with: - repository: 'opensearch-project/OpenSearch' - path: OpenSearch - ref: '1.0' - - name: Build OpenSearch - working-directory: ./OpenSearch - run: ./gradlew publishToMavenLocal -Dbuild.snapshot=false - - # common-utils + java-version: ${{ matrix.java }} + - name: Build and Test run: | - ./gradlew build -Dopensearch.version=1.0.0 + ./gradlew build - name: Publish to Maven Local run: | - ./gradlew publishToMavenLocal -Dopensearch.version=1.0.0 + ./gradlew publishToMavenLocal - name: Upload Coverage Report uses: codecov/codecov-action@v1 with: - file: ./build/reports/jacoco/test/jacocoTestReport.xml - flags: plugin + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/delete_backport_branch.yml b/.github/workflows/delete_backport_branch.yml new file mode 100644 index 00000000..f24f022b --- /dev/null +++ b/.github/workflows/delete_backport_branch.yml @@ -0,0 +1,15 @@ +name: Delete merged branch of the backport PRs +on: + pull_request: + types: + - closed + +jobs: + delete-branch: + runs-on: ubuntu-latest + if: startsWith(github.event.pull_request.head.ref,'backport/') + steps: + - name: Delete merged branch + uses: SvanBoxel/delete-merged-branch@main + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml new file mode 100644 index 00000000..be57f77d --- /dev/null +++ b/.github/workflows/maven-publish.yml @@ -0,0 +1,37 @@ +name: Publish snapshots to maven + +on: + workflow_dispatch: + push: + branches: + - main + - '[0-9]+.[0-9]+' + - '[0-9]+.x' +jobs: + build-and-publish-snapshots: + strategy: + fail-fast: false + if: github.repository == 'opensearch-project/common-utils' + runs-on: ubuntu-latest + + permissions: + id-token: write + contents: write + + steps: + - uses: actions/setup-java@v3 + with: + distribution: temurin # Temurin is a distribution of adoptium + java-version: 17 + - uses: actions/checkout@v3 + - uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }} + aws-region: us-east-1 + - name: publish snapshots to maven + run: | + export SONATYPE_USERNAME=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-username --query SecretString --output text) + export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text) + echo "::add-mask::$SONATYPE_USERNAME" + echo "::add-mask::$SONATYPE_PASSWORD" + ./gradlew publishShadowPublicationToSnapshotsRepository diff --git a/.github/workflows/push-common-utils-jar.yml b/.github/workflows/push-common-utils-jar.yml deleted file mode 100644 index 6a4bc703..00000000 --- a/.github/workflows/push-common-utils-jar.yml +++ /dev/null @@ -1,44 +0,0 @@ -name: Upload Common-Utils Jar to Maven - -on: - push: - tags: - - v* -jobs: - upload-common-utils-jar: - 
runs-on: [ubuntu-16.04] - name: Upload common-utils Jar to Maven - steps: - - name: Checkout Repo - uses: actions/checkout@v2 - - - name: Configure AWS CLI - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-1 - - - name: Setup Java - uses: actions/setup-java@v1 - with: - java-version: '14' - - - name: Upload common-utils Jar to Maven - env: - passphrase: ${{ secrets.PASSPHRASE }} - run: | - cd .. - export JAVA14_HOME=$JAVA_HOME - aws s3 cp s3://opendistro-docs/github-actions/pgp-public-key . - aws s3 cp s3://opendistro-docs/github-actions/pgp-private-key . - - gpg --import pgp-public-key - gpg --allow-secret-key-import --import pgp-private-key - - mkdir /home/runner/.gradle - aws s3 cp s3://opendistro-docs/github-actions/gradle.properties /home/runner/.gradle/ - - cd common-utils - - ./gradlew publishShadowPublicationToSonatype-stagingRepository -Dcompiler.java=14 -Dbuild.snapshot=false -Djavax.net.ssl.trustStore=$JAVA_HOME/lib/security/cacerts diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml new file mode 100644 index 00000000..6c96199a --- /dev/null +++ b/.github/workflows/version.yml @@ -0,0 +1,53 @@ +name: Increment Version + +on: + push: + tags: + - '*.*.*.*' + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + + - uses: actions/checkout@v2 + - name: Fetch Tag and Version Information + run: | + TAG=$(echo "${GITHUB_REF#refs/*/}") + CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n')) + BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}") + CURRENT_VERSION=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:3}") + CURRENT_VERSION_ARRAY[2]=$((CURRENT_VERSION_ARRAY[2]+1)) + NEXT_VERSION=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:3}") + echo "TAG=$TAG" >> $GITHUB_ENV + echo "BASE=$BASE" >> $GITHUB_ENV + echo "CURRENT_VERSION=$CURRENT_VERSION" >> $GITHUB_ENV + echo "NEXT_VERSION=$NEXT_VERSION" >> $GITHUB_ENV + - uses: actions/checkout@v2 + with: + ref: ${{ env.BASE }} + token: ${{ steps.github_app_token.outputs.token }} + + - name: Increment Version + run: | + echo Incrementing $CURRENT_VERSION to $NEXT_VERSION + sed -i "s/$CURRENT_VERSION-SNAPSHOT/$NEXT_VERSION-SNAPSHOT/g" build.gradle + + - name: Create Pull Request + uses: peter-evans/create-pull-request@v3 + with: + token: ${{ steps.github_app_token.outputs.token }} + base: ${{ env.BASE }} + commit-message: Incremented version to ${{ env.NEXT_VERSION }} + signoff: true + delete-branch: true + title: '[AUTO] Incremented version to ${{ env.NEXT_VERSION }}.' + body: | + I've noticed that a new tag ${{ env.TAG }} was pushed, and incremented the version from ${{ env.CURRENT_VERSION }} to ${{ env.NEXT_VERSION }}. 
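The `Fetch Tag and Version Information` step above derives everything from the pushed tag with plain shell string handling. As a minimal sketch of that same arithmetic (runnable locally in bash; the sample tag `2.17.2.0` is only an illustration, not a real release), it behaves like this:

```bash
#!/usr/bin/env bash
# Mirrors the tag/version arithmetic from the version.yml step above.
# The sample tag is a placeholder; in the workflow it comes from ${GITHUB_REF#refs/*/}.
TAG="2.17.2.0"
CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n'))                 # -> (2 17 2 0)
BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}")             # 2.17   -> branch the PR is raised against
CURRENT_VERSION=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:3}")  # 2.17.2 -> version that was just tagged
CURRENT_VERSION_ARRAY[2]=$((CURRENT_VERSION_ARRAY[2]+1))           # bump the patch component
NEXT_VERSION=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:3}")     # 2.17.3 -> next development iteration
echo "BASE=$BASE CURRENT_VERSION=$CURRENT_VERSION NEXT_VERSION=$NEXT_VERSION"
```

The later `sed` step then rewrites `$CURRENT_VERSION-SNAPSHOT` to `$NEXT_VERSION-SNAPSHOT` in `build.gradle`, and the generated pull request targets the `$BASE` branch.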
diff --git a/.whitesource b/.whitesource new file mode 100644 index 00000000..db4b0fec --- /dev/null +++ b/.whitesource @@ -0,0 +1,15 @@ +{ + "scanSettings": { + "configMode": "AUTO", + "configExternalURL": "", + "projectToken": "", + "baseBranches": [] + }, + "checkRunSettings": { + "vulnerableCheckRunConclusionLevel": "failure", + "displayMode": "diff" + }, + "issueSettings": { + "minSeverityLevel": "LOW" + } +} \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c970f9d3..c25787ad 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,51 @@ ## Contributing to this Project OpenSearch is a community project that is built and maintained by people just like **you**. -[This document](https://github.com/opensearch-project/.github/blob/main/CONTRIBUTING.md) explains how you can contribute to this and related projects. \ No newline at end of file +[This document](https://github.com/opensearch-project/.github/blob/main/CONTRIBUTING.md) explains how you can contribute to this and related projects. + +## Developer Certificate of Origin + +OpenSearch is an open source product released under the Apache 2.0 license (see either [the Apache site](https://www.apache.org/licenses/LICENSE-2.0) or the [LICENSE.txt file](LICENSE.txt)). The Apache 2.0 license allows you to freely use, modify, distribute, and sell your own products that include Apache 2.0 licensed software. + +We respect intellectual property rights of others and we want to make sure all incoming contributions are correctly attributed and licensed. A Developer Certificate of Origin (DCO) is a lightweight mechanism to do that. + +The DCO is a declaration attached to every contribution made by every developer. In the commit message of the contribution, the developer simply adds a `Signed-off-by` statement and thereby agrees to the DCO, which you can find below or at [DeveloperCertificate.org](http://developercertificate.org/). + +``` +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the + best of my knowledge, is covered under an appropriate open + source license and I have the right under that license to + submit that work with modifications, whether created in whole + or in part by me, under the same open source license (unless + I am permitted to submit under a different license), as + Indicated in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including + all personal information I submit with it, including my + sign-off) is maintained indefinitely and may be redistributed + consistent with this project or the open source license(s) + involved. + ``` + +We require that every contribution to OpenSearch is signed with a Developer Certificate of Origin. Additionally, please use your real name. We do not accept anonymous contributors nor those utilizing pseudonyms. + +Each commit must include a DCO which looks like this + +``` +Signed-off-by: Jane Smith +``` + +You may type this line on your own when writing your commit messages. 
However, if your user.name and user.email are set in your git configs, you can use `-s` or `--signoff` to add the `Signed-off-by` line to the end of the commit message. \ No newline at end of file diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index d162eccb..d4a44c46 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -1,7 +1,7 @@ - [Developer Guide](#developer-guide) - [Forking and Cloning](#forking-and-cloning) - [Install Prerequisites](#install-prerequisites) - - [JDK 14](#jdk-14) + - [JDK 11](#jdk-11) - [Building](#building) - [Using IntelliJ IDEA](#using-intellij-idea) - [Submitting Changes](#submitting-changes) @@ -16,9 +16,9 @@ Fork this repository on GitHub, and clone locally with `git clone`. ### Install Prerequisites -#### JDK 14 +#### JDK 11 -OpenSearch components build using Java 14 at a minimum. This means you must have a JDK 14 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 14 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-14`. +OpenSearch components build using Java 11 at a minimum. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. ### Building @@ -36,4 +36,8 @@ Launch Intellij IDEA, choose **Import Project**, and select the `settings.gradle ### Submitting Changes -See [CONTRIBUTING](CONTRIBUTING.md). \ No newline at end of file +See [CONTRIBUTING](CONTRIBUTING.md). + +### Backport + +- [Link to backport documentation](https://github.com/opensearch-project/opensearch-plugins/blob/main/BACKPORT.md) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index c50082af..3802e32b 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,13 +1,14 @@ ## Maintainers -| Maintainer | GitHub ID | Affiliation | -| --------------- | --------- | ----------- | -| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon | -| Mohammad Qureshi | [qreshi](https://github.com/qreshi) | Amazon | -| Sriram Kosuri | [skkosuri-amzn](https://github.com/skkosuri-amzn) | Amazon | -| Bowen Lan | [bowenlan-amzn](https://github.com/bowenlan-amzn) | Amazon | -| Rishabh Maurya | [rishabhmaurya](https://github.com/rishabhmaurya) | Amazon | -| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon | -| Annie Lee | [leeyun-amzn](https://github.com/leeyun-amzn) | Amazon | +| Maintainer | GitHub ID | Affiliation | +|------------------|---------------------------------------------------| ----------- | +| Ashish Agrawal | [lezzago](https://github.com/lezzago) | Amazon | +| Mohammad Qureshi | [qreshi](https://github.com/qreshi) | Amazon | +| Sriram Kosuri | [skkosuri-amzn](https://github.com/skkosuri-amzn) | Amazon | +| Bowen Lan | [bowenlan-amzn](https://github.com/bowenlan-amzn) | Amazon | +| Rishabh Maurya | [rishabhmaurya](https://github.com/rishabhmaurya) | Amazon | +| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon | +| Annie Lee | [leeyun-amzn](https://github.com/leeyun-amzn) | Amazon | +| Saurabh Singh | [getsaurabh02](https://github.com/getsaurabh02) | Amazon | [This document](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md) explains what maintainers do in this repo, and how they should be doing it. If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md).
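To make the sign-off requirement from CONTRIBUTING.md above concrete, here is a minimal sketch of the usual command sequence; the name, email address, and commit message are placeholder values, not project conventions.

```bash
# Identity git uses for the Signed-off-by trailer (placeholder values).
git config user.name  "Jane Smith"
git config user.email "jane.smith@example.com"

# -s / --signoff appends "Signed-off-by: Jane Smith <jane.smith@example.com>" to the commit message.
git commit -s -m "Describe your change here"

# Forgot the sign-off on the last commit? Amend it in place without editing the message.
git commit --amend --signoff --no-edit
```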
diff --git a/NOTICE b/NOTICE index be83767d..6c7dc983 100644 --- a/NOTICE +++ b/NOTICE @@ -1,5 +1,5 @@ -OpenSearch -Copyright 2021 OpenSearch Contributors +OpenSearch (https://opensearch.org/) +Copyright OpenSearch Contributors This product includes software developed by Elasticsearch (http://www.elastic.co). diff --git a/README.md b/README.md index 237509c1..98ee4ce2 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,6 @@ - +[![codecov](https://codecov.io/gh/opensearch-project/common-utils/branch/main/graph/badge.svg)](https://codecov.io/gh/opensearch-project/common-utils) + + - [OpenSearch Common Utils](#opensearch-common-utils) - [Contributing](#contributing) @@ -44,4 +46,4 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright -Copyright 2020-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. +Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. diff --git a/build-tools/opensearchplugin-coverage.gradle b/build-tools/opensearchplugin-coverage.gradle new file mode 100644 index 00000000..b5b176a3 --- /dev/null +++ b/build-tools/opensearchplugin-coverage.gradle @@ -0,0 +1,49 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +/** + * OpenSearch Plugin build tools don't work with the Gradle Jacoco Plugin to report coverage out of the box. + * https://github.com/elastic/elasticsearch/issues/28867. + * + * This code sets up coverage reporting manually for OpenSearch plugin tests. This is complicated because: + * 1. The OpenSearch integTest Task doesn't implement Gradle's JavaForkOptions so we have to manually start the jacoco agent with the test JVM + * 2. The cluster nodes are stopped using 'kill -9' which means jacoco can't dump it's execution output to a file on VM shutdown + * 3. The Java Security Manager prevents JMX from writing execution output to the file. + * + * To workaround these we start the cluster with jmx enabled and then use Jacoco's JMX MBean to get the execution data before the + * cluster is stopped and dump it to a file. Luckily our current security policy seems to allow this. This will also probably + * break if there are multiple nodes in the integTestCluster. But for now... it sorta works. + */ +apply plugin: 'jacoco' + +// Get gradle to generate the required jvm agent arg for us using a dummy tasks of type Test. Unfortunately Elastic's +// testing tasks don't derive from Test so the jacoco plugin can't do this automatically. +def jacocoDir = "${buildDir}/jacoco" +task dummyTest(type: Test) { + enabled = false + workingDir = file("/") // Force absolute path to jacoco agent jar + jacoco { + destinationFile = file("${jacocoDir}/test.exec") + destinationFile.parentFile.mkdirs() + jmx = true + } +} + +jacocoTestReport { + dependsOn test + executionData dummyTest.jacoco.destinationFile + getSourceDirectories().from(sourceSets.main.allSource) + getClassDirectories().from(sourceSets.main.output) + reports { + html.required = true // human readable + xml.required = true // for coverlay + } +} + +project.gradle.projectsEvaluated { + jacocoTestReport.dependsOn test +} + +check.dependsOn jacocoTestReport diff --git a/build.gradle b/build.gradle index 2d955951..a192777f 100644 --- a/build.gradle +++ b/build.gradle @@ -1,64 +1,77 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. */ buildscript { ext { opensearch_group = "org.opensearch" - opensearch_version = System.getProperty("opensearch.version", "1.0.0") - kotlin_version = System.getProperty("kotlin.version", "1.4.32") + opensearch_version = System.getProperty("opensearch.version", "2.17.2-SNAPSHOT") + isSnapshot = "true" == System.getProperty("build.snapshot", "true") + buildVersionQualifier = System.getProperty("build.version_qualifier", "") + kotlin_version = System.getProperty("kotlin.version", "1.8.21") } repositories { mavenLocal() mavenCentral() maven { url "https://plugins.gradle.org/m2/" } - jcenter() + maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } } dependencies { classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}" classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${kotlin_version}" classpath "org.jetbrains.kotlin:kotlin-allopen:${kotlin_version}" - classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.17.1" +// classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.20.0-RC1" } } plugins { id 'java-library' id 'maven-publish' - id "com.diffplug.gradle.spotless" version "3.26.1" + id 'com.diffplug.spotless' version '6.22.0' } repositories { mavenLocal() mavenCentral() maven { url "https://plugins.gradle.org/m2/" } - jcenter() + maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } } -group 'org.opensearch.commons' +allprojects { + group 'org.opensearch.commons' + version = opensearch_version.tokenize('-')[0] + '.0' + if (buildVersionQualifier) { + version += "-${buildVersionQualifier}" + } + if (isSnapshot) { + version += "-SNAPSHOT" + } +} -sourceCompatibility = 1.8 +targetCompatibility = JavaVersion.VERSION_11 +sourceCompatibility = JavaVersion.VERSION_11 apply plugin: 'java' apply plugin: 'jacoco' apply plugin: 'signing' apply plugin: 'maven-publish' apply plugin: 'com.github.johnrengelman.shadow' -apply plugin: 'io.gitlab.arturbosch.detekt' +// apply plugin: 'io.gitlab.arturbosch.detekt' apply plugin: 'org.jetbrains.kotlin.jvm' apply plugin: 'org.jetbrains.kotlin.plugin.allopen' +apply plugin: 'opensearch.repositories' +apply from: 'build-tools/opensearchplugin-coverage.gradle' configurations { - ktlint + ktlint { + resolutionStrategy { + force "ch.qos.logback:logback-classic:1.3.14" + force "ch.qos.logback:logback-core:1.3.14" + } + } } dependencies { @@ -66,15 +79,19 @@ dependencies { compileOnly "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}" compileOnly "org.jetbrains.kotlin:kotlin-stdlib-common:${kotlin_version}" compileOnly "org.jetbrains.kotlinx:kotlinx-coroutines-core:1.4.3" // ${kotlin_version} does not work for coroutines - testCompile "org.opensearch.test:framework:${opensearch_version}" - testCompile "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" - testCompile "org.mockito:mockito-core:3.10.0" + compileOnly "com.cronutils:cron-utils:9.1.6" + compileOnly "commons-validator:commons-validator:1.7" + testImplementation "org.opensearch.test:framework:${opensearch_version}" + testImplementation "org.jetbrains.kotlin:kotlin-test:${kotlin_version}" + testImplementation "org.mockito:mockito-core:3.10.0" testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.2' testImplementation 'org.mockito:mockito-junit-jupiter:3.10.0' testImplementation "com.nhaarman.mockitokotlin2:mockito-kotlin:2.2.0" + testImplementation "com.cronutils:cron-utils:9.1.6" + testImplementation 
"commons-validator:commons-validator:1.7" testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.2' - ktlint "com.pinterest:ktlint:0.41.0" + ktlint "com.pinterest:ktlint:0.47.1" } test { @@ -95,12 +112,14 @@ spotless { eclipse().configFile rootProject.file('.eclipseformat.xml') } } -detekt { + +// TODO: enable detekt only when snakeyaml vulnerability is fixed +/*detekt { config = files("detekt.yml") buildUponDefaultConfig = true -} +}*/ -task ktlint(type: JavaExec, group: "verification") { +tasks.register('ktlint', JavaExec) { description = "Check Kotlin code style." main = "com.pinterest.ktlint.Main" classpath = configurations.ktlint @@ -112,43 +131,56 @@ task ktlint(type: JavaExec, group: "verification") { check.dependsOn ktlint -task ktlintFormat(type: JavaExec, group: "formatting") { +tasks.register('ktlintFormat', JavaExec) { description = "Fix Kotlin code style deviations." - main = "com.pinterest.ktlint.Main" classpath = configurations.ktlint + // https://github.com/pinterest/ktlint/issues/1391#issuecomment-1251287020 + jvmArgs "--add-opens=java.base/java.lang=ALL-UNNAMED" + setProperty("mainClass", "com.pinterest.ktlint.Main") args "-F", "src/**/*.kt" } compileKotlin { kotlinOptions { freeCompilerArgs = ['-Xjsr305=strict'] - jvmTarget = "1.8" + jvmTarget = "11" } } compileTestKotlin { kotlinOptions { - jvmTarget = "1.8" + jvmTarget = "11" } } shadowJar { - classifier = null + archiveClassifier = null } task sourcesJar(type: Jar) { - classifier = 'sources' + archiveClassifier = 'sources' from sourceSets.main.allJava } task javadocJar(type: Jar) { - classifier = 'javadoc' + archiveClassifier = 'javadoc' from javadoc.destinationDir } - -version '1.0.0.0' - publishing { + repositories { + maven { + name = 'staging' + url = "${rootProject.buildDir}/local-staging-repo" + } + maven { + name = "Snapshots" + url = "https://aws.oss.sonatype.org/content/repositories/snapshots" + credentials { + username "$System.env.SONATYPE_USERNAME" + password "$System.env.SONATYPE_PASSWORD" + } + } + } publications { shadow(MavenPublication) { project.shadow.component(it) @@ -187,9 +219,16 @@ publishing { gradle.startParameter.setShowStacktrace(ShowStacktrace.ALWAYS) gradle.startParameter.setLogLevel(LogLevel.DEBUG) +} - signing { - required { gradle.taskGraph.hasTask("publishShadowPublicationToSonatype-stagingRepository") } - sign publishing.publications.shadow +// updateVersion: Task to auto increment to the next development iteration +task updateVersion { + onlyIf { System.getProperty('newVersion') } + doLast { + ext.newVersion = System.getProperty('newVersion') + println "Setting version to ${newVersion}." + // String tokenization to support -SNAPSHOT + // Include the required files that needs to be updated with new Version + ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) } } diff --git a/detekt.yml b/detekt.yml index 34ed3c1e..5d1d194d 100644 --- a/detekt.yml +++ b/detekt.yml @@ -1,31 +1,54 @@ --- # +# Copyright OpenSearch Contributors # SPDX-License-Identifier: Apache-2.0 # -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
-# style: ForbiddenComment: active: false + LoopWithTooManyJumpStatements: + maxJumpCount: 4 MaxLineLength: - maxLineLength: 150 + maxLineLength: 200 ThrowsCount: active: true max: 10 ReturnCount: active: true max: 10 + UtilityClassWithPublicConstructor: + active: false + +empty-blocks: + EmptyCatchBlock: + excludes: ['**/test/**'] + +exceptions: + SwallowedException: + excludes: ['**/test/**'] + ignoredExceptionTypes: + - 'ZoneRulesException' + - 'DateTimeException' complexity: LargeClass: excludes: ['**/test/**'] LongMethod: excludes: ['**/test/**'] + threshold: 110 LongParameterList: excludes: ['**/test/**'] + constructorThreshold: 8 + ComplexMethod: + threshold: 27 + NestedBlockDepth: + threshold: 10 + +naming: + ObjectPropertyNaming: + constantPattern: '[_A-Za-z][_A-Za-z0-9]*' + +performance: + SpreadOperator: + active: false diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 28861d27..7f93135c 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 21a8fe40..3999f7f3 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip +networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.4-all.zip +distributionSha256Sum=3e1af3ae886920c3ac87f7a91f816c0c7c436f276a6eefdb3da152100fef72ae diff --git a/gradlew b/gradlew index cccdd3d5..1aa94a42 100755 --- a/gradlew +++ b/gradlew @@ -1,78 +1,127 @@ -#!/usr/bin/env sh +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. 
+# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -81,92 +130,120 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. + +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=$((i+1)) + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. 
+ shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" fi +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. +# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index e95643d6..6689b85b 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -1,4 +1,20 @@ -@if "%DEBUG%" == "" @echo off +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. 
+@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -9,19 +25,23 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if %ERRORLEVEL% equ 0 goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -35,7 +55,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -45,38 +65,26 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/release-notes/opensearch-common-utils.release-notes-1.0.0.0.md b/release-notes/opensearch-common-utils.release-notes-1.0.0.0.md new file mode 100644 index 00000000..71d1a7db --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-1.0.0.0.md @@ -0,0 +1,29 @@ +## Version 1.0.0.0 2021-07-01 + +Compatible with OpenSearch 1.0.0 + +### Enhancements + + * Notification plugin interface and models ([#31](https://github.com/opensearch-project/common-utils/pull/31)) + +### Infrastructure + + * Support for kotlin and JUnit5 with mockito ([#29](https://github.com/opensearch-project/common-utils/pull/29)) + * Removing Kotlin Runtime library bundled into library ([#30](https://github.com/opensearch-project/common-utils/pull/30)) + * Bump to version 1.0.0.0 #34 ([#34](https://github.com/opensearch-project/common-utils/pull/34)) + +### Documentation + + * Update OpenSearch branch to 1.0 ([#28](https://github.com/opensearch-project/common-utils/pull/28)) + * Cleanup READMEs. ([#32](https://github.com/opensearch-project/common-utils/pull/32)) + +### Maintainence + + * Update issue template with multiple labels ([#18](https://github.com/opensearch-project/common-utils/pull/18)) + * Rename namespaces from OpenDistro to OpenSearch ([#20](https://github.com/opensearch-project/common-utils/pull/20)) + * Rename classes, variables, methods to incorporate OpenSearch ([#21](https://github.com/opensearch-project/common-utils/pull/21)) + * Rename remaining identifiers to OpenSearch ([#23](https://github.com/opensearch-project/common-utils/pull/23)) + * Version changed to rc1 #24 ([#24](https://github.com/opensearch-project/common-utils/pull/24)) + * Rename consts as per changes in security plugin ([#25](https://github.com/opensearch-project/common-utils/pull/25)) + * Move workflow tags to rc1 ([#26](https://github.com/opensearch-project/common-utils/pull/26)) + * Add rc1 release notes ([#27](https://github.com/opensearch-project/common-utils/pull/27)) diff --git a/release-notes/opensearch-common-utils.release-notes-1.2.0.0.md b/release-notes/opensearch-common-utils.release-notes-1.2.0.0.md new file mode 100644 index 00000000..afb92873 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-1.2.0.0.md @@ -0,0 +1,14 @@ +## Version 1.2.0.0 2021-11-05 + +Compatible with OpenSearch 1.2.0 + +### Infrastructure + + * Updates common-utils version to 1.2 ([#77](https://github.com/opensearch-project/common-utils/pull/77)) + * Update maven publication to include cksums. 
([#91](https://github.com/opensearch-project/common-utils/pull/91)) + +### Documentation + + * Add themed logo to README ([#41](https://github.com/opensearch-project/common-utils/pull/41)) + * Update copyright notice ([#90](https://github.com/opensearch-project/common-utils/pull/90)) + * Add release notes for version 1.2.0.0 ([#92](https://github.com/opensearch-project/common-utils/pull/92)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-1.3.0.0.md b/release-notes/opensearch-common-utils.release-notes-1.3.0.0.md new file mode 100644 index 00000000..4f210951 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-1.3.0.0.md @@ -0,0 +1,14 @@ +## Version 1.3.0.0 2022-03-11 + +Compatible with OpenSearch 1.3.0 + +### Infrastructure + + * Updates common-utils version to 1.3 ([#99](https://github.com/opensearch-project/common-utils/pull/99)) + * Update build.sh script to include optional platform param. ([#95](https://github.com/opensearch-project/common-utils/pull/95)) + * Update copyright notice and add DCO check workflow. ([#94](https://github.com/opensearch-project/common-utils/pull/94)) + +### Documentation + + * Update copyright headers ([#117](https://github.com/opensearch-project/common-utils/pull/117)) + * Add release notes for version 1.3.0.0 ([#132](https://github.com/opensearch-project/common-utils/pull/132)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.0.0.0-rc1.md b/release-notes/opensearch-common-utils.release-notes-2.0.0.0-rc1.md new file mode 100644 index 00000000..84418524 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.0.0.0-rc1.md @@ -0,0 +1,29 @@ +## Version 2.0.0.0-rc1 2022-04-25 + +Compatible with OpenSearch 2.0.0-rc1 + +### Enhancements + + * Add SQL/PPL transport request/response models for SQL plugin ([#155](https://github.com/opensearch-project/common-utils/pull/155)) + * Support sending email message via Notifications pass-through API ([#158](https://github.com/opensearch-project/common-utils/pull/158)) + +### Infrastructure + + * Upgrade gradle artifacts to 7.3.3 ([#135](https://github.com/opensearch-project/common-utils/pull/135) + * Update common-utils to depend on the OpenSearch repositories plugin ([#137](https://github.com/opensearch-project/common-utils/pull/137)) + * Add sign-off option for version workflow PR ([#143](https://github.com/opensearch-project/common-utils/pull/143)) + * Add qualifier default to alpha1 in build.gradle ([#151](https://github.com/opensearch-project/common-utils/pull/151)) + * Update issue templates from github for bugs and features ([#154](https://github.com/opensearch-project/common-utils/pull/154)) + * Remove support for JDK 14 ([#159](https://github.com/opensearch-project/common-utils/pull/159)) + +### Refactoring + + * Remove feature and feature_list usage for Notifications ([#136](https://github.com/opensearch-project/common-utils/pull/136)) + * Rename references for Get Channels API for Notifications ([#140](https://github.com/opensearch-project/common-utils/pull/140)) + * Remove allowedConfigFeatureList from GetPluginFeaturesResponse for Notifications ([#144](https://github.com/opensearch-project/common-utils/pull/144)) + * Remove NotificationEvent Request, Response and SearchResults ([#153](https://github.com/opensearch-project/common-utils/pull/153)) + * Add NotificationEvent to SendNotificationResponse and Removal of NotificationID 
([#156](https://github.com/opensearch-project/common-utils/pull/156)) + +### Documentation + + * Add release notes for version 2.0.0-rc1 ([#162](https://github.com/opensearch-project/common-utils/pull/162)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.0.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.0.0.0.md new file mode 100644 index 00000000..70295ccc --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.0.0.0.md @@ -0,0 +1,32 @@ +## Version 2.0.0.0 2022-05-18 + +Compatible with OpenSearch 2.0.0 + +### Enhancements + + * Add SQL/PPL transport request/response models for SQL plugin ([#155](https://github.com/opensearch-project/common-utils/pull/155)) + * Support sending email message via Notifications pass-through API ([#158](https://github.com/opensearch-project/common-utils/pull/158)) + +### Infrastructure + + * Upgrade gradle artifacts to 7.3.3 ([#135](https://github.com/opensearch-project/common-utils/pull/135) + * Update common-utils to depend on the OpenSearch repositories plugin ([#137](https://github.com/opensearch-project/common-utils/pull/137)) + * Add sign-off option for version workflow PR ([#143](https://github.com/opensearch-project/common-utils/pull/143)) + * Add qualifier default to alpha1 in build.gradle ([#151](https://github.com/opensearch-project/common-utils/pull/151)) + * Update issue templates from github for bugs and features ([#154](https://github.com/opensearch-project/common-utils/pull/154)) + * Remove support for JDK 14 ([#159](https://github.com/opensearch-project/common-utils/pull/159)) + * Remove RC1 as the qualifier from Common Utils ([#168](https://github.com/opensearch-project/common-utils/pull/168)) + +### Refactoring + + * Remove feature and feature_list usage for Notifications ([#136](https://github.com/opensearch-project/common-utils/pull/136)) + * Rename references for Get Channels API for Notifications ([#140](https://github.com/opensearch-project/common-utils/pull/140)) + * Remove allowedConfigFeatureList from GetPluginFeaturesResponse for Notifications ([#144](https://github.com/opensearch-project/common-utils/pull/144)) + * Remove NotificationEvent Request, Response and SearchResults ([#153](https://github.com/opensearch-project/common-utils/pull/153)) + * Add NotificationEvent to SendNotificationResponse and Removal of NotificationID ([#156](https://github.com/opensearch-project/common-utils/pull/156)) + * Change BaseModel to extend ToXContentObject instead of ToXContent ([#173](https://github.com/opensearch-project/common-utils/pull/173)) + +### Documentation + + * Add release notes for version 2.0.0-rc1 ([#162](https://github.com/opensearch-project/common-utils/pull/162)) + * Add release notes for version 2.0.0.0 ([#177](https://github.com/opensearch-project/common-utils/pull/177)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.1.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.1.0.0.md new file mode 100644 index 00000000..3bf4465f --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.1.0.0.md @@ -0,0 +1,10 @@ +## Version 2.1.0.0 2022-07-06 + +Compatible with OpenSearch 2.1.0 + +### Maintenance +* Upgrade gradle to 7.4.2. ([#191](https://github.com/opensearch-project/common-utils/pull/191)) +* Bump up the version to 2.1. ([#190](https://github.com/opensearch-project/common-utils/pull/190)) + +### Documentation +* Added 2.1 release notes. 
([#194](https://github.com/opensearch-project/common-utils/pull/194)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.10.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.10.0.0.md new file mode 100644 index 00000000..435174f6 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.10.0.0.md @@ -0,0 +1,14 @@ +## Version 2.10.0.0 2023-08-31 + +Compatible with OpenSearch 2.10.0 + +### Maintenance +* Upgrade the backport workflow ([#487](https://github.com/opensearch-project/common-utils/pull/487)) +* Updates demo certs used in rest tests ([#518](https://github.com/opensearch-project/common-utils/pull/518)) + +### Feature +* common utils to support Microsoft teams in notifications ([#428](https://github.com/opensearch-project/common-utils/pull/428)) +* support list of monitor ids in Chained Monitor Findings ([#514](https://github.com/opensearch-project/common-utils/pull/514)) + +### Documentation +* Added 2.10.0.0 release notes ([#531](https://github.com/opensearch-project/common-utils/pull/531)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.12.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.12.0.0.md new file mode 100644 index 00000000..25aee069 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.12.0.0.md @@ -0,0 +1,25 @@ +## Version 2.12.0.0 2023-02-06 + +Compatible with OpenSearch 2.12.0 + +### Maintenance +* Increment version to 2.12.0-SNAPSHOT ([#545](https://github.com/opensearch-project/common-utils/pull/545)) +* Onboard prod jenkins docker image to github actions ([#557](https://github.com/opensearch-project/common-utils/pull/557)) +* Update Gradle to 8.4 ([#560](https://github.com/opensearch-project/common-utils/pull/560)) +* Add Java 11/17/21 matrix for build, test and integration checks ([#561](https://github.com/opensearch-project/common-utils/pull/561)) +* changed all usages of 'admin' as a password to something different ([#581](https://github.com/opensearch-project/common-utils/pull/581)) +* Update dependency com.pinterest:ktlint to 0.47.1 and fix CVE-2023-6378 ([#585](https://github.com/opensearch-project/common-utils/pull/585)) + +### Enhancement +* add 'fields' parameter in doc level query object. 
([#546](https://github.com/opensearch-project/common-utils/pull/546))
+* add fields param in toXContent() for doc level query ([#549](https://github.com/opensearch-project/common-utils/pull/549))
+* Add User.isAdminDn to User class ([#547](https://github.com/opensearch-project/common-utils/pull/547))
+
+### Refactor
+* Move get monitor and search monitor action / request / responses to common-utils ([#566](https://github.com/opensearch-project/common-utils/pull/566))
+
+### Features
+* Implemented cross-cluster monitor support ([#584](https://github.com/opensearch-project/common-utils/pull/584))
+
+### Documentation
+* Added 2.12.0.0 release notes ([#585](https://github.com/opensearch-project/common-utils/pull/585))
\ No newline at end of file
diff --git a/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md
new file mode 100644
index 00000000..8aef8153
--- /dev/null
+++ b/release-notes/opensearch-common-utils.release-notes-2.13.0.0.md
@@ -0,0 +1,16 @@
+## Version 2.13.0.0 2024-03-21
+
+Compatible with OpenSearch 2.13.0
+
+### Maintenance
+* Increment version to 2.13.0-SNAPSHOT ([#591](https://github.com/opensearch-project/common-utils/pull/591))
+
+### Enhancement
+* add queryFieldNames field in Doc Level Queries ([#582](https://github.com/opensearch-project/common-utils/pull/582)) ([#597](https://github.com/opensearch-project/common-utils/pull/597))
+
+### Features
+* fix findings API enhancements ([#611](https://github.com/opensearch-project/common-utils/pull/611)) ([#617](https://github.com/opensearch-project/common-utils/pull/617))
+* Feature findings enhancement ([#596](https://github.com/opensearch-project/common-utils/pull/596)) ([#606](https://github.com/opensearch-project/common-utils/pull/606))
+
+### Documentation
+* Added 2.13.0.0 release notes ([#622](https://github.com/opensearch-project/common-utils/pull/622))
\ No newline at end of file
diff --git a/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md
new file mode 100644
index 00000000..0dfd3028
--- /dev/null
+++ b/release-notes/opensearch-common-utils.release-notes-2.14.0.0.md
@@ -0,0 +1,15 @@
+## Version 2.14.0.0 2024-04-30
+
+Compatible with OpenSearch 2.14.0
+
+### Maintenance
+* Increment version to 2.14.0-SNAPSHOT ([#625](https://github.com/opensearch-project/common-utils/pull/625))
+
+### Refactor
+* Obfuscate ip addresses in alert error message ([#511](https://github.com/opensearch-project/common-utils/pull/511))
+* Change doc level query name validation ([#630](https://github.com/opensearch-project/common-utils/pull/630))
+* Added validation for the new clusters field. ([#633](https://github.com/opensearch-project/common-utils/pull/633))
+* Wrapped URI syntax exception in IllegalArgument exception. ([#645](https://github.com/opensearch-project/common-utils/pull/645))
+
+### Documentation
+* Added 2.14.0.0 release notes.
([#648](https://github.com/opensearch-project/common-utils/pull/648)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md new file mode 100644 index 00000000..3a4e546f --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.15.0.0.md @@ -0,0 +1,20 @@ +## Version 2.15.0.0 2024-06-10 + +Compatible with OpenSearch 2.15.0 + +### Maintenance +* Increment version to 2.15.0-SNAPSHOT ([#651](https://github.com/opensearch-project/common-utils/pull/651)) + + +### Features +* CorrelationAlert model added ([#631](https://github.com/opensearch-project/common-utils/pull/631), [#679](https://github.com/opensearch-project/common-utils/pull/679)) + +### Bug Fixes +* Bug fixes for correlation Alerts ([#670](https://github.com/opensearch-project/common-utils/pull/670), [#680](https://github.com/opensearch-project/common-utils/pull/680)) + +### Enhancements +* Add start_time and end_time filters to GetAlertsRequest. ([#655](https://github.com/opensearch-project/common-utils/pull/655)) +* Added new models for Alerting Comments ([#663](https://github.com/opensearch-project/common-utils/pull/663), [#671](https://github.com/opensearch-project/common-utils/pull/671), [#674](https://github.com/opensearch-project/common-utils/pull/674) [#678](https://github.com/opensearch-project/common-utils/pull/678)) + +### Documentation +* Added 2.15.0.0 release notes. ([#672](https://github.com/opensearch-project/common-utils/pull/672)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md new file mode 100644 index 00000000..6ae5b6b7 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.16.0.0.md @@ -0,0 +1,12 @@ +## Version 2.16.0.0 2024-07-25 + +Compatible with OpenSearch 2.16.0 + +### Maintenance +* Increment version to 2.16.0-SNAPSHOT ([#688](https://github.com/opensearch-project/common-utils/pull/688)) + +### Enhancements +* [Backport 2.x] Add support for remote monitors ([#694](https://github.com/opensearch-project/common-utils/pull/694)) + +### Documentation +* Added 2.16.0.0 release notes. 
([#700](https://github.com/opensearch-project/common-utils/pull/700)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md new file mode 100644 index 00000000..ff24e040 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.17.0.0.md @@ -0,0 +1,16 @@ +## Version 2.17.0.0 2024-09-03 + +Compatible with OpenSearch 2.17.0 + +### Maintenance +* Fixed Common-Utils CIs: ([#703](https://github.com/opensearch-project/common-utils/pull/703)) + +### Bug Fixes +* Added missing ctx variables ([#710](https://github.com/opensearch-project/common-utils/pull/710)) +* Changed the names of security actions for Alerting Comments feature ([#724](https://github.com/opensearch-project/common-utils/pull/724)) + +### Enhancements +* Updated pull request template to include API spec change in checklist ([#696](https://github.com/opensearch-project/common-utils/pull/696)) + +### Documentation +* Added 2.17.0.0 release notes ([#727](https://github.com/opensearch-project/common-utils/pull/727)) diff --git a/release-notes/opensearch-common-utils.release-notes-2.2.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.2.0.0.md new file mode 100644 index 00000000..b77f7244 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.2.0.0.md @@ -0,0 +1,10 @@ +## Version 2.2.0.0 2022-08-09 + +Compatible with OpenSearch 2.2.0 + +### Infrastructure +* Execute version auto increment in staging ([#200](https://github.com/opensearch-project/common-utils/pull/200)) +* Bump up the version to 2.2. ([#204](https://github.com/opensearch-project/common-utils/pull/204)) + +### Documentation +* Added 2.2 release notes. ([#212](https://github.com/opensearch-project/common-utils/pull/212)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.6.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.6.0.0.md new file mode 100644 index 00000000..83549684 --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.6.0.0.md @@ -0,0 +1,15 @@ +## Version 2.6.0.0 2023-02-21 + +Compatible with OpenSearch 2.6.0 + +### Maintenance +* Increment version to 2.6.0-SNAPSHOT ([#344](https://github.com/opensearch-project/common-utils/pull/344)) + +### Refactoring +* Add a super admin client builder w/ hosts params ([#322](https://github.com/opensearch-project/common-utils/pull/322)) + +### Bug Fixes +* Fix streaming functions for LegacySNSMessage ([#324](https://github.com/opensearch-project/common-utils/pull/324)) + +### Documentation +* Added 2.6 release notes. ([#363](https://github.com/opensearch-project/common-utils/pull/363)) \ No newline at end of file diff --git a/release-notes/opensearch-common-utils.release-notes-2.7.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.7.0.0.md new file mode 100644 index 00000000..94e9399c --- /dev/null +++ b/release-notes/opensearch-common-utils.release-notes-2.7.0.0.md @@ -0,0 +1,22 @@ +## Version 2.7.0.0 2023-04-17 + +Compatible with OpenSearch 2.7.0 + +### Maintenance +* Increment version to 2.7.0-SNAPSHOT. ([#371](https://github.com/opensearch-project/common-utils/pull/371)) + +### Refactoring +* Fixed xContent dependencies due to OSCore changes. ([#392](https://github.com/opensearch-project/common-utils/pull/392)) + +### Infrastructure +* Publish snapshots to maven via GHA. 
([#365](https://github.com/opensearch-project/common-utils/pull/365))
+* Add auto Github release workflow. ([#376](https://github.com/opensearch-project/common-utils/pull/376))
+
+### Feature
+* InjectSecurity - inject User object in UserInfo in threadContext. ([#396](https://github.com/opensearch-project/common-utils/pull/396))
+
+### Bug Fixes
+* Fix SNS regex for validation on notification channel to support SNS FIFO topics. ([#381](https://github.com/opensearch-project/common-utils/pull/381))
+
+### Documentation
+* Added 2.7 release notes. ([#407](https://github.com/opensearch-project/common-utils/pull/407))
\ No newline at end of file
diff --git a/release-notes/opensearch-common-utils.release-notes-2.8.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.8.0.0.md
new file mode 100644
index 00000000..31769b9f
--- /dev/null
+++ b/release-notes/opensearch-common-utils.release-notes-2.8.0.0.md
@@ -0,0 +1,20 @@
+## Version 2.8.0.0 2023-05-26
+
+Compatible with OpenSearch 2.8.0
+
+### Maintenance
+* upgrade gradle to 8.1.1. ([#418](https://github.com/opensearch-project/common-utils/pull/418))
+* Sync up MAINTAINERS to CODEOWNERS. ([#427](https://github.com/opensearch-project/common-utils/pull/427))
+* Fix build errors after refactoring of Strings class in core. ([#432](https://github.com/opensearch-project/common-utils/pull/432))
+* updating maintainers and codeowners. ([#438](https://github.com/opensearch-project/common-utils/pull/438))
+* fix codeowners file format. ([#440](https://github.com/opensearch-project/common-utils/pull/440))
+
+### Infrastructure
+* Switch publish maven branches to list. ([#423](https://github.com/opensearch-project/common-utils/pull/423))
+
+### Feature
+* integrate security-analytics & alerting for correlation engine. ([#412](https://github.com/opensearch-project/common-utils/pull/412))
+* NoOpTrigger. ([#420](https://github.com/opensearch-project/common-utils/pull/420))
+
+### Documentation
+* Added 2.8 release notes. ([#441](https://github.com/opensearch-project/common-utils/pull/441))
\ No newline at end of file
diff --git a/release-notes/opensearch-common-utils.release-notes-2.9.0.0.md b/release-notes/opensearch-common-utils.release-notes-2.9.0.0.md
new file mode 100644
index 00000000..c5f78503
--- /dev/null
+++ b/release-notes/opensearch-common-utils.release-notes-2.9.0.0.md
@@ -0,0 +1,26 @@
+## Version 2.9.0.0 2023-07-11
+
+Compatible with OpenSearch 2.9.0
+
+### Maintenance
+* Increment version to 2.9.0-SNAPSHOT. ([#444](https://github.com/opensearch-project/common-utils/pull/444))
+* Modify triggers to push snapshots on all branches. ([#454](https://github.com/opensearch-project/common-utils/pull/454))
+
+### Feature
+* Adds Chained alerts triggers for workflows. ([#456](https://github.com/opensearch-project/common-utils/pull/456))
+* Acknowledge chained alert request for workflow. ([#459](https://github.com/opensearch-project/common-utils/pull/459))
+* Adds audit state in Alert. ([#461](https://github.com/opensearch-project/common-utils/pull/461))
+* Add workflowId field in alert. ([#463](https://github.com/opensearch-project/common-utils/pull/463))
+* APIs for get workflow alerts and acknowledge chained alerts. ([#472](https://github.com/opensearch-project/common-utils/pull/472))
+* Add auditDelegateMonitorAlerts flag. ([#476](https://github.com/opensearch-project/common-utils/pull/476))
+* Implemented support for configuring a cluster metrics monitor to call cat/indices, and cat/shards.
([#479](https://github.com/opensearch-project/common-utils/pull/479)) + + +### Bug Fixes +* OpenSearch commons strings library dependency import. ([#474](https://github.com/opensearch-project/common-utils/pull/474)) + +### Refactoring +* Pass workflow id in alert constructors. ([#465](https://github.com/opensearch-project/common-utils/pull/465)) + +### Documentation +* Added 2.9 release notes. ([#482](https://github.com/opensearch-project/common-utils/pull/482)) \ No newline at end of file diff --git a/scripts/build.sh b/scripts/build.sh new file mode 100755 index 00000000..e0495d4a --- /dev/null +++ b/scripts/build.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +# Copyright OpenSearch Contributors. +# SPDX-License-Identifier: Apache-2.0 + +set -ex + +function usage() { + echo "Usage: $0 [args]" + echo "" + echo "Arguments:" + echo -e "-v VERSION\t[Required] OpenSearch version." + echo -e "-q QUALIFIER\t[Optional] Version qualifier." + echo -e "-s SNAPSHOT\t[Optional] Build a snapshot, default is 'false'." + echo -e "-p PLATFORM\t[Optional] Platform, ignored." + echo -e "-a ARCHITECTURE\t[Optional] Build architecture, ignored." + echo -e "-o OUTPUT\t[Optional] Output path, default is 'artifacts'." + echo -e "-h help" +} + +while getopts ":h:v:q:s:o:p:a:" arg; do + case $arg in + h) + usage + exit 1 + ;; + v) + VERSION=$OPTARG + ;; + q) + QUALIFIER=$OPTARG + ;; + s) + SNAPSHOT=$OPTARG + ;; + o) + OUTPUT=$OPTARG + ;; + p) + PLATFORM=$OPTARG + ;; + a) + ARCHITECTURE=$OPTARG + ;; + :) + echo "Error: -${OPTARG} requires an argument" + usage + exit 1 + ;; + ?) + echo "Invalid option: -${arg}" + exit 1 + ;; + esac +done + +if [ -z "$VERSION" ]; then + echo "Error: You must specify the OpenSearch version" + usage + exit 1 +fi + +[[ ! -z "$QUALIFIER" ]] && VERSION=$VERSION-$QUALIFIER +[[ "$SNAPSHOT" == "true" ]] && VERSION=$VERSION-SNAPSHOT +[ -z "$OUTPUT" ] && OUTPUT=artifacts + +./gradlew build -x test -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishShadowPublicationToMavenLocal -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +./gradlew publishShadowPublicationToStagingRepository -Dopensearch.version=$VERSION -Dbuild.snapshot=$SNAPSHOT -Dbuild.version_qualifier=$QUALIFIER +mkdir -p $OUTPUT/maven/org/opensearch +cp -r ./build/local-staging-repo/org/opensearch/. $OUTPUT/maven/org/opensearch diff --git a/settings.gradle b/settings.gradle index 7fa821ef..59d3dd1c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,12 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. */ rootProject.name = 'common-utils' \ No newline at end of file diff --git a/spotless.license.java b/spotless.license.java index 01b9ea6c..9e182dcd 100644 --- a/spotless.license.java +++ b/spotless.license.java @@ -1,26 +1,5 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ diff --git a/src/main/java/org/opensearch/commons/ConfigConstants.java b/src/main/java/org/opensearch/commons/ConfigConstants.java index d7516dbc..a34f7e2f 100644 --- a/src/main/java/org/opensearch/commons/ConfigConstants.java +++ b/src/main/java/org/opensearch/commons/ConfigConstants.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons; @@ -46,5 +25,7 @@ public class ConfigConstants { public static final String INJECTED_USER = "injected_user"; public static final String OPENSEARCH_SECURITY_USE_INJECTED_USER_FOR_PLUGINS = "plugins.security_use_injected_user_for_plugins"; public static final String OPENSEARCH_SECURITY_SSL_HTTP_ENABLED = "plugins.security.ssl.http.enabled"; + public static final String OPENSEARCH_SECURITY_AUTHCZ_ADMIN_DN = "plugins.security.authcz.admin_dn"; public static final String OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT = "_opendistro_security_user_info"; + } diff --git a/src/main/java/org/opensearch/commons/InjectSecurity.java b/src/main/java/org/opensearch/commons/InjectSecurity.java index ab4f0323..e6b283d9 100644 --- a/src/main/java/org/opensearch/commons/InjectSecurity.java +++ b/src/main/java/org/opensearch/commons/InjectSecurity.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. 
See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons; @@ -31,12 +10,14 @@ import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_USE_INJECTED_USER_FOR_PLUGINS; import java.util.List; +import java.util.StringJoiner; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.common.Strings; /** * For background jobs usage only. User or Roles injection can be done using transport layer only. @@ -112,6 +93,7 @@ public InjectSecurity(final String id, final Settings settings, final ThreadCont /** * Injects user or roles, based on opendistro_security_use_injected_user_for_plugins setting. By default injects roles. + * Expects threadContext to be stashed * @param user * @param roles */ @@ -125,7 +107,8 @@ public void inject(final String user, final List roles) { /** * Injects user. - * @param user + * Expects threadContext to be stashed + * @param user name */ public void injectUser(final String user) { if (Strings.isNullOrEmpty(user)) { @@ -136,8 +119,39 @@ public void injectUser(final String user) { threadContext.putTransient(INJECTED_USER, user); log.debug("{}, InjectSecurity - inject roles: {}", Thread.currentThread().getName(), id); } else { - log.error("{}, InjectSecurity- most likely thread context corruption : {}", Thread.currentThread().getName(), id); + log.error("{}, InjectSecurity - most likely thread context corruption : {}", Thread.currentThread().getName(), id); + } + } + + /** + * Injects user object into user info. + * Expects threadContext to be stashed. + * @param user + */ + public void injectUserInfo(final User user) { + if (user == null) { + return; + } + String userObjectAsString = threadContext.getTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + if (userObjectAsString != null) { + log + .error( + "{}, InjectSecurity - id: [{}] found existing user_info: {}", + Thread.currentThread().getName(), + id, + userObjectAsString + ); + return; + } + StringJoiner joiner = new StringJoiner("|"); + joiner.add(user.getName()); + joiner.add(java.lang.String.join(",", user.getBackendRoles())); + joiner.add(java.lang.String.join(",", user.getRoles())); + String requestedTenant = user.getRequestedTenant(); + if (!Strings.isNullOrEmpty(requestedTenant)) { + joiner.add(requestedTenant); } + threadContext.putTransient(ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, joiner.toString()); } /** @@ -160,6 +174,23 @@ public void injectRoles(final List roles) { } } + /** + * Allows one to set the property in threadContext if possible to the value provided. If not possible returns false. 
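+ * For example (illustrative property name and value only), a background job could call injectProperty("injected_custom_attr", "value-1") after stashing the thread context; the call returns true once the transient has been put, and false when the property name is null or empty, the value is null, or the property is already present in the thread context.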
+ * @param property + * @param value + * @return boolean + */ + public boolean injectProperty(final String property, final Object value) { + if (Strings.isNullOrEmpty(property) || value == null || threadContext.getTransient(property) != null) { + log.debug("{}, InjectSecurity - cannot inject property: {}", Thread.currentThread().getName(), id); + return false; + } else { + threadContext.putTransient(property, value); + log.debug("{}, InjectSecurity - inject property: {}", Thread.currentThread().getName(), id); + return true; + } + } + @Override public void close() { if (ctx != null) { diff --git a/src/main/java/org/opensearch/commons/authuser/AuthUserRequestBuilder.java b/src/main/java/org/opensearch/commons/authuser/AuthUserRequestBuilder.java index c4b32e5f..19811b5b 100644 --- a/src/main/java/org/opensearch/commons/authuser/AuthUserRequestBuilder.java +++ b/src/main/java/org/opensearch/commons/authuser/AuthUserRequestBuilder.java @@ -1,35 +1,14 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons.authuser; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; -import org.opensearch.common.Strings; import org.opensearch.commons.ConfigConstants; +import org.opensearch.core.common.Strings; public class AuthUserRequestBuilder { private final String auth; diff --git a/src/main/java/org/opensearch/commons/authuser/User.java b/src/main/java/org/opensearch/commons/authuser/User.java index 017405e9..a203b33d 100644 --- a/src/main/java/org/opensearch/commons/authuser/User.java +++ b/src/main/java/org/opensearch/commons/authuser/User.java @@ -1,36 +1,16 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
*/ package org.opensearch.commons.authuser; -import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -38,16 +18,18 @@ import org.apache.http.util.EntityUtils; import org.opensearch.client.Response; import org.opensearch.common.Nullable; -import org.opensearch.common.Strings; import org.opensearch.common.inject.internal.ToStringBuilder; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.commons.ConfigConstants; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; /** * Gets current Authenticated User - name, odfe roles. @@ -270,4 +252,12 @@ public List getCustomAttNames() { public String getRequestedTenant() { return requestedTenant; } + + public boolean isAdminDn(Settings settings) { + if (settings == null) { + return false; + } + List adminDns = settings.getAsList(ConfigConstants.OPENSEARCH_SECURITY_AUTHCZ_ADMIN_DN, Collections.emptyList()); + return adminDns.contains(this.name); + } } diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacyBaseMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacyBaseMessage.java new file mode 100644 index 00000000..580377d4 --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacyBaseMessage.java @@ -0,0 +1,107 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Map; + +import org.apache.http.client.utils.URIBuilder; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; + +/** + * This class holds the generic parameters required for a + * message. 
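+ * The concrete legacy messages defined alongside this class (Chime, Slack, custom webhook, email and SNS) add their channel-specific fields, while this base class carries the destination type, destination name, optional url and message content and writes them to the transport wire via the Writeable contract.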
+ */ +public abstract class LegacyBaseMessage implements Writeable { + + private final LegacyDestinationType destinationType; + protected String destinationName; + protected String url; + private final String content; + + LegacyBaseMessage(final LegacyDestinationType destinationType, final String destinationName, final String content) { + if (destinationType == null) { + throw new IllegalArgumentException("Channel type must be defined"); + } + if (!Strings.hasLength(destinationName)) { + throw new IllegalArgumentException("Channel name must be defined"); + } + this.destinationType = destinationType; + this.destinationName = destinationName; + this.content = content; + } + + LegacyBaseMessage(final LegacyDestinationType destinationType, final String destinationName, final String content, final String url) { + this(destinationType, destinationName, content); + if (url == null) { + throw new IllegalArgumentException("url is invalid or empty"); + } + this.url = url; + } + + LegacyBaseMessage(StreamInput streamInput) throws IOException { + this.destinationType = streamInput.readEnum(LegacyDestinationType.class); + this.destinationName = streamInput.readString(); + this.url = streamInput.readOptionalString(); + this.content = streamInput.readString(); + } + + public void setUrl(String url) { + this.url = url; + } + + public LegacyDestinationType getChannelType() { + return destinationType; + } + + public String getChannelName() { + return destinationName; + } + + public String getMessageContent() { + return content; + } + + public String getUrl() { + return url; + } + + public URI getUri() { + return buildUri(getUrl().trim(), null, null, -1, null, null); + } + + protected URI buildUri(String endpoint, String scheme, String host, int port, String path, Map queryParams) { + try { + if (Strings.isNullOrEmpty(endpoint)) { + if (Strings.isNullOrEmpty(scheme)) { + scheme = "https"; + } + URIBuilder uriBuilder = new URIBuilder(); + if (queryParams != null) { + for (Map.Entry e : queryParams.entrySet()) + uriBuilder.addParameter(e.getKey(), e.getValue()); + } + return uriBuilder.setScheme(scheme).setHost(host).setPort(port).setPath(path).build(); + } + return new URIBuilder(endpoint).build(); + } catch (URISyntaxException exception) { + throw new IllegalStateException("Error creating URI"); + } + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + streamOutput.writeEnum(destinationType); + streamOutput.writeString(destinationName); + streamOutput.writeOptionalString(url); + streamOutput.writeString(content); + } +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacyChimeMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacyChimeMessage.java new file mode 100644 index 00000000..cbdadb65 --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacyChimeMessage.java @@ -0,0 +1,70 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; + +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; + +/** + * This class holds the contents of an Chime message + */ +public class LegacyChimeMessage extends LegacyBaseMessage { + private final String message; + + private LegacyChimeMessage(final String destinationName, final String url, final String message) { + super(LegacyDestinationType.LEGACY_CHIME, destinationName, message, url); + + if 
(Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + + this.message = message; + } + + public LegacyChimeMessage(StreamInput streamInput) throws IOException { + super(streamInput); + this.message = super.getMessageContent(); + } + + @Override + public String toString() { + return "DestinationType: " + getChannelType() + ", DestinationName:" + destinationName + ", Url: " + url + ", Message: <...>"; + } + + public static class Builder { + private String message; + private final String destinationName; + private String url; + + public Builder(String destinationName) { + this.destinationName = destinationName; + } + + public LegacyChimeMessage.Builder withMessage(String message) { + this.message = message; + return this; + } + + public LegacyChimeMessage.Builder withUrl(String url) { + this.url = url; + return this; + } + + public LegacyChimeMessage build() { + return new LegacyChimeMessage(this.destinationName, this.url, this.message); + } + } + + public String getMessage() { + return message; + } + + public String getUrl() { + return url; + } +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessage.java new file mode 100644 index 00000000..08058d0f --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessage.java @@ -0,0 +1,254 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; +import java.net.URI; +import java.util.Map; + +import org.apache.http.client.methods.HttpPatch; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +/** + * This class holds the content of an CustomWebhook message + */ +public class LegacyCustomWebhookMessage extends LegacyBaseMessage { + + private final String message; + private final String url; + private final String scheme; + private final String host; + private final String method; + private final int port; + private String path; + private final Map queryParams; + private Map headerParams; + + private LegacyCustomWebhookMessage( + final String destinationName, + final String url, + final String scheme, + final String host, + final Integer port, + final String path, + final String method, + final Map queryParams, + final Map headerParams, + final String message + ) { + super(LegacyDestinationType.LEGACY_CUSTOM_WEBHOOK, destinationName, message); + + if (!Strings.isNullOrEmpty(url)) { + setUrl(url.trim()); + } + + if (Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + + this.scheme = Strings.isNullOrEmpty(scheme) ? "https" : scheme; + this.port = port == null ? 
-1 : port; + + if (!Strings.isNullOrEmpty(path)) { + if (!path.startsWith("/")) { + this.path = "/" + path; + } + } + + if (Strings.isNullOrEmpty(url) && Strings.isNullOrEmpty(host)) { + throw new IllegalArgumentException("Either fully qualified URL or host name should be provided"); + } + + if (Strings.isNullOrEmpty(method)) { + // Default to POST for backwards compatibility + this.method = "POST"; + } else if (!HttpPost.METHOD_NAME.equals(method) && !HttpPut.METHOD_NAME.equals(method) && !HttpPatch.METHOD_NAME.equals(method)) { + throw new IllegalArgumentException("Invalid method supplied. Only POST, PUT and PATCH are allowed"); + } else { + this.method = method; + } + + this.message = message; + this.url = url; + this.host = host; + this.queryParams = queryParams; + this.headerParams = headerParams; + } + + public LegacyCustomWebhookMessage(StreamInput streamInput) throws IOException { + super(streamInput); + this.message = super.getMessageContent(); + this.url = streamInput.readOptionalString(); + this.scheme = null; + this.host = null; + this.method = streamInput.readOptionalString(); + this.port = -1; + this.path = null; + this.queryParams = null; + if (streamInput.readBoolean()) { + @SuppressWarnings("unchecked") + Map headerParams = (Map) (Map) streamInput.readMap(); + this.headerParams = headerParams; + } + } + + @Override + public String toString() { + return "DestinationType: " + + getChannelType() + + ", DestinationName:" + + destinationName + + ", Url: " + + url + + ", scheme: " + + scheme + + ", Host: " + + host + + ", Port: " + + port + + ", Path: " + + path + + ", Method: " + + method + + ", Message: <...>"; + } + + public static class Builder { + private String message; + private final String destinationName; + private String url; + private String scheme; + private String host; + private Integer port; + private String path; + private String method; + private Map queryParams; + private Map headerParams; + + public Builder(String destinationName) { + this.destinationName = destinationName; + } + + public LegacyCustomWebhookMessage.Builder withScheme(String scheme) { + this.scheme = scheme; + return this; + } + + public LegacyCustomWebhookMessage.Builder withHost(String host) { + this.host = host; + return this; + } + + public LegacyCustomWebhookMessage.Builder withPort(Integer port) { + this.port = port; + return this; + } + + public LegacyCustomWebhookMessage.Builder withPath(String path) { + this.path = path; + return this; + } + + public LegacyCustomWebhookMessage.Builder withMethod(String method) { + this.method = method; + return this; + } + + public LegacyCustomWebhookMessage.Builder withQueryParams(Map queryParams) { + this.queryParams = queryParams; + return this; + } + + public LegacyCustomWebhookMessage.Builder withHeaderParams(Map headerParams) { + this.headerParams = headerParams; + return this; + } + + public LegacyCustomWebhookMessage.Builder withMessage(String message) { + this.message = message; + return this; + } + + public LegacyCustomWebhookMessage.Builder withUrl(String url) { + this.url = url; + return this; + } + + public LegacyCustomWebhookMessage build() { + return new LegacyCustomWebhookMessage( + this.destinationName, + this.url, + this.scheme, + this.host, + this.port, + this.path, + this.method, + this.queryParams, + this.headerParams, + this.message + ); + } + } + + public String getScheme() { + return scheme; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } + + public String getPath() { + return 
path; + } + + public String getMethod() { + return method; + } + + public Map getQueryParams() { + return queryParams; + } + + public Map getHeaderParams() { + return headerParams; + } + + public URI getUri() { + return buildUri(getUrl(), getScheme(), getHost(), getPort(), getPath(), getQueryParams()); + } + + public String getMessage() { + return message; + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + super.writeTo(streamOutput); + // Making LegacyCustomWebhookMessage streamable is purely to support the new pass through API from Alerting/ISM -> Notification + // plugin + // and it only supports LegacyCustomWebhookMessage when the url is already constructed by Alerting/ISM. + if (Strings.isNullOrEmpty(getUrl())) { + throw new IllegalStateException("Cannot use LegacyCustomWebhookMessage across transport wire without defining full url."); + } + streamOutput.writeOptionalString(url); + streamOutput.writeOptionalString(method); + streamOutput.writeBoolean(headerParams != null); + if (headerParams != null) { + @SuppressWarnings("unchecked") + Map headerParams = (Map) (Map) this.headerParams; + streamOutput.writeMap(headerParams); + } + } +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacyDestinationType.java b/src/main/java/org/opensearch/commons/destination/message/LegacyDestinationType.java new file mode 100644 index 00000000..cf4071aa --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacyDestinationType.java @@ -0,0 +1,17 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +/** + * Supported legacy notification destinations for Alerting and Index Management + */ +public enum LegacyDestinationType { + LEGACY_CHIME, + LEGACY_SLACK, + LEGACY_CUSTOM_WEBHOOK, + LEGACY_EMAIL, + LEGACY_SNS +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacyEmailMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacyEmailMessage.java new file mode 100644 index 00000000..b0fbc7ec --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacyEmailMessage.java @@ -0,0 +1,234 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; +import java.net.URI; +import java.util.List; + +import org.opensearch.commons.notifications.model.MethodType; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +/** + * This class holds the content of an CustomWebhook message + */ +public class LegacyEmailMessage extends LegacyBaseMessage { + + private final String accountName; + private final String host; + private final int port; + private final String method; + private final String from; + private final List recipients; + private final String subject; + private final String message; + + private LegacyEmailMessage( + final String destinationName, + final String accountName, + final String host, + final Integer port, + final String method, + final String from, + final List recipients, + final String subject, + final String message + ) { + super(LegacyDestinationType.LEGACY_EMAIL, destinationName, message); + + if (Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + + if 
(Strings.isNullOrEmpty(accountName)) { + throw new IllegalArgumentException("Account name should be provided"); + } + + if (Strings.isNullOrEmpty(host)) { + throw new IllegalArgumentException("Host name should be provided"); + } + + if (Strings.isNullOrEmpty(from)) { + throw new IllegalArgumentException("From address should be provided"); + } + + if (recipients == null || recipients.isEmpty()) { + throw new IllegalArgumentException("List of recipients should be provided"); + } + + this.message = message; + this.accountName = accountName; + this.host = host; + this.port = port == null ? 25 : port; + + if (Strings.isNullOrEmpty(method)) { + // Default to "none" + this.method = "none"; + } else if (!MethodType.NONE.toString().equals(method) + && !MethodType.SSL.toString().equals(method) + && !MethodType.START_TLS.toString().equals(method)) { + throw new IllegalArgumentException("Invalid method supplied. Only none, ssl and start_tls are allowed"); + } else { + this.method = method; + } + + this.from = from; + this.recipients = recipients; + this.subject = Strings.isNullOrEmpty(subject) ? destinationName : subject; + } + + public LegacyEmailMessage(StreamInput streamInput) throws IOException { + super(streamInput); + this.message = super.getMessageContent(); + this.accountName = streamInput.readString(); + this.host = streamInput.readString(); + this.port = streamInput.readInt(); + this.method = streamInput.readString(); + this.from = streamInput.readString(); + this.recipients = streamInput.readStringList(); + this.subject = streamInput.readString(); + } + + @Override + public String toString() { + return "DestinationType: " + + getChannelType() + + ", DestinationName:" + + destinationName + + ", AccountName:" + + accountName + + ", From: " + + from + + ", Host: " + + host + + ", Port: " + + port + + ", Method: " + + method + + ", Subject: <...>" + + ", Message: <...>"; + } + + public static class Builder { + private final String destinationName; + private String accountName; + private String host; + private Integer port; + private String method; + private String from; + private List recipients; + private String subject; + private String message; + + public Builder(String destinationName) { + this.destinationName = destinationName; + } + + public LegacyEmailMessage.Builder withAccountName(String accountName) { + this.accountName = accountName; + return this; + } + + public LegacyEmailMessage.Builder withHost(String host) { + this.host = host; + return this; + } + + public LegacyEmailMessage.Builder withPort(Integer port) { + this.port = port; + return this; + } + + public LegacyEmailMessage.Builder withMethod(String method) { + this.method = method; + return this; + } + + public LegacyEmailMessage.Builder withFrom(String from) { + this.from = from; + return this; + } + + public LegacyEmailMessage.Builder withRecipients(List recipients) { + this.recipients = recipients; + return this; + } + + public LegacyEmailMessage.Builder withSubject(String subject) { + this.subject = subject; + return this; + } + + public LegacyEmailMessage.Builder withMessage(String message) { + this.message = message; + return this; + } + + public LegacyEmailMessage build() { + return new LegacyEmailMessage( + this.destinationName, + this.accountName, + this.host, + this.port, + this.method, + this.from, + this.recipients, + this.subject, + this.message + ); + } + } + + public String getAccountName() { + return accountName; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } 
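+ // getMethod() returns the connection method; the constructor above defaults it to "none" (and the port to 25) when not supplied, and only none, ssl and start_tls are accepted.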
+ + public String getMethod() { + return method; + } + + public String getFrom() { + return from; + } + + public List getRecipients() { + return recipients; + } + + public String getSubject() { + return subject; + } + + public String getMessage() { + return message; + } + + public URI getUri() { + return buildUri(null, null, host, port, null, null); + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + super.writeTo(streamOutput); + streamOutput.writeString(accountName); + streamOutput.writeString(host); + streamOutput.writeInt(port); + streamOutput.writeString(method); + streamOutput.writeString(from); + streamOutput.writeStringCollection(recipients); + streamOutput.writeString(subject); + } +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacySNSMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacySNSMessage.java new file mode 100644 index 00000000..8fc7a554 --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacySNSMessage.java @@ -0,0 +1,155 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; + +import org.opensearch.commons.destination.util.Util; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +/** + * This class holds the content of an SNS message + */ +public class LegacySNSMessage extends LegacyBaseMessage { + + private final String subject; + private final String message; + private final String roleArn; + private final String topicArn; + private final String clusterName; + + private LegacySNSMessage( + final String destinationName, + final String roleArn, + final String topicArn, + final String clusterName, + final String subject, + final String message + ) { + super(LegacyDestinationType.LEGACY_SNS, destinationName, message); + + if (Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + if (Strings.isNullOrEmpty(roleArn) || !Util.isValidIAMArn(roleArn)) { + throw new IllegalArgumentException("Role arn is missing/invalid: " + roleArn); + } + + if (Strings.isNullOrEmpty(topicArn) || !Util.isValidSNSArn(topicArn)) { + throw new IllegalArgumentException("Topic arn is missing/invalid: " + topicArn); + } + + if (Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + + this.subject = subject; + this.message = message; + this.roleArn = roleArn; + this.topicArn = topicArn; + this.clusterName = clusterName; + } + + public LegacySNSMessage(StreamInput streamInput) throws java.io.IOException { + super(streamInput); + this.message = super.getMessageContent(); + this.subject = streamInput.readString(); + this.roleArn = streamInput.readString(); + this.topicArn = streamInput.readString(); + this.clusterName = streamInput.readString(); + } + + @Override + public String toString() { + return "DestinationType: " + + getChannelType() + + ", DestinationName: " + + destinationName + + ", RoleARn: " + + roleArn + + ", TopicArn: " + + topicArn + + ", ClusterName: " + + clusterName + + ", Subject: " + + subject + + ", Message: " + + message; + } + + public static class Builder { + private final String destinationName; + private String subject; + private String message; + private String roleArn; + private String topicArn; + private String clusterName; + 
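+ // Example usage (illustrative channel name and ARNs): new LegacySNSMessage.Builder("sns-channel").withRole("arn:aws:iam::123456789012:role/example-role").withTopicArn("arn:aws:sns:us-west-2:123456789012:example-topic").withMessage("test message").build(); build() rejects an empty message and role/topic ARNs that do not match the patterns in Util.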
+ public Builder(String destinationName) { + this.destinationName = destinationName; + } + + public Builder withSubject(String subject) { + this.subject = subject; + return this; + } + + public Builder withMessage(String message) { + this.message = message; + return this; + } + + public Builder withRole(String roleArn) { + this.roleArn = roleArn; + return this; + } + + public Builder withTopicArn(String topicArn) { + this.topicArn = topicArn; + return this; + } + + public Builder withClusterName(String clusterName) { + this.clusterName = clusterName; + return this; + } + + public LegacySNSMessage build() { + return new LegacySNSMessage(this.destinationName, this.roleArn, this.topicArn, this.clusterName, this.subject, this.message); + } + } + + public String getSubject() { + return subject; + } + + public String getMessage() { + return message; + } + + public String getRoleArn() { + return roleArn; + } + + public String getTopicArn() { + return topicArn; + } + + public String getClusterName() { + return clusterName; + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + super.writeTo(streamOutput); + streamOutput.writeString(subject); + streamOutput.writeString(roleArn); + streamOutput.writeString(topicArn); + streamOutput.writeString(clusterName); + } +} diff --git a/src/main/java/org/opensearch/commons/destination/message/LegacySlackMessage.java b/src/main/java/org/opensearch/commons/destination/message/LegacySlackMessage.java new file mode 100644 index 00000000..48cc842a --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/message/LegacySlackMessage.java @@ -0,0 +1,74 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import java.io.IOException; + +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; + +/** + * This class holds the content of an Slack message + */ +public class LegacySlackMessage extends LegacyBaseMessage { + private final String message; + + private LegacySlackMessage(final String destinationName, final String url, final String message) { + super(LegacyDestinationType.LEGACY_SLACK, destinationName, message, url); + + if (Strings.isNullOrEmpty(url)) { // add URL validation + throw new IllegalArgumentException("Fully qualified URL is missing/invalid: " + url); + } + + if (Strings.isNullOrEmpty(message)) { + throw new IllegalArgumentException("Message content is missing"); + } + + this.message = message; + } + + public LegacySlackMessage(StreamInput streamInput) throws IOException { + super(streamInput); + this.message = super.getMessageContent(); + } + + @Override + public String toString() { + return "DestinationType: " + getChannelType() + ", DestinationName:" + destinationName + ", Url: " + url + ", Message: <...>"; + } + + public static class Builder { + private String message; + private String destinationName; + private String url; + + public Builder(String channelName) { + this.destinationName = channelName; + } + + public LegacySlackMessage.Builder withMessage(String message) { + this.message = message; + return this; + } + + public LegacySlackMessage.Builder withUrl(String url) { + this.url = url; + return this; + } + + public LegacySlackMessage build() { + return new LegacySlackMessage(this.destinationName, this.url, this.message); + } + } + + public String getMessage() { + return message; + } + + public String getUrl() { + return url; + } +} diff --git 
a/src/main/java/org/opensearch/commons/destination/response/LegacyBaseResponse.java b/src/main/java/org/opensearch/commons/destination/response/LegacyBaseResponse.java new file mode 100644 index 00000000..218cb89a --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/response/LegacyBaseResponse.java @@ -0,0 +1,39 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.response; + +import java.io.IOException; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; + +/** + * This class holds the generic response attributes + */ +public abstract class LegacyBaseResponse implements Writeable { + protected Integer statusCode; + + LegacyBaseResponse(final Integer statusCode) { + if (statusCode == null) { + throw new IllegalArgumentException("status code is invalid"); + } + this.statusCode = statusCode; + } + + public LegacyBaseResponse(StreamInput streamInput) throws IOException { + this.statusCode = streamInput.readInt(); + } + + public int getStatusCode() { + return statusCode; + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + streamOutput.writeInt(statusCode); + } +} diff --git a/src/main/java/org/opensearch/commons/destination/response/LegacyDestinationResponse.java b/src/main/java/org/opensearch/commons/destination/response/LegacyDestinationResponse.java new file mode 100644 index 00000000..fd6467f6 --- /dev/null +++ b/src/main/java/org/opensearch/commons/destination/response/LegacyDestinationResponse.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.response; + +import java.io.IOException; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +/** + * This class is a place holder for destination response metadata + */ +public class LegacyDestinationResponse extends LegacyBaseResponse { + + private final String responseContent; + + private LegacyDestinationResponse(final String responseString, final Integer statusCode) { + super(statusCode); + if (responseString == null) { + throw new IllegalArgumentException("Response is missing"); + } + this.responseContent = responseString; + } + + public LegacyDestinationResponse(StreamInput streamInput) throws IOException { + super(streamInput); + this.responseContent = streamInput.readString(); + } + + public static class Builder { + private String responseContent; + private Integer statusCode; + + public LegacyDestinationResponse.Builder withResponseContent(String responseContent) { + this.responseContent = responseContent; + return this; + } + + public LegacyDestinationResponse.Builder withStatusCode(Integer statusCode) { + this.statusCode = statusCode; + return this; + } + + public LegacyDestinationResponse build() { + return new LegacyDestinationResponse(responseContent, statusCode); + } + } + + public String getResponseContent() { + return this.responseContent; + } + + @Override + public void writeTo(StreamOutput streamOutput) throws IOException { + super.writeTo(streamOutput); + streamOutput.writeString(responseContent); + } +} diff --git a/src/main/java/org/opensearch/commons/destination/util/Util.java b/src/main/java/org/opensearch/commons/destination/util/Util.java new file mode 100644 index 00000000..a97f04b2 --- /dev/null 
+++ b/src/main/java/org/opensearch/commons/destination/util/Util.java @@ -0,0 +1,35 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.util; + +import java.util.regex.Pattern; + +import org.opensearch.common.ValidationException; +import org.opensearch.core.common.Strings; + +public class Util { + private Util() {} + + public static final Pattern SNS_ARN_REGEX = Pattern + .compile("^arn:aws(-[^:]+)?:sns:([a-zA-Z0-9-]+):([0-9]{12}):([a-zA-Z0-9-_]+)(\\.fifo)?$"); + public static final Pattern IAM_ARN_REGEX = Pattern.compile("^arn:aws(-[^:]+)?:iam::([0-9]{12}):([a-zA-Z0-9-/_+=@.,]+)$"); + + public static String getRegion(String arn) { + // sample topic arn arn:aws:sns:us-west-2:075315751589:test-notification + if (isValidSNSArn(arn)) { + return arn.split(":")[3]; + } + throw new IllegalArgumentException("Unable to retrieve region from ARN " + arn); + } + + public static boolean isValidIAMArn(String arn) { + return Strings.hasLength(arn) && IAM_ARN_REGEX.matcher(arn).find(); + } + + public static boolean isValidSNSArn(String arn) throws ValidationException { + return Strings.hasLength(arn) && SNS_ARN_REGEX.matcher(arn).find(); + } +} diff --git a/src/main/java/org/opensearch/commons/rest/SecureRestClientBuilder.java b/src/main/java/org/opensearch/commons/rest/SecureRestClientBuilder.java index 109809a8..b88965b1 100644 --- a/src/main/java/org/opensearch/commons/rest/SecureRestClientBuilder.java +++ b/src/main/java/org/opensearch/commons/rest/SecureRestClientBuilder.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons.rest; @@ -54,9 +33,9 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestClientBuilder; import org.opensearch.client.RestHighLevelClient; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.commons.ConfigConstants; +import org.opensearch.core.common.Strings; /** * Provides builder to create low-level and high-level REST client to make calls to OpenSearch. @@ -67,7 +46,7 @@ * * Other usage: * RestClient restClient = new SecureRestClientBuilder("localhost", 9200, false) - * .setUserPassword("admin", "admin") + * .setUserPassword("admin", "myStrongPassword123") * .setTrustCerts(trustStorePath) * .build(); * @@ -153,6 +132,15 @@ public SecureRestClientBuilder(Settings settings, Path configPath) { hosts.add(new HttpHost(host, port, httpSSLEnabled ? 
ConfigConstants.HTTPS : ConfigConstants.HTTP)); } + public SecureRestClientBuilder(Settings settings, Path configPath, HttpHost[] httpHosts) { + this.httpSSLEnabled = settings.getAsBoolean(ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_ENABLED, false); + this.settings = settings; + this.configPath = configPath; + this.user = null; + this.passwd = null; + hosts.addAll(Arrays.asList(httpHosts)); + } + /** * Creates a low-level Rest client. * @return diff --git a/src/main/java/org/opensearch/commons/rest/TrustStore.java b/src/main/java/org/opensearch/commons/rest/TrustStore.java index 95f23796..52832b32 100644 --- a/src/main/java/org/opensearch/commons/rest/TrustStore.java +++ b/src/main/java/org/opensearch/commons/rest/TrustStore.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons.rest; diff --git a/src/main/kotlin/org/opensearch/commons/alerting/AlertingPluginInterface.kt b/src/main/kotlin/org/opensearch/commons/alerting/AlertingPluginInterface.kt new file mode 100644 index 00000000..3ce81671 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/AlertingPluginInterface.kt @@ -0,0 +1,356 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.alerting + +import org.opensearch.action.search.SearchResponse +import org.opensearch.client.node.NodeClient +import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest +import org.opensearch.commons.alerting.action.AcknowledgeAlertResponse +import org.opensearch.commons.alerting.action.AcknowledgeChainedAlertRequest +import org.opensearch.commons.alerting.action.AlertingActions +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest +import org.opensearch.commons.alerting.action.DeleteWorkflowResponse +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.action.GetAlertsResponse +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.action.GetFindingsResponse +import org.opensearch.commons.alerting.action.GetMonitorRequest +import org.opensearch.commons.alerting.action.GetMonitorResponse +import org.opensearch.commons.alerting.action.GetWorkflowAlertsRequest +import org.opensearch.commons.alerting.action.GetWorkflowAlertsResponse +import org.opensearch.commons.alerting.action.GetWorkflowRequest +import org.opensearch.commons.alerting.action.GetWorkflowResponse +import 
org.opensearch.commons.alerting.action.IndexMonitorRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.action.IndexWorkflowRequest +import org.opensearch.commons.alerting.action.IndexWorkflowResponse +import org.opensearch.commons.alerting.action.PublishFindingsRequest +import org.opensearch.commons.alerting.action.SearchMonitorRequest +import org.opensearch.commons.alerting.action.SubscribeFindingsResponse +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.commons.utils.recreateObject +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.common.io.stream.Writeable + +/** + * All the transport action plugin interfaces for the Alerting plugin + */ +object AlertingPluginInterface { + + /** + * Index monitor interface. + * @param client Node client for making transport action + * @param request The request object + * @param namedWriteableRegistry Registry for building aggregations + * @param listener The listener for getting response + */ + fun indexMonitor( + client: NodeClient, + request: IndexMonitorRequest, + namedWriteableRegistry: NamedWriteableRegistry, + listener: ActionListener + ) { + client.execute( + AlertingActions.INDEX_MONITOR_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response, namedWriteableRegistry) { + IndexMonitorResponse( + it + ) + } + } + ) + } + + fun deleteMonitor( + client: NodeClient, + request: DeleteMonitorRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.DELETE_MONITOR_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + DeleteMonitorResponse( + it + ) + } + } + ) + } + + /** + * Index monitor interface. + * @param client Node client for making transport action + * @param request The request object + * @param namedWriteableRegistry Registry for building aggregations + * @param listener The listener for getting response + */ + fun indexWorkflow( + client: NodeClient, + request: IndexWorkflowRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.INDEX_WORKFLOW_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + IndexWorkflowResponse( + it + ) + } + } + ) + } + + fun deleteWorkflow( + client: NodeClient, + request: DeleteWorkflowRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.DELETE_WORKFLOW_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + DeleteWorkflowResponse( + it + ) + } + } + ) + } + + /** + * Get Alerts interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun getAlerts( + client: NodeClient, + request: GetAlertsRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.GET_ALERTS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + GetAlertsResponse( + it + ) + } + } + ) + } + + /** + * Get Workflow Alerts interface. 
+ * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun getWorkflowAlerts( + client: NodeClient, + request: GetWorkflowAlertsRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.GET_WORKFLOW_ALERTS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + GetWorkflowAlertsResponse( + it + ) + } + } + ) + } + + /** + * Get Workflow interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun getWorkflow( + client: NodeClient, + request: GetWorkflowRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.GET_WORKFLOW_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + GetWorkflowResponse( + it + ) + } + } + ) + } + + /** + * Get Findings interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun getFindings( + client: NodeClient, + request: GetFindingsRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.GET_FINDINGS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + GetFindingsResponse( + it + ) + } + } + ) + } + + /** + * Acknowledge Alerts interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun acknowledgeAlerts( + client: NodeClient, + request: AcknowledgeAlertRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.ACKNOWLEDGE_ALERTS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + AcknowledgeAlertResponse( + it + ) + } + } + ) + } + + fun publishFinding( + client: NodeClient, + request: PublishFindingsRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.SUBSCRIBE_FINDINGS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + SubscribeFindingsResponse( + it + ) + } + } + ) + } + + /** + * Acknowledge Chained Alerts interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun acknowledgeChainedAlerts( + client: NodeClient, + request: AcknowledgeChainedAlertRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + AcknowledgeAlertResponse( + it + ) + } + } + ) + } + + /** + * Get Monitor interface. + * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun getMonitor( + client: NodeClient, + request: GetMonitorRequest, + listener: ActionListener + ) { + client.execute( + AlertingActions.GET_MONITOR_ACTION_TYPE, + request, + wrapActionListener(listener) { response -> + recreateObject(response) { + GetMonitorResponse( + it + ) + } + } + ) + } + + /** + * Search Monitors interface. 
+ * @param client Node client for making transport action + * @param request The request object + * @param listener The listener for getting response + */ + fun searchMonitors( + client: NodeClient, + request: SearchMonitorRequest, + listener: ActionListener<SearchResponse> + ) { + client.execute( + AlertingActions.SEARCH_MONITORS_ACTION_TYPE, + request, + // we do not use the wrapActionListener in this case since there is no need + // to recreate any object or specially handle onResponse / onFailure. It is + // simply returning a SearchResponse. + listener + ) + } + + @Suppress("UNCHECKED_CAST") + private fun <Response : BaseResponse> wrapActionListener( + listener: ActionListener<Response>, + recreate: (Writeable) -> Response + ): ActionListener<Response> { + return object : ActionListener<ActionResponse> { + override fun onResponse(response: ActionResponse) { + val recreated = response as? Response ?: recreate(response) + listener.onResponse(recreated) + } + + override fun onFailure(exception: java.lang.Exception) { + listener.onFailure(exception) + } + } as ActionListener<Response> + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequest.kt new file mode 100644 index 00000000..01f3fdd2 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequest.kt @@ -0,0 +1,48 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.support.WriteRequest +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException +import java.util.Collections + +class AcknowledgeAlertRequest : ActionRequest { + val monitorId: String + val alertIds: List<String> + val refreshPolicy: WriteRequest.RefreshPolicy + + constructor( + monitorId: String, + alertIds: List<String>, + refreshPolicy: WriteRequest.RefreshPolicy + ) : super() { + this.monitorId = monitorId + this.alertIds = alertIds + this.refreshPolicy = refreshPolicy + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // monitorId + Collections.unmodifiableList(sin.readStringList()), // alertIds + WriteRequest.RefreshPolicy.readFrom(sin) // refreshPolicy + ) + + override fun validate(): ActionRequestValidationException?
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorId) + out.writeStringCollection(alertIds) + refreshPolicy.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponse.kt new file mode 100644 index 00000000..38d81778 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponse.kt @@ -0,0 +1,77 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.util.Collections + +class AcknowledgeAlertResponse : BaseResponse { + + val acknowledged: List + val failed: List + val missing: List + + constructor( + acknowledged: List, + failed: List, + missing: List + ) : super() { + this.acknowledged = acknowledged + this.failed = failed + this.missing = missing + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + Collections.unmodifiableList(sin.readList(::Alert)), // acknowledged + Collections.unmodifiableList(sin.readList(::Alert)), // failed + Collections.unmodifiableList(sin.readStringList()) // missing + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeCollection(acknowledged) + out.writeCollection(failed) + out.writeStringCollection(missing) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject().startArray("success") + acknowledged.forEach { builder.value(it.id) } + builder.endArray().startArray("failed") + failed.forEach { buildFailedAlertAcknowledgeObject(builder, it) } + missing.forEach { buildMissingAlertAcknowledgeObject(builder, it) } + return builder.endArray().endObject() + } + + private fun buildFailedAlertAcknowledgeObject(builder: XContentBuilder, failedAlert: Alert) { + builder.startObject() + .startObject(failedAlert.id) + val reason = when (failedAlert.state) { + Alert.State.ERROR -> "Alert is in an error state and can not be acknowledged." + Alert.State.COMPLETED -> "Alert has already completed and can not be acknowledged." + Alert.State.ACKNOWLEDGED -> "Alert has already been acknowledged." 
+ else -> "Alert state unknown and can not be acknowledged" + } + builder.field("failed_reason", reason) + .endObject() + .endObject() + } + + private fun buildMissingAlertAcknowledgeObject(builder: XContentBuilder, alertID: String) { + builder.startObject() + .startObject(alertID) + .field("failed_reason", "Alert: $alertID does not exist (it may have already completed).") + .endObject() + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequest.kt new file mode 100644 index 00000000..81d1fef6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequest.kt @@ -0,0 +1,43 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException +import java.util.Collections + +/** Request DTO for acknowledging chained alerts generated by workflow.*/ +class AcknowledgeChainedAlertRequest : ActionRequest { + val workflowId: String + val alertIds: List + + constructor( + workflowId: String, + alertIds: List + ) : super() { + this.workflowId = workflowId + this.alertIds = alertIds + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // workflowId + Collections.unmodifiableList(sin.readStringList()) // alertIds + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeStringCollection(alertIds) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt new file mode 100644 index 00000000..fcf98261 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/AlertingActions.kt @@ -0,0 +1,91 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionType +import org.opensearch.action.search.SearchResponse + +object AlertingActions { + const val INDEX_MONITOR_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/write" + const val INDEX_WORKFLOW_ACTION_NAME = "cluster:admin/opensearch/alerting/workflow/write" + const val GET_ALERTS_ACTION_NAME = "cluster:admin/opendistro/alerting/alerts/get" + const val GET_WORKFLOW_ALERTS_ACTION_NAME = "cluster:admin/opensearch/alerting/workflow_alerts/get" + const val GET_WORKFLOW_ACTION_NAME = "cluster:admin/opensearch/alerting/workflow/get" + const val DELETE_MONITOR_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/delete" + const val DELETE_WORKFLOW_ACTION_NAME = "cluster:admin/opensearch/alerting/workflow/delete" + const val GET_FINDINGS_ACTION_NAME = "cluster:admin/opensearch/alerting/findings/get" + const val ACKNOWLEDGE_ALERTS_ACTION_NAME = "cluster:admin/opendistro/alerting/alerts/ack" + const val ACKNOWLEDGE_CHAINED_ALERTS_ACTION_NAME = "cluster:admin/opendistro/alerting/chained_alerts/ack" + const val SUBSCRIBE_FINDINGS_ACTION_NAME = "cluster:admin/opensearch/alerting/findings/subscribe" + const val 
GET_MONITOR_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/get" + const val SEARCH_MONITORS_ACTION_NAME = "cluster:admin/opendistro/alerting/monitor/search" + const val INDEX_COMMENT_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/write" + const val SEARCH_COMMENTS_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/search" + const val DELETE_COMMENT_ACTION_NAME = "cluster:admin/opensearch/alerting/comments/delete" + + @JvmField + val INDEX_MONITOR_ACTION_TYPE = + ActionType(INDEX_MONITOR_ACTION_NAME, ::IndexMonitorResponse) + + @JvmField + val INDEX_WORKFLOW_ACTION_TYPE = + ActionType(INDEX_WORKFLOW_ACTION_NAME, ::IndexWorkflowResponse) + + @JvmField + val GET_ALERTS_ACTION_TYPE = + ActionType(GET_ALERTS_ACTION_NAME, ::GetAlertsResponse) + + @JvmField + val GET_WORKFLOW_ALERTS_ACTION_TYPE = + ActionType(GET_WORKFLOW_ALERTS_ACTION_NAME, ::GetWorkflowAlertsResponse) + + @JvmField + val GET_WORKFLOW_ACTION_TYPE = + ActionType(GET_WORKFLOW_ACTION_NAME, ::GetWorkflowResponse) + + @JvmField + val DELETE_MONITOR_ACTION_TYPE = + ActionType(DELETE_MONITOR_ACTION_NAME, ::DeleteMonitorResponse) + + @JvmField + val DELETE_WORKFLOW_ACTION_TYPE = + ActionType(DELETE_WORKFLOW_ACTION_NAME, ::DeleteWorkflowResponse) + + @JvmField + val GET_FINDINGS_ACTION_TYPE = + ActionType(GET_FINDINGS_ACTION_NAME, ::GetFindingsResponse) + + @JvmField + val ACKNOWLEDGE_ALERTS_ACTION_TYPE = + ActionType(ACKNOWLEDGE_ALERTS_ACTION_NAME, ::AcknowledgeAlertResponse) + + @JvmField + val SUBSCRIBE_FINDINGS_ACTION_TYPE = + ActionType(SUBSCRIBE_FINDINGS_ACTION_NAME, ::SubscribeFindingsResponse) + + @JvmField + val ACKNOWLEDGE_CHAINED_ALERTS_ACTION_TYPE = + ActionType(ACKNOWLEDGE_CHAINED_ALERTS_ACTION_NAME, ::AcknowledgeAlertResponse) + + @JvmField + val GET_MONITOR_ACTION_TYPE = + ActionType(GET_MONITOR_ACTION_NAME, ::GetMonitorResponse) + + @JvmField + val SEARCH_MONITORS_ACTION_TYPE = + ActionType(SEARCH_MONITORS_ACTION_NAME, ::SearchResponse) + + @JvmField + val INDEX_COMMENT_ACTION_TYPE = + ActionType(INDEX_COMMENT_ACTION_NAME, ::IndexCommentResponse) + + @JvmField + val SEARCH_COMMENTS_ACTION_TYPE = + ActionType(SEARCH_COMMENTS_ACTION_NAME, ::SearchResponse) + + @JvmField + val DELETE_COMMENT_ACTION_TYPE = + ActionType(DELETE_COMMENT_ACTION_NAME, ::DeleteCommentResponse) +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt new file mode 100644 index 00000000..811dcd9e --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequest.kt @@ -0,0 +1,34 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class DeleteCommentRequest : ActionRequest { + val commentId: String + + constructor(commentId: String) : super() { + this.commentId = commentId + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + commentId = sin.readString() + ) + + override fun validate(): ActionRequestValidationException? 
{ + if (commentId.isBlank()) { + val exception = ActionRequestValidationException() + exception.addValidationError("comment id must not be blank") + return exception + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(commentId) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt new file mode 100644 index 00000000..f00fe266 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponse.kt @@ -0,0 +1,32 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder + +class DeleteCommentResponse : BaseResponse { + var commentId: String + + constructor( + id: String + ) : super() { + this.commentId = id + } + + constructor(sin: StreamInput) : this( + sin.readString() // commentId + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(commentId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(IndexUtils._ID, commentId) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequest.kt new file mode 100644 index 00000000..b0bd9992 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequest.kt @@ -0,0 +1,35 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.support.WriteRequest +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class DeleteMonitorRequest : ActionRequest { + + val monitorId: String + val refreshPolicy: WriteRequest.RefreshPolicy + + constructor(monitorId: String, refreshPolicy: WriteRequest.RefreshPolicy) : super() { + this.monitorId = monitorId + this.refreshPolicy = refreshPolicy + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + monitorId = sin.readString(), + refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin) + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorId) + refreshPolicy.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorResponse.kt new file mode 100644 index 00000000..90ac17ba --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorResponse.kt @@ -0,0 +1,38 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder + +class DeleteMonitorResponse : BaseResponse { + var id: String + var version: Long + + constructor( + id: String, + version: Long + ) : super() { + this.id = id + this.version = version + } + + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong() // version + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(IndexUtils._ID, id) + .field(IndexUtils._VERSION, version) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequest.kt new file mode 100644 index 00000000..a3907e83 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequest.kt @@ -0,0 +1,39 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class DeleteWorkflowRequest : ActionRequest { + + val workflowId: String + + /** + * Flag that indicates whether the delegate monitors should be deleted or not. + * If the flag is set to true, Delegate monitors will be deleted only in the case when they are part of the specified workflow and no other. + */ + val deleteDelegateMonitors: Boolean? + + constructor(workflowId: String, deleteDelegateMonitors: Boolean?) : super() { + this.workflowId = workflowId + this.deleteDelegateMonitors = deleteDelegateMonitors + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + workflowId = sin.readString(), + deleteDelegateMonitors = sin.readOptionalBoolean() + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeOptionalBoolean(deleteDelegateMonitors) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowResponse.kt new file mode 100644 index 00000000..3ccb9f41 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowResponse.kt @@ -0,0 +1,48 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder + +class DeleteWorkflowResponse : BaseResponse { + var id: String + var version: Long + var nonDeletedMonitors: List? = null + + constructor( + id: String, + version: Long, + nonDeletedMonitors: List? = null + ) : super() { + this.id = id + this.version = version + this.nonDeletedMonitors = nonDeletedMonitors + } + + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // version + sin.readOptionalStringList() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeOptionalStringCollection(nonDeletedMonitors) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(IndexUtils._ID, id) + .field(IndexUtils._VERSION, version) + .field(NON_DELETED_MONITORS, nonDeletedMonitors) + .endObject() + } + + companion object { + const val NON_DELETED_MONITORS = "NON_DELETED_MONITORS" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt new file mode 100644 index 00000000..801edc47 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutAction.kt @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionType + +class DocLevelMonitorFanOutAction private constructor() : ActionType(NAME, ::DocLevelMonitorFanOutResponse) { + companion object { + val INSTANCE = DocLevelMonitorFanOutAction() + const val NAME = "cluster:admin/opensearch/alerting/monitor/doclevel/fanout" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt new file mode 100644 index 00000000..fe5cfe29 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequest.kt @@ -0,0 +1,101 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.IndexExecutionContext +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorMetadata +import org.opensearch.commons.alerting.model.WorkflowRunContext +import 
org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.index.shard.ShardId +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class DocLevelMonitorFanOutRequest : ActionRequest, ToXContentObject { + val monitor: Monitor + val dryRun: Boolean + val monitorMetadata: MonitorMetadata + val executionId: String + val indexExecutionContext: IndexExecutionContext? + val shardIds: List + val concreteIndicesSeenSoFar: List + val workflowRunContext: WorkflowRunContext? + + constructor( + monitor: Monitor, + dryRun: Boolean, + monitorMetadata: MonitorMetadata, + executionId: String, + indexExecutionContext: IndexExecutionContext?, + shardIds: List, + concreteIndicesSeenSoFar: List, + workflowRunContext: WorkflowRunContext? + ) : super() { + this.monitor = monitor + this.dryRun = dryRun + this.monitorMetadata = monitorMetadata + this.executionId = executionId + this.indexExecutionContext = indexExecutionContext + this.shardIds = shardIds + this.concreteIndicesSeenSoFar = concreteIndicesSeenSoFar + this.workflowRunContext = workflowRunContext + require(false == shardIds.isEmpty()) { } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + monitor = Monitor.readFrom(sin)!!, + dryRun = sin.readBoolean(), + monitorMetadata = MonitorMetadata.readFrom(sin), + executionId = sin.readString(), + shardIds = sin.readList(::ShardId), + concreteIndicesSeenSoFar = sin.readStringList(), + workflowRunContext = if (sin.readBoolean()) { + WorkflowRunContext(sin) + } else { null }, + indexExecutionContext = IndexExecutionContext(sin) + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + monitor.writeTo(out) + out.writeBoolean(dryRun) + monitorMetadata.writeTo(out) + out.writeString(executionId) + out.writeCollection(shardIds) + out.writeStringCollection(concreteIndicesSeenSoFar) + out.writeBoolean(workflowRunContext != null) + workflowRunContext?.writeTo(out) + indexExecutionContext?.writeTo(out) + } + + override fun validate(): ActionRequestValidationException? { + var actionValidationException: ActionRequestValidationException? 
= null + if (shardIds.isEmpty()) { + actionValidationException = ActionRequestValidationException() + actionValidationException.addValidationError("shard_ids is null or empty") + } + return actionValidationException + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("monitor", monitor) + .field("dry_run", dryRun) + .field("execution_id", executionId) + .field("index_execution_context", indexExecutionContext) + .field("shard_ids", shardIds) + .field("concrete_indices", concreteIndicesSeenSoFar) + .field("workflow_run_context", workflowRunContext) + return builder.endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt new file mode 100644 index 00000000..6e5cde55 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponse.kt @@ -0,0 +1,92 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.DocumentLevelTriggerRunResult +import org.opensearch.commons.alerting.model.InputRunResults +import org.opensearch.commons.alerting.util.AlertingException +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class DocLevelMonitorFanOutResponse : ActionResponse, ToXContentObject { + val nodeId: String + val executionId: String + val monitorId: String + val lastRunContexts: MutableMap + val inputResults: InputRunResults + val triggerResults: Map + val exception: AlertingException? + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + nodeId = sin.readString(), + executionId = sin.readString(), + monitorId = sin.readString(), + lastRunContexts = sin.readMap()!! as MutableMap, + inputResults = InputRunResults.readFrom(sin), + triggerResults = suppressWarning(sin.readMap(StreamInput::readString, DocumentLevelTriggerRunResult::readFrom)), + exception = sin.readException() + ) + + constructor( + nodeId: String, + executionId: String, + monitorId: String, + lastRunContexts: MutableMap, + inputResults: InputRunResults = InputRunResults(), // partial, + triggerResults: Map = mapOf(), + exception: AlertingException? 
= null + ) : super() { + this.nodeId = nodeId + this.executionId = executionId + this.monitorId = monitorId + this.lastRunContexts = lastRunContexts + this.inputResults = inputResults + this.triggerResults = triggerResults + this.exception = exception + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(nodeId) + out.writeString(executionId) + out.writeString(monitorId) + out.writeMap(lastRunContexts) + inputResults.writeTo(out) + out.writeMap( + triggerResults, + StreamOutput::writeString, + { stream, stats -> stats.writeTo(stream) } + ) + out.writeException(exception) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("node_id", nodeId) + .field("execution_id", executionId) + .field("monitor_id", monitorId) + .field("last_run_contexts", lastRunContexts) + .field("input_results", inputResults) + .field("trigger_results", triggerResults) + .field("exception", exception) + .endObject() + return builder + } + + companion object { + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt new file mode 100644 index 00000000..4592a9be --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequest.kt @@ -0,0 +1,78 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.index.query.BoolQueryBuilder +import java.io.IOException + +class GetAlertsRequest : ActionRequest { + val table: Table + val severityLevel: String + val alertState: String + val monitorId: String? + val alertIndex: String? + val monitorIds: List? + val workflowIds: List? + val alertIds: List? + val boolQueryBuilder: BoolQueryBuilder? + + constructor( + table: Table, + severityLevel: String, + alertState: String, + monitorId: String?, + alertIndex: String?, + monitorIds: List? = null, + workflowIds: List? = null, + alertIds: List? = null, + boolQueryBuilder: BoolQueryBuilder? = null + ) : super() { + this.table = table + this.severityLevel = severityLevel + this.alertState = alertState + this.monitorId = monitorId + this.alertIndex = alertIndex + this.monitorIds = monitorIds + this.workflowIds = workflowIds + this.alertIds = alertIds + this.boolQueryBuilder = boolQueryBuilder + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + table = Table.readFrom(sin), + severityLevel = sin.readString(), + alertState = sin.readString(), + monitorId = sin.readOptionalString(), + alertIndex = sin.readOptionalString(), + monitorIds = sin.readOptionalStringList(), + workflowIds = sin.readOptionalStringList(), + alertIds = sin.readOptionalStringList(), + boolQueryBuilder = if (sin.readOptionalBoolean() == true) BoolQueryBuilder(sin) else null + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + table.writeTo(out) + out.writeString(severityLevel) + out.writeString(alertState) + out.writeOptionalString(monitorId) + out.writeOptionalString(alertIndex) + out.writeOptionalStringCollection(monitorIds) + out.writeOptionalStringCollection(workflowIds) + out.writeOptionalStringCollection(alertIds) + if (boolQueryBuilder != null) { + out.writeOptionalBoolean(true) + boolQueryBuilder.writeTo(out) + } else { + out.writeOptionalBoolean(false) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponse.kt new file mode 100644 index 00000000..50a4ec53 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponse.kt @@ -0,0 +1,47 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.util.Collections + +class GetAlertsResponse : BaseResponse { + val alerts: List + + // totalAlerts is not the same as the size of alerts because there can be 30 alerts from the request, but + // the request only asked for 5 alerts, so totalAlerts will be 30, but alerts will only contain 5 alerts + val totalAlerts: Int? + + constructor( + alerts: List, + totalAlerts: Int? + ) : super() { + this.alerts = alerts + this.totalAlerts = totalAlerts + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + alerts = Collections.unmodifiableList(sin.readList(::Alert)), + totalAlerts = sin.readOptionalInt() + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeCollection(alerts) + out.writeOptionalInt(totalAlerts) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("alerts", alerts) + .field("totalAlerts", totalAlerts) + + return builder.endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequest.kt new file mode 100644 index 00000000..59dd3c39 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequest.kt @@ -0,0 +1,57 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.index.query.BoolQueryBuilder +import java.io.IOException + +class GetFindingsRequest : ActionRequest { + val findingId: String? + val table: Table + val monitorId: String? + val monitorIds: List? + val findingIndex: String? + val boolQueryBuilder: BoolQueryBuilder? + constructor( + findingId: String?, + table: Table, + monitorId: String? = null, + findingIndexName: String? = null, + monitorIds: List? = null, + boolQueryBuilder: BoolQueryBuilder? 
= null + ) : super() { + this.findingId = findingId + this.table = table + this.monitorId = monitorId + this.findingIndex = findingIndexName + this.monitorIds = monitorIds + this.boolQueryBuilder = boolQueryBuilder + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + findingId = sin.readOptionalString(), + table = Table.readFrom(sin), + monitorId = sin.readOptionalString(), + findingIndexName = sin.readOptionalString(), + monitorIds = sin.readOptionalStringList(), + boolQueryBuilder = BoolQueryBuilder(sin) + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeOptionalString(findingId) + table.writeTo(out) + out.writeOptionalString(monitorId) + out.writeOptionalString(findingIndex) + out.writeOptionalStringCollection(monitorIds) + boolQueryBuilder?.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponse.kt new file mode 100644 index 00000000..9caeebde --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponse.kt @@ -0,0 +1,61 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.FindingWithDocs +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class GetFindingsResponse : BaseResponse { + private var status: RestStatus + var totalFindings: Int? 
+ var findings: List + + constructor( + status: RestStatus, + totalFindings: Int?, + findings: List + ) : super() { + this.status = status + this.totalFindings = totalFindings + this.findings = findings + } + + @Throws(IOException::class) + constructor(sin: StreamInput) { + this.status = sin.readEnum(RestStatus::class.java) + val findings = mutableListOf() + this.totalFindings = sin.readOptionalInt() + var currentSize = sin.readInt() + for (i in 0 until currentSize) { + findings.add(FindingWithDocs.readFrom(sin)) + } + this.findings = findings + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeEnum(status) + out.writeOptionalInt(totalFindings) + out.writeInt(findings.size) + for (finding in findings) { + finding.writeTo(out) + } + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("total_findings", totalFindings) + .field("findings", findings) + + return builder.endObject() + } + + override fun getStatus(): RestStatus { + return this.status + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequest.kt new file mode 100644 index 00000000..80676a04 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequest.kt @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import org.opensearch.search.fetch.subphase.FetchSourceContext +import java.io.IOException + +class GetMonitorRequest : ActionRequest { + val monitorId: String + val version: Long + val method: RestRequest.Method + val srcContext: FetchSourceContext? + + constructor( + monitorId: String, + version: Long, + method: RestRequest.Method, + srcContext: FetchSourceContext? + ) : super() { + this.monitorId = monitorId + this.version = version + this.method = method + this.srcContext = srcContext + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // monitorId + sin.readLong(), // version + sin.readEnum(RestRequest.Method::class.java), // method + if (sin.readBoolean()) { + FetchSourceContext(sin) // srcContext + } else { + null + } + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorId) + out.writeLong(version) + out.writeEnum(method) + out.writeBoolean(srcContext != null) + srcContext?.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponse.kt new file mode 100644 index 00000000..7984ed07 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponse.kt @@ -0,0 +1,128 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentFragment +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class GetMonitorResponse : BaseResponse { + var id: String + var version: Long + var seqNo: Long + var primaryTerm: Long + var monitor: Monitor? + var associatedWorkflows: List? + + constructor( + id: String, + version: Long, + seqNo: Long, + primaryTerm: Long, + monitor: Monitor?, + associatedCompositeMonitors: List? + ) : super() { + this.id = id + this.version = version + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.monitor = monitor + this.associatedWorkflows = associatedCompositeMonitors ?: emptyList() + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), // id + version = sin.readLong(), // version + seqNo = sin.readLong(), // seqNo + primaryTerm = sin.readLong(), // primaryTerm + monitor = if (sin.readBoolean()) { + Monitor.readFrom(sin) // monitor + } else { + null + }, + associatedCompositeMonitors = sin.readList((AssociatedWorkflow)::readFrom) + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + if (monitor != null) { + out.writeBoolean(true) + monitor?.writeTo(out) + } else { + out.writeBoolean(false) + } + associatedWorkflows?.forEach { + it.writeTo(out) + } + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(_ID, id) + .field(_VERSION, version) + .field(_SEQ_NO, seqNo) + .field(_PRIMARY_TERM, primaryTerm) + if (monitor != null) { + builder.field("monitor", monitor) + } + if (associatedWorkflows != null) { + builder.field("associated_workflows", associatedWorkflows!!.toTypedArray()) + } + return builder.endObject() + } + + class AssociatedWorkflow : ToXContentFragment { + val id: String + val name: String + + constructor(id: String, name: String) { + this.id = id + this.name = name + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params?): XContentBuilder { + builder.startObject() + .field("id", id) + .field("name", name) + .endObject() + 
return builder + } + + fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readString() + ) + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): AssociatedWorkflow { + return AssociatedWorkflow(sin) + } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequest.kt new file mode 100644 index 00000000..9d08fa96 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequest.kt @@ -0,0 +1,72 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class GetWorkflowAlertsRequest : ActionRequest { + val table: Table + val severityLevel: String + val alertState: String + val alertIndex: String? + val associatedAlertsIndex: String? + val monitorIds: List? + val workflowIds: List? + val alertIds: List? + val getAssociatedAlerts: Boolean + + constructor( + table: Table, + severityLevel: String, + alertState: String, + alertIndex: String?, + associatedAlertsIndex: String?, + monitorIds: List? = null, + workflowIds: List? = null, + alertIds: List? = null, + getAssociatedAlerts: Boolean + ) : super() { + this.table = table + this.severityLevel = severityLevel + this.alertState = alertState + this.alertIndex = alertIndex + this.associatedAlertsIndex = associatedAlertsIndex + this.monitorIds = monitorIds + this.workflowIds = workflowIds + this.alertIds = alertIds + this.getAssociatedAlerts = getAssociatedAlerts + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + table = Table.readFrom(sin), + severityLevel = sin.readString(), + alertState = sin.readString(), + alertIndex = sin.readOptionalString(), + associatedAlertsIndex = sin.readOptionalString(), + monitorIds = sin.readOptionalStringList(), + workflowIds = sin.readOptionalStringList(), + alertIds = sin.readOptionalStringList(), + getAssociatedAlerts = sin.readBoolean() + ) + + override fun validate(): ActionRequestValidationException? 
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + table.writeTo(out) + out.writeString(severityLevel) + out.writeString(alertState) + out.writeOptionalString(alertIndex) + out.writeOptionalString(associatedAlertsIndex) + out.writeOptionalStringCollection(monitorIds) + out.writeOptionalStringCollection(workflowIds) + out.writeOptionalStringCollection(alertIds) + out.writeBoolean(getAssociatedAlerts) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponse.kt new file mode 100644 index 00000000..5104f344 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponse.kt @@ -0,0 +1,52 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.util.Collections + +class GetWorkflowAlertsResponse : BaseResponse { + val alerts: List + val associatedAlerts: List + + // totalAlerts is not the same as the size of alerts because there can be 30 alerts from the request, but + // the request only asked for 5 alerts, so totalAlerts will be 30, but alerts will only contain 5 alerts + val totalAlerts: Int? + + constructor( + alerts: List, + associatedAlerts: List, + totalAlerts: Int? + ) : super() { + this.alerts = alerts + this.associatedAlerts = associatedAlerts + this.totalAlerts = totalAlerts + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + alerts = Collections.unmodifiableList(sin.readList(::Alert)), + associatedAlerts = Collections.unmodifiableList(sin.readList(::Alert)), + totalAlerts = sin.readOptionalInt() + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeCollection(alerts) + out.writeCollection(associatedAlerts) + out.writeOptionalInt(totalAlerts) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("alerts", alerts) + .field("associatedAlerts", associatedAlerts) + .field("totalAlerts", totalAlerts) + return builder.endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowRequest.kt new file mode 100644 index 00000000..cff81206 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowRequest.kt @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import java.io.IOException + +class GetWorkflowRequest : ActionRequest { + val workflowId: String + val method: RestRequest.Method + + constructor( + workflowId: String, + method: RestRequest.Method + ) : super() { + this.workflowId = workflowId + this.method = method + } + + 
@Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // workflowId + sin.readEnum(RestRequest.Method::class.java) // method + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeEnum(method) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponse.kt new file mode 100644 index 00000000..ca6db115 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponse.kt @@ -0,0 +1,91 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class GetWorkflowResponse : BaseResponse { + var id: String + var version: Long + var seqNo: Long + var primaryTerm: Long + private var status: RestStatus + var workflow: Workflow? + + constructor( + id: String, + version: Long, + seqNo: Long, + primaryTerm: Long, + status: RestStatus, + workflow: Workflow? 
+ ) : super() { + this.id = id + this.version = version + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.status = status + this.workflow = workflow + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // version + sin.readLong(), // seqNo + sin.readLong(), // primaryTerm + sin.readEnum(RestStatus::class.java), // RestStatus + if (sin.readBoolean()) { + Workflow.readFrom(sin) // monitor + } else { + null + } + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + out.writeEnum(status) + if (workflow != null) { + out.writeBoolean(true) + workflow?.writeTo(out) + } else { + out.writeBoolean(false) + } + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(_ID, id) + .field(_VERSION, version) + .field(_SEQ_NO, seqNo) + .field(_PRIMARY_TERM, primaryTerm) + if (workflow != null) { + builder.field("workflow", workflow) + } + + return builder.endObject() + } + + override fun getStatus(): RestStatus { + return this.status + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt new file mode 100644 index 00000000..3eb05f13 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequest.kt @@ -0,0 +1,78 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import java.io.IOException + +/** + * Request to index/create a Comment + * + * entityId: the entity that the Comment is attached to and therefore associated with (e.g. in Alerting, + * the entity is an Alert). This field is expected to be non-blank if the request is to create a new Comment. + * + * commentId: the ID of an existing Comment. This field is expected to be non-blank if the request is to + * update an existing Comment. + */ +class IndexCommentRequest : ActionRequest { + val entityId: String + val entityType: String + val commentId: String + val seqNo: Long + val primaryTerm: Long + val method: RestRequest.Method + var content: String + + constructor( + entityId: String, + entityType: String, + commentId: String, + seqNo: Long, + primaryTerm: Long, + method: RestRequest.Method, + content: String + ) : super() { + this.entityId = entityId + this.entityType = entityType + this.commentId = commentId + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.method = method + this.content = content + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + entityId = sin.readString(), + entityType = sin.readString(), + commentId = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + method = sin.readEnum(RestRequest.Method::class.java), + content = sin.readString() + ) + + override fun validate(): ActionRequestValidationException? 
{ + if (method == RestRequest.Method.POST && entityId.isBlank() || + method == RestRequest.Method.PUT && commentId.isBlank() + ) { + val exception = ActionRequestValidationException() + exception.addValidationError("id must not be blank") + return exception + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(entityId) + out.writeString(entityType) + out.writeString(commentId) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + out.writeEnum(method) + out.writeString(content) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt new file mode 100644 index 00000000..7c9bb9b7 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponse.kt @@ -0,0 +1,57 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Comment +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class IndexCommentResponse : BaseResponse { + var id: String + var seqNo: Long + var primaryTerm: Long + var comment: Comment + + constructor( + id: String, + seqNo: Long, + primaryTerm: Long, + comment: Comment + ) : super() { + this.id = id + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.comment = comment + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // seqNo + sin.readLong(), // primaryTerm + Comment.readFrom(sin) // comment + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + comment.writeTo(out) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(_ID, id) + .field(_SEQ_NO, seqNo) + .field(_PRIMARY_TERM, primaryTerm) + .field("comment", comment) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequest.kt new file mode 100644 index 00000000..34e8d314 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequest.kt @@ -0,0 +1,64 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.support.WriteRequest +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import java.io.IOException + +class IndexMonitorRequest : ActionRequest { + val monitorId: String + val seqNo: Long + val primaryTerm: Long + val refreshPolicy: WriteRequest.RefreshPolicy + val method: RestRequest.Method + var monitor: Monitor + val rbacRoles: List? 
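// A minimal sketch (illustrative values, not part of this change) of how IndexCommentRequest.validate()
// separates create from update: POST requires a non-blank entityId, PUT requires a non-blank commentId.
// The 0L seqNo/primaryTerm placeholders stand in for "unassigned" values.
fun indexCommentValidationSketch() {
    val create = IndexCommentRequest("alert-1", "alert", "", 0L, 0L, RestRequest.Method.POST, "investigating")
    check(create.validate() == null) // create with an entity id passes

    val update = IndexCommentRequest("alert-1", "alert", "", 0L, 0L, RestRequest.Method.PUT, "edited text")
    check(update.validate() != null) // update without a comment id is rejected
}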
+ + constructor( + monitorId: String, + seqNo: Long, + primaryTerm: Long, + refreshPolicy: WriteRequest.RefreshPolicy, + method: RestRequest.Method, + monitor: Monitor, + rbacRoles: List? = null + ) : super() { + this.monitorId = monitorId + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.refreshPolicy = refreshPolicy + this.method = method + this.monitor = monitor + this.rbacRoles = rbacRoles + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + monitorId = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin), + method = sin.readEnum(RestRequest.Method::class.java), + monitor = Monitor.readFrom(sin) as Monitor, + rbacRoles = sin.readOptionalStringList() + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorId) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + refreshPolicy.writeTo(out) + out.writeEnum(method) + monitor.writeTo(out) + out.writeOptionalStringCollection(rbacRoles) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponse.kt new file mode 100644 index 00000000..0464bff8 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponse.kt @@ -0,0 +1,64 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._PRIMARY_TERM +import org.opensearch.commons.alerting.util.IndexUtils.Companion._SEQ_NO +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class IndexMonitorResponse : BaseResponse { + var id: String + var version: Long + var seqNo: Long + var primaryTerm: Long + var monitor: Monitor + + constructor( + id: String, + version: Long, + seqNo: Long, + primaryTerm: Long, + monitor: Monitor + ) : super() { + this.id = id + this.version = version + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.monitor = monitor + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // version + sin.readLong(), // seqNo + sin.readLong(), // primaryTerm + Monitor.readFrom(sin) as Monitor // monitor + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + monitor.writeTo(out) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(_ID, id) + .field(_VERSION, version) + .field(_SEQ_NO, seqNo) + .field(_PRIMARY_TERM, primaryTerm) + .field("monitor", monitor) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequest.kt new file mode 100644 index 
00000000..1033e651 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequest.kt @@ -0,0 +1,165 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.ValidateActions +import org.opensearch.action.support.WriteRequest +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.rest.RestRequest +import java.io.IOException +import java.util.stream.Collectors + +class IndexWorkflowRequest : ActionRequest { + val workflowId: String + val seqNo: Long + val primaryTerm: Long + val refreshPolicy: WriteRequest.RefreshPolicy + val method: RestRequest.Method + var workflow: Workflow + val rbacRoles: List? + + private val MAX_DELEGATE_SIZE = 25 + + constructor( + workflowId: String, + seqNo: Long, + primaryTerm: Long, + refreshPolicy: WriteRequest.RefreshPolicy, + method: RestRequest.Method, + workflow: Workflow, + rbacRoles: List? = null + ) : super() { + this.workflowId = workflowId + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.refreshPolicy = refreshPolicy + this.method = method + this.workflow = workflow + this.rbacRoles = rbacRoles + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + workflowId = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + refreshPolicy = WriteRequest.RefreshPolicy.readFrom(sin), + method = sin.readEnum(RestRequest.Method::class.java), + workflow = Workflow.readFrom(sin) as Workflow, + rbacRoles = sin.readOptionalStringList() + ) + + override fun validate(): ActionRequestValidationException? { + var validationException: ActionRequestValidationException? 
= null + + if (workflow.inputs.isEmpty()) { + validationException = ValidateActions.addValidationError( + "Input list can not be empty.", + validationException + ) + return validationException + } + if (workflow.inputs.size > 1) { + validationException = ValidateActions.addValidationError( + "Input list can contain only one element.", + validationException + ) + return validationException + } + if (workflow.inputs[0] !is CompositeInput) { + validationException = ValidateActions.addValidationError( + "When creating a workflow input must be CompositeInput", + validationException + ) + } + val compositeInput = workflow.inputs[0] as CompositeInput + val monitorIds = compositeInput.sequence.delegates.stream().map { it.monitorId }.collect(Collectors.toList()) + + if (monitorIds.isNullOrEmpty()) { + validationException = ValidateActions.addValidationError( + "Delegates list can not be empty.", + validationException + ) + // Break the flow because next checks are dependant on non-null monitorIds + return validationException + } + + if (monitorIds.size > MAX_DELEGATE_SIZE) { + validationException = ValidateActions.addValidationError( + "Delegates list can not be larger then $MAX_DELEGATE_SIZE.", + validationException + ) + } + + if (monitorIds.toSet().size != monitorIds.size) { + validationException = ValidateActions.addValidationError( + "Duplicate delegates not allowed", + validationException + ) + } + val delegates = compositeInput.sequence.delegates + val orderSet = delegates.stream().filter { it.order > 0 }.map { it.order }.collect(Collectors.toSet()) + if (orderSet.size != delegates.size) { + validationException = ValidateActions.addValidationError( + "Sequence ordering of delegate monitor shouldn't contain duplicate order values", + validationException + ) + } + + val monitorIdOrderMap: Map = delegates.associate { it.monitorId to it.order } + delegates.forEach { + if (it.chainedMonitorFindings != null) { + if (it.chainedMonitorFindings.monitorId != null) { + if (monitorIdOrderMap.containsKey(it.chainedMonitorFindings.monitorId) == false) { + validationException = ValidateActions.addValidationError( + "Chained Findings Monitor ${it.chainedMonitorFindings.monitorId} doesn't exist in sequence", + validationException + ) + // Break the flow because next check will generate the NPE + return validationException + } + if (it.order <= monitorIdOrderMap[it.chainedMonitorFindings.monitorId]!!) { + validationException = ValidateActions.addValidationError( + "Chained Findings Monitor ${it.chainedMonitorFindings.monitorId} should be executed before monitor ${it.monitorId}", + validationException + ) + } + } else { + for (monitorId in it.chainedMonitorFindings.monitorIds) { + if (!monitorIdOrderMap.containsKey(monitorId)) { + validationException = ValidateActions.addValidationError( + "Chained Findings Monitor $monitorId doesn't exist in sequence", + validationException + ) + return validationException + } else { + val order = monitorIdOrderMap.get(monitorId)!! + if (order >= it.order) { + return ValidateActions.addValidationError( + "Chained Findings Monitor ${it.chainedMonitorFindings.monitorId} should be executed before monitor ${it.monitorId}. 
" + + "Order of monitor being chained [$order] should be smaller than order of monitor using findings as source data [${it.order}] in sequence", + validationException + ) + } + } + } + } + } + } + return validationException + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + refreshPolicy.writeTo(out) + out.writeEnum(method) + workflow.writeTo(out) + out.writeOptionalStringCollection(rbacRoles) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponse.kt new file mode 100644 index 00000000..b54d0d85 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponse.kt @@ -0,0 +1,61 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class IndexWorkflowResponse : BaseResponse { + var id: String + var version: Long + var seqNo: Long + var primaryTerm: Long + var workflow: Workflow + + constructor( + id: String, + version: Long, + seqNo: Long, + primaryTerm: Long, + workflow: Workflow + ) : super() { + this.id = id + this.version = version + this.seqNo = seqNo + this.primaryTerm = primaryTerm + this.workflow = workflow + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readLong(), // version + sin.readLong(), // seqNo + sin.readLong(), // primaryTerm + Workflow.readFrom(sin) as Workflow // workflow + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + workflow.writeTo(out) + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(IndexUtils._ID, id) + .field(IndexUtils._VERSION, version) + .field(IndexUtils._SEQ_NO, seqNo) + .field(IndexUtils._PRIMARY_TERM, primaryTerm) + .field("workflow", workflow) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequest.kt new file mode 100644 index 00000000..6e8f78f1 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequest.kt @@ -0,0 +1,38 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class PublishFindingsRequest : ActionRequest { + + val monitorId: String + + val finding: Finding + + constructor( + monitorId: String, + finding: Finding + ) : super() { + this.monitorId = monitorId + this.finding = finding + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + monitorId = 
sin.readString(), + finding = Finding.readFrom(sin) + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + override fun writeTo(out: StreamOutput) { + out.writeString(monitorId) + finding.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt new file mode 100644 index 00000000..e0d150d0 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequest.kt @@ -0,0 +1,33 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.search.SearchRequest +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class SearchCommentRequest : ActionRequest { + + val searchRequest: SearchRequest + + constructor( + searchRequest: SearchRequest + ) : super() { + this.searchRequest = searchRequest + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + searchRequest = SearchRequest(sin) + ) + + override fun validate(): ActionRequestValidationException? { + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + searchRequest.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequest.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequest.kt new file mode 100644 index 00000000..003d3316 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequest.kt @@ -0,0 +1,38 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.action.search.SearchRequest +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import java.io.IOException + +class SearchMonitorRequest : ActionRequest { + + val searchRequest: SearchRequest + + constructor( + searchRequest: SearchRequest + ) : super() { + this.searchRequest = searchRequest + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + searchRequest = SearchRequest(sin) + ) + + override fun validate(): ActionRequestValidationException? 
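// A minimal round-trip sketch (not part of this change) of the Writeable contract every request in this
// package relies on: writeTo(StreamOutput) and the StreamInput constructor must stay symmetric, field for
// field. Assumes OpenSearch's in-memory org.opensearch.common.io.stream.BytesStreamOutput; the index name
// is illustrative.
fun searchCommentRoundTripSketch() {
    val original = SearchCommentRequest(org.opensearch.action.search.SearchRequest("comments-index"))
    val out = org.opensearch.common.io.stream.BytesStreamOutput()
    original.writeTo(out)
    val copy = SearchCommentRequest(out.bytes().streamInput())
    check(copy.searchRequest.indices().contentEquals(original.searchRequest.indices()))
}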
{ + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + searchRequest.writeTo(out) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/action/SubscribeFindingsResponse.kt b/src/main/kotlin/org/opensearch/commons/alerting/action/SubscribeFindingsResponse.kt new file mode 100644 index 00000000..58419d7c --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/action/SubscribeFindingsResponse.kt @@ -0,0 +1,38 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.commons.notifications.action.BaseResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +class SubscribeFindingsResponse : BaseResponse { + + private var status: RestStatus + + constructor(status: RestStatus) : super() { + this.status = status + } + + @Throws(IOException::class) + constructor(sin: StreamInput) { + this.status = sin.readEnum(RestStatus::class.java) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeEnum(status) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("status", status.status) + return builder.endObject() + } + + override fun getStatus(): RestStatus { + return this.status + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt new file mode 100644 index 00000000..75d1f3ae --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregationBuilder.kt @@ -0,0 +1,244 @@ +package org.opensearch.commons.alerting.aggregation.bucketselectorext + +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter.Companion.BUCKET_SELECTOR_COMPOSITE_AGG_FILTER +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter.Companion.BUCKET_SELECTOR_FILTER +import org.opensearch.core.ParseField +import org.opensearch.core.common.ParsingException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.script.Script +import org.opensearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder +import org.opensearch.search.aggregations.pipeline.BucketHelpers +import org.opensearch.search.aggregations.pipeline.PipelineAggregator +import java.io.IOException +import java.util.Objects +import kotlin.collections.ArrayList +import kotlin.collections.HashMap + +class BucketSelectorExtAggregationBuilder : + AbstractPipelineAggregationBuilder { + private val bucketsPathsMap: Map + val parentBucketPath: String + val script: Script + val filter: BucketSelectorExtFilter? + private var gapPolicy = BucketHelpers.GapPolicy.SKIP + + constructor( + name: String, + bucketsPathsMap: Map, + script: Script, + parentBucketPath: String, + filter: BucketSelectorExtFilter? 
+ ) : super(name, NAME.preferredName, listOf(parentBucketPath).toTypedArray()) { + this.bucketsPathsMap = bucketsPathsMap + this.script = script + this.parentBucketPath = parentBucketPath + this.filter = filter + } + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : super(sin, NAME.preferredName) { + bucketsPathsMap = sin.readMap() as MutableMap + script = Script(sin) + gapPolicy = BucketHelpers.GapPolicy.readFrom(sin) + parentBucketPath = sin.readString() + filter = if (sin.readBoolean()) { + BucketSelectorExtFilter(sin) + } else { + null + } + } + + @Throws(IOException::class) + override fun doWriteTo(out: StreamOutput) { + out.writeMap(bucketsPathsMap) + script.writeTo(out) + gapPolicy.writeTo(out) + out.writeString(parentBucketPath) + if (filter != null) { + out.writeBoolean(true) + filter.writeTo(out) + } else { + out.writeBoolean(false) + } + } + + /** + * Sets the gap policy to use for this aggregation. + */ + fun gapPolicy(gapPolicy: BucketHelpers.GapPolicy?): BucketSelectorExtAggregationBuilder { + requireNotNull(gapPolicy) { "[gapPolicy] must not be null: [$name]" } + this.gapPolicy = gapPolicy + return this + } + + override fun createInternal(metaData: Map?): PipelineAggregator { + return BucketSelectorExtAggregator(name, bucketsPathsMap, parentBucketPath, script, gapPolicy, filter, metaData) + } + + @Throws(IOException::class) + public override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.field(PipelineAggregator.Parser.BUCKETS_PATH.preferredName, bucketsPathsMap as Map?) + .field(PARENT_BUCKET_PATH.preferredName, parentBucketPath) + .field(Script.SCRIPT_PARSE_FIELD.preferredName, script) + .field(PipelineAggregator.Parser.GAP_POLICY.preferredName, gapPolicy.getName()) + if (filter != null) { + if (filter.isCompositeAggregation) { + builder.startObject(BUCKET_SELECTOR_COMPOSITE_AGG_FILTER.preferredName) + .value(filter) + .endObject() + } else { + builder.startObject(BUCKET_SELECTOR_FILTER.preferredName) + .value(filter) + .endObject() + } + } + return builder + } + + override fun overrideBucketsPath(): Boolean { + return true + } + + override fun validate(context: ValidationContext) { + // Nothing to check + } + + override fun hashCode(): Int { + return Objects.hash(super.hashCode(), bucketsPathsMap, script, gapPolicy) + } + + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (other == null || javaClass != other.javaClass) return false + if (!super.equals(other)) return false + val otherCast = other as BucketSelectorExtAggregationBuilder + return ( + bucketsPathsMap == otherCast.bucketsPathsMap && + script == otherCast.script && + gapPolicy == otherCast.gapPolicy + ) + } + + override fun getWriteableName(): String { + return NAME.preferredName + } + + companion object { + val NAME = ParseField("bucket_selector_ext") + val PARENT_BUCKET_PATH = ParseField("parent_bucket_path") + + @Throws(IOException::class) + fun parse(reducerName: String, parser: XContentParser): BucketSelectorExtAggregationBuilder { + var token: XContentParser.Token + var script: Script? = null + var currentFieldName: String? = null + var bucketsPathsMap: MutableMap? = null + var gapPolicy: BucketHelpers.GapPolicy? = null + var parentBucketPath: String? = null + var filter: BucketSelectorExtFilter? 
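// A minimal construction sketch (not part of this change) for the pipeline aggregation defined above.
// The aggregation name, buckets path and painless-style condition are made up; "_value" mirrors the
// variable name the parser assigns when buckets_path is a single string, and a null filter means no
// include/exclude filtering of bucket keys.
fun bucketSelectorExtSketch(): BucketSelectorExtAggregationBuilder =
    BucketSelectorExtAggregationBuilder(
        name = "errors_above_threshold",
        bucketsPathsMap = mapOf("_value" to "error_count"),
        script = org.opensearch.script.Script("params._value > 5"),
        parentBucketPath = "status_terms",
        filter = null
    )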
= null + while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { + if (token === XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName() + } else if (token === XContentParser.Token.VALUE_STRING) { + when { + PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler) -> { + bucketsPathsMap = HashMap() + bucketsPathsMap["_value"] = parser.text() + } + PipelineAggregator.Parser.GAP_POLICY.match(currentFieldName, parser.deprecationHandler) -> { + gapPolicy = BucketHelpers.GapPolicy.parse(parser.text(), parser.tokenLocation) + } + Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.deprecationHandler) -> { + script = Script.parse(parser) + } + PARENT_BUCKET_PATH.match(currentFieldName, parser.deprecationHandler) -> { + parentBucketPath = parser.text() + } + else -> { + throw ParsingException( + parser.tokenLocation, + "Unknown key for a $token in [$reducerName]: [$currentFieldName]." + ) + } + } + } else if (token === XContentParser.Token.START_ARRAY) { + if (PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler)) { + val paths: MutableList = ArrayList() + while (parser.nextToken().also { token = it } !== XContentParser.Token.END_ARRAY) { + val path = parser.text() + paths.add(path) + } + bucketsPathsMap = HashMap() + for (i in paths.indices) { + bucketsPathsMap["_value$i"] = paths[i] + } + } else { + throw ParsingException( + parser.tokenLocation, + "Unknown key for a $token in [$reducerName]: [$currentFieldName]." + ) + } + } else if (token === XContentParser.Token.START_OBJECT) { + when { + Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.deprecationHandler) -> { + script = Script.parse(parser) + } + PipelineAggregator.Parser.BUCKETS_PATH.match(currentFieldName, parser.deprecationHandler) -> { + val map = parser.map() + bucketsPathsMap = HashMap() + for ((key, value) in map) { + bucketsPathsMap[key] = value.toString() + } + } + BUCKET_SELECTOR_FILTER.match(currentFieldName, parser.deprecationHandler) -> { + filter = BucketSelectorExtFilter.parse(reducerName, false, parser) + } + BUCKET_SELECTOR_COMPOSITE_AGG_FILTER.match( + currentFieldName, + parser.deprecationHandler + ) -> { + filter = BucketSelectorExtFilter.parse(reducerName, true, parser) + } + else -> { + throw ParsingException( + parser.tokenLocation, + "Unknown key for a $token in [$reducerName]: [$currentFieldName]." 
+ ) + } + } + } else { + throw ParsingException(parser.tokenLocation, "Unexpected token $token in [$reducerName].") + } + } + if (bucketsPathsMap == null) { + throw ParsingException( + parser.tokenLocation, + "Missing required field [" + PipelineAggregator.Parser.BUCKETS_PATH.preferredName + "] for bucket_selector aggregation [" + reducerName + "]" + ) + } + if (script == null) { + throw ParsingException( + parser.tokenLocation, + "Missing required field [" + Script.SCRIPT_PARSE_FIELD.preferredName + "] for bucket_selector aggregation [" + reducerName + "]" + ) + } + + if (parentBucketPath == null) { + throw ParsingException( + parser.tokenLocation, + "Missing required field [" + PARENT_BUCKET_PATH + "] for bucket_selector aggregation [" + reducerName + "]" + ) + } + val factory = BucketSelectorExtAggregationBuilder(reducerName, bucketsPathsMap, script, parentBucketPath, filter) + if (gapPolicy != null) { + factory.gapPolicy(gapPolicy) + } + return factory + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt new file mode 100644 index 00000000..68017856 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtAggregator.kt @@ -0,0 +1,158 @@ +package org.opensearch.commons.alerting.aggregation.bucketselectorext + +import org.apache.lucene.util.BytesRef +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder.Companion.NAME +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.script.BucketAggregationSelectorScript +import org.opensearch.script.Script +import org.opensearch.search.DocValueFormat +import org.opensearch.search.aggregations.Aggregations +import org.opensearch.search.aggregations.InternalAggregation +import org.opensearch.search.aggregations.InternalMultiBucketAggregation +import org.opensearch.search.aggregations.bucket.SingleBucketAggregation +import org.opensearch.search.aggregations.bucket.composite.InternalComposite +import org.opensearch.search.aggregations.bucket.terms.IncludeExclude +import org.opensearch.search.aggregations.pipeline.BucketHelpers +import org.opensearch.search.aggregations.pipeline.SiblingPipelineAggregator +import org.opensearch.search.aggregations.support.AggregationPath +import java.io.IOException + +class BucketSelectorExtAggregator : SiblingPipelineAggregator { + private var name: String? = null + private var bucketsPathsMap: Map + private var parentBucketPath: String + private var script: Script + private var gapPolicy: BucketHelpers.GapPolicy + private var bucketSelectorExtFilter: BucketSelectorExtFilter? = null + + constructor( + name: String?, + bucketsPathsMap: Map, + parentBucketPath: String, + script: Script, + gapPolicy: BucketHelpers.GapPolicy, + filter: BucketSelectorExtFilter?, + metadata: Map? + ) : super(name, bucketsPathsMap.values.toTypedArray(), metadata) { + this.bucketsPathsMap = bucketsPathsMap + this.parentBucketPath = parentBucketPath + this.script = script + this.gapPolicy = gapPolicy + this.bucketSelectorExtFilter = filter + } + + /** + * Read from a stream. 
+ */ + @Suppress("UNCHECKED_CAST") + @Throws(IOException::class) + constructor(sin: StreamInput) : super(sin.readString(), null, null) { + script = Script(sin) + gapPolicy = BucketHelpers.GapPolicy.readFrom(sin) + bucketsPathsMap = sin.readMap() as Map + parentBucketPath = sin.readString() + if (sin.readBoolean()) { + bucketSelectorExtFilter = BucketSelectorExtFilter(sin) + } else { + bucketSelectorExtFilter = null + } + } + + @Throws(IOException::class) + override fun doWriteTo(out: StreamOutput) { + out.writeString(name) + script.writeTo(out) + gapPolicy.writeTo(out) + out.writeGenericValue(bucketsPathsMap) + out.writeString(parentBucketPath) + if (bucketSelectorExtFilter != null) { + out.writeBoolean(true) + bucketSelectorExtFilter!!.writeTo(out) + } else { + out.writeBoolean(false) + } + } + + override fun getWriteableName(): String { + return NAME.preferredName + } + + override fun doReduce(aggregations: Aggregations, reduceContext: InternalAggregation.ReduceContext): InternalAggregation { + val parentBucketPathList = AggregationPath.parse(parentBucketPath).pathElementsAsStringList + var subAggregations: Aggregations = aggregations + for (i in 0 until parentBucketPathList.size - 1) { + subAggregations = subAggregations.get(parentBucketPathList[0]).aggregations + } + val originalAgg = subAggregations.get(parentBucketPathList.last()) as InternalMultiBucketAggregation<*, *> + val buckets = originalAgg.buckets + val factory = reduceContext.scriptService().compile(script, BucketAggregationSelectorScript.CONTEXT) + val selectedBucketsIndex: MutableList = ArrayList() + for (i in buckets.indices) { + val bucket = buckets[i] + if (bucketSelectorExtFilter != null) { + var accepted = true + if (bucketSelectorExtFilter!!.isCompositeAggregation) { + val compBucketKeyObj = (bucket as InternalComposite.InternalBucket).key + val filtersMap: HashMap? = bucketSelectorExtFilter!!.filtersMap + for (sourceKey in compBucketKeyObj.keys) { + if (filtersMap != null) { + if (filtersMap.containsKey(sourceKey)) { + val obj = compBucketKeyObj[sourceKey] + accepted = isAccepted(obj!!, filtersMap[sourceKey]) + if (!accepted) break + } else { + accepted = false + break + } + } + } + } else { + accepted = isAccepted(bucket.key, bucketSelectorExtFilter!!.filters) + } + if (!accepted) continue + } + + val vars: MutableMap = HashMap() + if (script.params != null) { + vars.putAll(script.params) + } + for ((varName, bucketsPath) in bucketsPathsMap) { + val value = BucketHelpers.resolveBucketValue(originalAgg, bucket, bucketsPath, gapPolicy) + vars[varName] = value + } + val executableScript = factory.newInstance(vars) + // TODO: can we use one instance of the script for all buckets? it should be stateless? + if (executableScript.execute()) { + selectedBucketsIndex.add(i) + } + } + + return BucketSelectorIndices( + name(), + parentBucketPath, + selectedBucketsIndex, + originalAgg.metadata + ) + } + + private fun isAccepted(obj: Any, filter: IncludeExclude?): Boolean { + return when (obj.javaClass) { + String::class.java -> { + val stringFilter = filter!!.convertToStringFilter(DocValueFormat.RAW) + stringFilter.accept(BytesRef(obj as String)) + } + java.lang.Long::class.java, Long::class.java -> { + val longFilter = filter!!.convertToLongFilter(DocValueFormat.RAW) + longFilter.accept(obj as Long) + } + java.lang.Double::class.java, Double::class.java -> { + val doubleFilter = filter!!.convertToDoubleFilter() + doubleFilter.accept(obj as Long) + } + else -> { + throw IllegalStateException("Object is not comparable. 
Please use one of String, Long or Double type.") + } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt new file mode 100644 index 00000000..3d9466eb --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorExtFilter.kt @@ -0,0 +1,141 @@ +package org.opensearch.commons.alerting.aggregation.bucketselectorext + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.ParseField +import org.opensearch.core.common.ParsingException +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.search.aggregations.bucket.terms.IncludeExclude +import java.io.IOException + +class BucketSelectorExtFilter : BaseModel { + // used for composite aggregations + val filtersMap: HashMap? + + // used for filtering string term aggregation + val filters: IncludeExclude? + + constructor(filters: IncludeExclude?) { + filtersMap = null + this.filters = filters + } + + constructor(filtersMap: HashMap?) { + this.filtersMap = filtersMap + filters = null + } + + constructor(sin: StreamInput) { + if (sin.readBoolean()) { + val size: Int = sin.readVInt() + filtersMap = java.util.HashMap() + + var i = 0 + while (i <= size) { + filtersMap[sin.readString()] = IncludeExclude(sin) + ++i + } + filters = null + } else { + filters = IncludeExclude(sin) + filtersMap = null + } + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + val isCompAgg = isCompositeAggregation + out.writeBoolean(isCompAgg) + if (isCompAgg) { + out.writeVInt(filtersMap!!.size) + for ((key, value) in filtersMap) { + out.writeString(key) + value.writeTo(out) + } + } else { + filters!!.writeTo(out) + } + } + + @Throws(IOException::class) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (isCompositeAggregation) { + for ((key, filter) in filtersMap!!) 
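// A small sketch (illustrative, not part of this change): a filter that keeps only the "error" and
// "warn" bucket keys of a terms aggregation, wrapped in the BucketSelectorExtFilter defined here.
// Assumes IncludeExclude's (includeValues, excludeValues) string-array constructor.
fun errorBucketsOnlyFilter(): BucketSelectorExtFilter =
    BucketSelectorExtFilter(IncludeExclude(arrayOf("error", "warn"), null))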
{ + builder.startObject(key) + filter.toXContent(builder, params) + builder.endObject() + } + } else { + filters!!.toXContent(builder, params) + } + return builder + } + + val isCompositeAggregation: Boolean + get() = if (filtersMap != null && filters == null) { + true + } else if (filtersMap == null && filters != null) { + false + } else { + throw IllegalStateException("Type of selector cannot be determined") + } + + companion object { + const val NAME = "filter" + var BUCKET_SELECTOR_FILTER = ParseField("filter") + var BUCKET_SELECTOR_COMPOSITE_AGG_FILTER = ParseField("composite_agg_filter") + + @Throws(IOException::class) + fun parse(reducerName: String, isCompositeAggregation: Boolean, parser: XContentParser): BucketSelectorExtFilter { + var token: XContentParser.Token + return if (isCompositeAggregation) { + val filtersMap = HashMap() + while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { + if (token === XContentParser.Token.FIELD_NAME) { + val sourceKey = parser.currentName() + token = parser.nextToken() + filtersMap[sourceKey] = parseIncludeExclude(reducerName, parser) + } else { + throw ParsingException( + parser.tokenLocation, + "Unknown key for a " + token + " in [" + reducerName + "]: [" + parser.currentName() + "]." + ) + } + } + BucketSelectorExtFilter(filtersMap) + } else { + BucketSelectorExtFilter(parseIncludeExclude(reducerName, parser)) + } + } + + @Throws(IOException::class) + private fun parseIncludeExclude(reducerName: String, parser: XContentParser): IncludeExclude { + var token: XContentParser.Token + var include: IncludeExclude? = null + var exclude: IncludeExclude? = null + while (parser.nextToken().also { token = it } !== XContentParser.Token.END_OBJECT) { + val fieldName = parser.currentName() + when { + IncludeExclude.INCLUDE_FIELD.match(fieldName, parser.deprecationHandler) -> { + parser.nextToken() + include = IncludeExclude.parseInclude(parser) + } + IncludeExclude.EXCLUDE_FIELD.match(fieldName, parser.deprecationHandler) -> { + parser.nextToken() + exclude = IncludeExclude.parseExclude(parser) + } + else -> { + throw ParsingException( + parser.tokenLocation, + "Unknown key for a $token in [$reducerName]: [$fieldName]." + ) + } + } + } + return IncludeExclude.merge(include, exclude) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt new file mode 100644 index 00000000..e2352be9 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/aggregation/bucketselectorext/BucketSelectorIndices.kt @@ -0,0 +1,68 @@ +package org.opensearch.commons.alerting.aggregation.bucketselectorext + +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.search.aggregations.InternalAggregation +import java.io.IOException +import java.util.Objects + +open class BucketSelectorIndices( + name: String?, + private var parentBucketPath: String, + var bucketIndices: List, + metaData: Map? +) : InternalAggregation(name, metaData) { + + @Throws(IOException::class) + override fun doWriteTo(out: StreamOutput) { + out.writeString(parentBucketPath) + out.writeIntArray(bucketIndices.stream().mapToInt { i: Int? -> i!! 
}.toArray()) + } + + override fun getWriteableName(): String { + return name + } + + override fun reduce(aggregations: List, reduceContext: ReduceContext): BucketSelectorIndices { + throw UnsupportedOperationException("Not supported") + } + + override fun mustReduceOnSingleInternalAgg(): Boolean { + return false + } + + override fun getProperty(path: MutableList?): Any { + throw UnsupportedOperationException("Not supported") + } + + object Fields { + const val PARENT_BUCKET_PATH = "parent_bucket_path" + const val BUCKET_INDICES = "bucket_indices" + } + + @Throws(IOException::class) + override fun doXContentBody(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.field(Fields.PARENT_BUCKET_PATH, parentBucketPath) + builder.field(Fields.BUCKET_INDICES, bucketIndices) + otherStatsToXContent(builder) + return builder + } + + @Throws(IOException::class) + protected fun otherStatsToXContent(builder: XContentBuilder): XContentBuilder { + return builder + } + + override fun hashCode(): Int { + return Objects.hash(super.hashCode(), parentBucketPath) + } + + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (other == null || javaClass != other.javaClass) return false + if (!super.equals(other)) return false + val otherCast = other as BucketSelectorIndices + return name == otherCast.name && parentBucketPath == otherCast.parentBucketPath + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt b/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt new file mode 100644 index 00000000..28ef4a55 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/alerts/AlertError.kt @@ -0,0 +1,74 @@ +package org.opensearch.commons.alerting.alerts + +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import java.io.IOException +import java.time.Instant + +data class AlertError(val timestamp: Instant, var message: String) : Writeable, ToXContent { + init { + this.message = obfuscateIPAddresses(message) + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInstant(), // timestamp + sin.readString() // message + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeInstant(timestamp) + out.writeString(message) + } + companion object { + + const val TIMESTAMP_FIELD = "timestamp" + const val MESSAGE_FIELD = "message" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): AlertError { + lateinit var timestamp: Instant + lateinit var message: String + + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + TIMESTAMP_FIELD -> timestamp = requireNotNull(xcp.instant()) + MESSAGE_FIELD -> message = xcp.text() + } + } + return AlertError(timestamp = timestamp, message = message) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): AlertError { + return AlertError(sin) 
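// Illustrative only (not part of this change): AlertError's init block routes every message through
// obfuscateIPAddresses (shown just below), so IPv4 addresses are masked before the message is stored
// or returned; the port and the rest of the text are left untouched.
fun alertErrorObfuscationSketch() {
    val error = AlertError(java.time.Instant.now(), "Connection refused to 10.1.2.3:9300")
    check(error.message == "Connection refused to x.x.x.x:9300")
}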
+ } + + fun obfuscateIPAddresses(exceptionMessage: String): String { + val ipAddressPattern = "\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}" + val obfuscatedMessage = exceptionMessage.replace(ipAddressPattern.toRegex(), "x.x.x.x") + return obfuscatedMessage + } + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .optionalTimeField(TIMESTAMP_FIELD, timestamp) + .field(MESSAGE_FIELD, message) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ActionExecutionResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ActionExecutionResult.kt new file mode 100644 index 00000000..6d4614e8 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ActionExecutionResult.kt @@ -0,0 +1,85 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +/** + * When an alert triggered, the trigger's actions will be executed. + * Action execution result records action throttle result and is a part of Alert. + */ +data class ActionExecutionResult( + val actionId: String, + val lastExecutionTime: Instant?, + val throttledCount: Int = 0 +) : Writeable, ToXContentObject { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // actionId + sin.readOptionalInstant(), // lastExecutionTime + sin.readInt() // throttledCount + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(ACTION_ID_FIELD, actionId) + .optionalTimeField(LAST_EXECUTION_TIME_FIELD, lastExecutionTime) + .field(THROTTLED_COUNT_FIELD, throttledCount) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(actionId) + out.writeOptionalInstant(lastExecutionTime) + out.writeInt(throttledCount) + } + + companion object { + const val ACTION_ID_FIELD = "action_id" + const val LAST_EXECUTION_TIME_FIELD = "last_execution_time" + const val THROTTLED_COUNT_FIELD = "throttled_count" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ActionExecutionResult { + lateinit var actionId: String + var throttledCount: Int = 0 + var lastExecutionTime: Instant? 
= null
+
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp)
+            while (xcp.nextToken() != XContentParser.Token.END_OBJECT) {
+                val fieldName = xcp.currentName()
+                xcp.nextToken()
+                when (fieldName) {
+                    ACTION_ID_FIELD -> actionId = xcp.text()
+                    THROTTLED_COUNT_FIELD -> throttledCount = xcp.intValue()
+                    LAST_EXECUTION_TIME_FIELD -> lastExecutionTime = xcp.instant()
+
+                    else -> {
+                        throw IllegalStateException("Unexpected field: $fieldName, while parsing action")
+                    }
+                }
+            }
+
+            requireNotNull(actionId) { "Must set action id" }
+            return ActionExecutionResult(actionId, lastExecutionTime, throttledCount)
+        }
+
+        @JvmStatic
+        @Throws(IOException::class)
+        fun readFrom(sin: StreamInput): ActionExecutionResult {
+            return ActionExecutionResult(sin)
+        }
+    }
+}
diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/AggregationResultBucket.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/AggregationResultBucket.kt
new file mode 100644
index 00000000..2d36a51c
--- /dev/null
+++ b/src/main/kotlin/org/opensearch/commons/alerting/model/AggregationResultBucket.kt
@@ -0,0 +1,86 @@
+package org.opensearch.commons.alerting.model
+
+import org.opensearch.core.common.ParsingException
+import org.opensearch.core.common.io.stream.StreamInput
+import org.opensearch.core.common.io.stream.StreamOutput
+import org.opensearch.core.common.io.stream.Writeable
+import org.opensearch.core.xcontent.ToXContent
+import org.opensearch.core.xcontent.ToXContentObject
+import org.opensearch.core.xcontent.XContentBuilder
+import org.opensearch.core.xcontent.XContentParser
+import org.opensearch.core.xcontent.XContentParser.Token
+import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken
+import java.io.IOException
+import java.util.Locale
+
+data class AggregationResultBucket(
+    val parentBucketPath: String?,
+    val bucketKeys: List<String>,
+    val bucket: Map<String, Any>? // TODO: Should reduce contents to only top-level to not include sub-aggs here
+) : Writeable, ToXContentObject {
+
+    @Throws(IOException::class)
+    constructor(sin: StreamInput) : this(sin.readString(), sin.readStringList(), sin.readMap())
+
+    override fun writeTo(out: StreamOutput) {
+        out.writeString(parentBucketPath)
+        out.writeStringCollection(bucketKeys)
+        out.writeMap(bucket)
+    }
+
+    override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
+        builder.startObject()
+        innerXContent(builder)
+        return builder.endObject()
+    }
+
+    fun innerXContent(builder: XContentBuilder): XContentBuilder {
+        builder.startObject(CONFIG_NAME)
+            .field(PARENTS_BUCKET_PATH, parentBucketPath)
+            .field(BUCKET_KEYS, bucketKeys.toTypedArray())
+            .field(BUCKET, bucket)
+            .endObject()
+        return builder
+    }
+
+    companion object {
+        const val CONFIG_NAME = "agg_alert_content"
+        const val PARENTS_BUCKET_PATH = "parent_bucket_path"
+        const val BUCKET_KEYS = "bucket_keys"
+        private const val BUCKET = "bucket"
+
+        fun parse(xcp: XContentParser): AggregationResultBucket {
+            var parentBucketPath: String? = null
+            var bucketKeys = mutableListOf<String>()
+            var bucket: MutableMap<String, Any>?
= null + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + + if (CONFIG_NAME != xcp.currentName()) { + throw ParsingException( + xcp.tokenLocation, + String.format( + Locale.ROOT, + "Failed to parse object: expecting token with name [%s] but found [%s]", + CONFIG_NAME, + xcp.currentName() + ) + ) + } + while (xcp.nextToken() != Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + PARENTS_BUCKET_PATH -> parentBucketPath = xcp.text() + BUCKET_KEYS -> { + ensureExpectedToken(Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != Token.END_ARRAY) { + bucketKeys.add(xcp.text()) + } + } + BUCKET -> bucket = xcp.map() + } + } + return AggregationResultBucket(parentBucketPath, bucketKeys, bucket) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Alert.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Alert.kt new file mode 100644 index 00000000..e435c866 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Alert.kt @@ -0,0 +1,617 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.lucene.uid.Versions +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import java.io.IOException +import java.time.Instant + +data class Alert( + val id: String = NO_ID, + val version: Long = NO_VERSION, + val schemaVersion: Int = NO_SCHEMA_VERSION, + val monitorId: String, + val workflowId: String, + val workflowName: String, + val monitorName: String, + val monitorVersion: Long, + val monitorUser: User?, + val triggerId: String, + val triggerName: String, + val findingIds: List, + val relatedDocIds: List, + val state: State, + val startTime: Instant, + val endTime: Instant? = null, + val lastNotificationTime: Instant? = null, + val acknowledgedTime: Instant? = null, + val errorMessage: String? = null, + val errorHistory: List, + val severity: String, + val actionExecutionResults: List, + val aggregationResultBucket: AggregationResultBucket? = null, + val executionId: String? = null, + val associatedAlertIds: List, + val clusters: List? = null +) : Writeable, ToXContent { + + init { + if (errorMessage != null) { + require(state == State.DELETED || state == State.ERROR || state == State.AUDIT) { + "Attempt to create an alert with an error in state: $state" + } + } + } + + constructor( + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ACTIVE, + errorMessage: String? = null, + schemaVersion: Int = NO_SCHEMA_VERSION, + executionId: String, + chainedAlertTrigger: ChainedAlertTrigger, + workflow: Workflow, + associatedAlertIds: List, + clusters: List? 
= null + ) : this( + monitorId = NO_ID, + monitorName = "", + monitorVersion = NO_VERSION, + monitorUser = workflow.user, + triggerId = chainedAlertTrigger.id, + triggerName = chainedAlertTrigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = emptyList(), + severity = chainedAlertTrigger.severity, + actionExecutionResults = emptyList(), + schemaVersion = schemaVersion, + aggregationResultBucket = null, + findingIds = emptyList(), + relatedDocIds = emptyList(), + executionId = executionId, + workflowId = workflow.id, + workflowName = workflow.name, + associatedAlertIds = associatedAlertIds, + clusters = clusters + ) + + constructor( + monitor: Monitor, + trigger: QueryLevelTrigger, + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ACTIVE, + errorMessage: String? = null, + errorHistory: List = mutableListOf(), + actionExecutionResults: List = mutableListOf(), + schemaVersion: Int = NO_SCHEMA_VERSION, + executionId: String? = null, + workflowId: String? = null, + clusters: List? = null + ) : this( + monitorId = monitor.id, + monitorName = monitor.name, + monitorVersion = monitor.version, + monitorUser = monitor.user, + triggerId = trigger.id, + triggerName = trigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = trigger.severity, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + aggregationResultBucket = null, + findingIds = emptyList(), + relatedDocIds = emptyList(), + executionId = executionId, + workflowId = workflowId ?: "", + workflowName = "", + associatedAlertIds = emptyList(), + clusters = clusters + ) + + constructor( + monitor: Monitor, + trigger: BucketLevelTrigger, + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ACTIVE, + errorMessage: String? = null, + errorHistory: List = mutableListOf(), + actionExecutionResults: List = mutableListOf(), + schemaVersion: Int = NO_SCHEMA_VERSION, + findingIds: List = emptyList(), + executionId: String? = null, + workflowId: String? = null, + clusters: List? = null + ) : this( + monitorId = monitor.id, + monitorName = monitor.name, + monitorVersion = monitor.version, + monitorUser = monitor.user, + triggerId = trigger.id, + triggerName = trigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = trigger.severity, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + aggregationResultBucket = null, + findingIds = findingIds, + relatedDocIds = emptyList(), + executionId = executionId, + workflowId = workflowId ?: "", + workflowName = "", + associatedAlertIds = emptyList(), + clusters = clusters + ) + + constructor( + monitor: Monitor, + trigger: BucketLevelTrigger, + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ACTIVE, + errorMessage: String? = null, + errorHistory: List = mutableListOf(), + actionExecutionResults: List = mutableListOf(), + schemaVersion: Int = NO_SCHEMA_VERSION, + aggregationResultBucket: AggregationResultBucket, + findingIds: List = emptyList(), + executionId: String? = null, + workflowId: String? = null, + clusters: List? 
= null + ) : this( + monitorId = monitor.id, + monitorName = monitor.name, + monitorVersion = monitor.version, + monitorUser = monitor.user, + triggerId = trigger.id, + triggerName = trigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = trigger.severity, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + aggregationResultBucket = aggregationResultBucket, + findingIds = findingIds, + relatedDocIds = emptyList(), + executionId = executionId, + workflowId = workflowId ?: "", + workflowName = "", + associatedAlertIds = emptyList(), + clusters = clusters + ) + + constructor( + id: String = NO_ID, + monitor: Monitor, + trigger: DocumentLevelTrigger, + findingIds: List, + relatedDocIds: List, + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ACTIVE, + errorMessage: String? = null, + errorHistory: List = mutableListOf(), + actionExecutionResults: List = mutableListOf(), + schemaVersion: Int = NO_SCHEMA_VERSION, + executionId: String? = null, + workflowId: String? = null, + clusters: List? = null + ) : this( + id = id, + monitorId = monitor.id, + monitorName = monitor.name, + monitorVersion = monitor.version, + monitorUser = monitor.user, + triggerId = trigger.id, + triggerName = trigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = trigger.severity, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + aggregationResultBucket = null, + findingIds = findingIds, + relatedDocIds = relatedDocIds, + executionId = executionId, + workflowId = workflowId ?: "", + workflowName = "", + associatedAlertIds = emptyList(), + clusters = clusters + ) + + constructor( + id: String = NO_ID, + monitor: Monitor, + trigger: NoOpTrigger, + startTime: Instant, + lastNotificationTime: Instant?, + state: State = State.ERROR, + errorMessage: String, + errorHistory: List = mutableListOf(), + schemaVersion: Int = NO_SCHEMA_VERSION, + workflowId: String? = null, + executionId: String?, + clusters: List? = null + ) : this( + id = id, + monitorId = monitor.id, + monitorName = monitor.name, + workflowName = "", + monitorVersion = monitor.version, + monitorUser = monitor.user, + triggerId = trigger.id, + triggerName = trigger.name, + state = state, + startTime = startTime, + lastNotificationTime = lastNotificationTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = trigger.severity, + actionExecutionResults = listOf(), + schemaVersion = schemaVersion, + aggregationResultBucket = null, + findingIds = listOf(), + relatedDocIds = listOf(), + workflowId = workflowId ?: "", + executionId = executionId, + associatedAlertIds = emptyList(), + clusters = clusters + ) + + enum class State { + ACTIVE, ACKNOWLEDGED, COMPLETED, ERROR, DELETED, + + // Alerts are created in audit state when they are generated by delegate monitors of a workflow. + // since chained alerts can be configured and acknowledged, the underlying monitors' alerts are simply + // for evaluating chained alert triggers and auditing purpose. + // Audit state alerts will be created in the history index and do not need to be acknowledged by users. 
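+        // For example, a delegate monitor executed as part of a workflow would write its alerts with
+        // state = State.AUDIT to the history index, while the chained alert produced by the workflow's
+        // own trigger is created with State.ACTIVE by default (see the chained-alert constructor above).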
+ AUDIT + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + version = sin.readLong(), + schemaVersion = sin.readInt(), + monitorId = sin.readString(), + workflowId = sin.readString(), + workflowName = sin.readString(), + monitorName = sin.readString(), + monitorVersion = sin.readLong(), + monitorUser = if (sin.readBoolean()) { + User(sin) + } else { + null + }, + triggerId = sin.readString(), + triggerName = sin.readString(), + findingIds = sin.readStringList(), + relatedDocIds = sin.readStringList(), + state = sin.readEnum(State::class.java), + startTime = sin.readInstant(), + endTime = sin.readOptionalInstant(), + lastNotificationTime = sin.readOptionalInstant(), + acknowledgedTime = sin.readOptionalInstant(), + errorMessage = sin.readOptionalString(), + errorHistory = sin.readList(::AlertError), + severity = sin.readString(), + actionExecutionResults = sin.readList(::ActionExecutionResult), + aggregationResultBucket = if (sin.readBoolean()) AggregationResultBucket(sin) else null, + executionId = sin.readOptionalString(), + associatedAlertIds = sin.readStringList(), + clusters = sin.readOptionalStringList() + ) + + fun isAcknowledged(): Boolean = (state == State.ACKNOWLEDGED) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeInt(schemaVersion) + out.writeString(monitorId) + out.writeString(workflowId) + out.writeString(workflowName) + out.writeString(monitorName) + out.writeLong(monitorVersion) + out.writeBoolean(monitorUser != null) + monitorUser?.writeTo(out) + out.writeString(triggerId) + out.writeString(triggerName) + out.writeStringCollection(findingIds) + out.writeStringCollection(relatedDocIds) + out.writeEnum(state) + out.writeInstant(startTime) + out.writeOptionalInstant(endTime) + out.writeOptionalInstant(lastNotificationTime) + out.writeOptionalInstant(acknowledgedTime) + out.writeOptionalString(errorMessage) + out.writeCollection(errorHistory) + out.writeString(severity) + out.writeCollection(actionExecutionResults) + if (aggregationResultBucket != null) { + out.writeBoolean(true) + aggregationResultBucket.writeTo(out) + } else { + out.writeBoolean(false) + } + out.writeOptionalString(executionId) + out.writeStringCollection(associatedAlertIds) + out.writeOptionalStringArray(clusters?.toTypedArray()) + } + + companion object { + + const val ALERT_ID_FIELD = "id" + const val SCHEMA_VERSION_FIELD = "schema_version" + const val ALERT_VERSION_FIELD = "version" + const val MONITOR_ID_FIELD = "monitor_id" + const val WORKFLOW_ID_FIELD = "workflow_id" + const val WORKFLOW_NAME_FIELD = "workflow_name" + const val MONITOR_VERSION_FIELD = "monitor_version" + const val MONITOR_NAME_FIELD = "monitor_name" + const val MONITOR_USER_FIELD = "monitor_user" + const val TRIGGER_ID_FIELD = "trigger_id" + const val TRIGGER_NAME_FIELD = "trigger_name" + const val FINDING_IDS = "finding_ids" + const val RELATED_DOC_IDS = "related_doc_ids" + const val STATE_FIELD = "state" + const val START_TIME_FIELD = "start_time" + const val LAST_NOTIFICATION_TIME_FIELD = "last_notification_time" + const val END_TIME_FIELD = "end_time" + const val ACKNOWLEDGED_TIME_FIELD = "acknowledged_time" + const val ERROR_MESSAGE_FIELD = "error_message" + const val ALERT_HISTORY_FIELD = "alert_history" + const val SEVERITY_FIELD = "severity" + const val ACTION_EXECUTION_RESULTS_FIELD = "action_execution_results" + const val EXECUTION_ID_FIELD = "execution_id" + const val 
ASSOCIATED_ALERT_IDS_FIELD = "associated_alert_ids" + const val BUCKET_KEYS = AggregationResultBucket.BUCKET_KEYS + const val PARENTS_BUCKET_PATH = AggregationResultBucket.PARENTS_BUCKET_PATH + const val CLUSTERS_FIELD = "clusters" + const val NO_ID = "" + const val NO_VERSION = Versions.NOT_FOUND + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Alert { + lateinit var monitorId: String + var workflowId = "" + var workflowName = "" + var schemaVersion = NO_SCHEMA_VERSION + lateinit var monitorName: String + var monitorVersion: Long = Versions.NOT_FOUND + var monitorUser: User? = null + lateinit var triggerId: String + lateinit var triggerName: String + val findingIds = mutableListOf() + val relatedDocIds = mutableListOf() + lateinit var state: State + lateinit var startTime: Instant + lateinit var severity: String + var endTime: Instant? = null + var lastNotificationTime: Instant? = null + var acknowledgedTime: Instant? = null + var errorMessage: String? = null + var executionId: String? = null + val errorHistory: MutableList = mutableListOf() + val actionExecutionResults: MutableList = mutableListOf() + var aggAlertBucket: AggregationResultBucket? = null + val associatedAlertIds = mutableListOf() + val clusters = mutableListOf() + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + MONITOR_ID_FIELD -> monitorId = xcp.text() + WORKFLOW_ID_FIELD -> workflowId = xcp.text() + WORKFLOW_NAME_FIELD -> workflowName = xcp.text() + SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + MONITOR_NAME_FIELD -> monitorName = xcp.text() + MONITOR_VERSION_FIELD -> monitorVersion = xcp.longValue() + MONITOR_USER_FIELD -> + monitorUser = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + User.parse(xcp) + } + TRIGGER_ID_FIELD -> triggerId = xcp.text() + FINDING_IDS -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + findingIds.add(xcp.text()) + } + } + RELATED_DOC_IDS -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + relatedDocIds.add(xcp.text()) + } + } + STATE_FIELD -> state = State.valueOf(xcp.text()) + TRIGGER_NAME_FIELD -> triggerName = xcp.text() + START_TIME_FIELD -> startTime = requireNotNull(xcp.instant()) + END_TIME_FIELD -> endTime = xcp.instant() + LAST_NOTIFICATION_TIME_FIELD -> lastNotificationTime = xcp.instant() + ACKNOWLEDGED_TIME_FIELD -> acknowledgedTime = xcp.instant() + ERROR_MESSAGE_FIELD -> errorMessage = xcp.textOrNull() + EXECUTION_ID_FIELD -> executionId = xcp.textOrNull() + ASSOCIATED_ALERT_IDS_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + associatedAlertIds.add(xcp.text()) + } + } + ALERT_HISTORY_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + errorHistory.add(AlertError.parse(xcp)) + } + } + SEVERITY_FIELD -> severity = xcp.text() + ACTION_EXECUTION_RESULTS_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != 
XContentParser.Token.END_ARRAY) { + actionExecutionResults.add(ActionExecutionResult.parse(xcp)) + } + } + AggregationResultBucket.CONFIG_NAME -> { + // If an Alert with aggAlertBucket contents is indexed into the alerts index first, then + // that field will be added to the mappings. + // In this case, that field will default to null when it isn't present for Alerts created by Query-Level Monitors + // (even though the toXContent doesn't output the field) so null is being accounted for here. + aggAlertBucket = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + AggregationResultBucket.parse(xcp) + } + } + CLUSTERS_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + clusters.add(xcp.text()) + } + } + } + } + + return Alert( + id = id, + version = version, + schemaVersion = schemaVersion, + monitorId = requireNotNull(monitorId), + monitorName = requireNotNull(monitorName), + monitorVersion = monitorVersion, + monitorUser = monitorUser, + triggerId = requireNotNull(triggerId), + triggerName = requireNotNull(triggerName), + state = requireNotNull(state), + startTime = requireNotNull(startTime), + endTime = endTime, + lastNotificationTime = lastNotificationTime, + acknowledgedTime = acknowledgedTime, + errorMessage = errorMessage, + errorHistory = errorHistory, + severity = severity, + actionExecutionResults = actionExecutionResults, + aggregationResultBucket = aggAlertBucket, + findingIds = findingIds, + relatedDocIds = relatedDocIds, + executionId = executionId, + workflowId = workflowId, + workflowName = workflowName, + associatedAlertIds = associatedAlertIds, + clusters = if (clusters.size > 0) clusters else null + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Alert { + return Alert(sin) + } + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, true) + } + + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { + return createXContentBuilder(builder, false) + } + + private fun createXContentBuilder(builder: XContentBuilder, secure: Boolean): XContentBuilder { + builder.startObject() + .field(ALERT_ID_FIELD, id) + .field(ALERT_VERSION_FIELD, version) + .field(MONITOR_ID_FIELD, monitorId) + .field(WORKFLOW_ID_FIELD, workflowId) + .field(WORKFLOW_NAME_FIELD, workflowName) + .field(ASSOCIATED_ALERT_IDS_FIELD, associatedAlertIds) + .field(SCHEMA_VERSION_FIELD, schemaVersion) + .field(MONITOR_VERSION_FIELD, monitorVersion) + .field(MONITOR_NAME_FIELD, monitorName) + .field(EXECUTION_ID_FIELD, executionId) + + if (!secure) { + builder.optionalUserField(MONITOR_USER_FIELD, monitorUser) + } + + builder.field(TRIGGER_ID_FIELD, triggerId) + .field(TRIGGER_NAME_FIELD, triggerName) + .field(FINDING_IDS, findingIds.toTypedArray()) + .field(RELATED_DOC_IDS, relatedDocIds.toTypedArray()) + .field(STATE_FIELD, state) + .field(ERROR_MESSAGE_FIELD, errorMessage) + .field(ALERT_HISTORY_FIELD, errorHistory.toTypedArray()) + .field(SEVERITY_FIELD, severity) + .field(ACTION_EXECUTION_RESULTS_FIELD, actionExecutionResults.toTypedArray()) + .optionalTimeField(START_TIME_FIELD, startTime) + .optionalTimeField(LAST_NOTIFICATION_TIME_FIELD, lastNotificationTime) + .optionalTimeField(END_TIME_FIELD, endTime) + .optionalTimeField(ACKNOWLEDGED_TIME_FIELD, acknowledgedTime) + aggregationResultBucket?.innerXContent(builder) + + if 
(!clusters.isNullOrEmpty()) builder.field(CLUSTERS_FIELD, clusters.toTypedArray()) + + builder.endObject() + return builder + } + + fun asTemplateArg(): Map { + return mapOf( + ACKNOWLEDGED_TIME_FIELD to acknowledgedTime?.toEpochMilli(), + ALERT_ID_FIELD to id, + ALERT_VERSION_FIELD to version, + END_TIME_FIELD to endTime?.toEpochMilli(), + ERROR_MESSAGE_FIELD to errorMessage, + EXECUTION_ID_FIELD to executionId, + WORKFLOW_ID_FIELD to workflowId, + WORKFLOW_NAME_FIELD to workflowName, + ASSOCIATED_ALERT_IDS_FIELD to associatedAlertIds, + LAST_NOTIFICATION_TIME_FIELD to lastNotificationTime?.toEpochMilli(), + SEVERITY_FIELD to severity, + START_TIME_FIELD to startTime.toEpochMilli(), + STATE_FIELD to state.toString(), + // Converting bucket keys to comma separated String to avoid manipulation in Action mustache templates + BUCKET_KEYS to aggregationResultBucket?.bucketKeys?.joinToString(","), + PARENTS_BUCKET_PATH to aggregationResultBucket?.parentBucketPath, + FINDING_IDS to findingIds.joinToString(","), + RELATED_DOC_IDS to relatedDocIds.joinToString(","), + CLUSTERS_FIELD to clusters?.joinToString(",") + ) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt new file mode 100644 index 00000000..dd9bd4dd --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BaseAlert.kt @@ -0,0 +1,208 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.lucene.uid.Versions +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +/** CorrelationAlert and Alert can extend the UnifiedAlert class to inherit the common fields and behavior + * of UnifiedAlert class. + */ +open class BaseAlert( + open val id: String = Alert.NO_ID, + open val version: Long = Alert.NO_VERSION, + open val schemaVersion: Int = NO_SCHEMA_VERSION, + open val user: User?, + open val triggerName: String, + + // State will be later moved to this Class (after `monitorBasedAlerts` extend this Class) + open val state: Alert.State, + open val startTime: Instant, + open val endTime: Instant? = null, + open val acknowledgedTime: Instant? = null, + open val errorMessage: String? 
= null, + open val severity: String, + open val actionExecutionResults: List +) : Writeable, ToXContent { + + init { + if (errorMessage != null) { + require((state == Alert.State.DELETED) || (state == Alert.State.ERROR) || (state == Alert.State.AUDIT)) { + "Attempt to create an alert with an error in state: $state" + } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + version = sin.readLong(), + schemaVersion = sin.readInt(), + user = if (sin.readBoolean()) { + User(sin) + } else { + null + }, + triggerName = sin.readString(), + state = sin.readEnum(Alert.State::class.java), + startTime = sin.readInstant(), + endTime = sin.readOptionalInstant(), + acknowledgedTime = sin.readOptionalInstant(), + errorMessage = sin.readOptionalString(), + severity = sin.readString(), + actionExecutionResults = sin.readList(::ActionExecutionResult) + ) + + fun isAcknowledged(): Boolean = (state == Alert.State.ACKNOWLEDGED) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeInt(schemaVersion) + out.writeBoolean(user != null) + user?.writeTo(out) + out.writeString(triggerName) + out.writeEnum(state) + out.writeInstant(startTime) + out.writeOptionalInstant(endTime) + out.writeOptionalInstant(acknowledgedTime) + out.writeOptionalString(errorMessage) + out.writeString(severity) + out.writeCollection(actionExecutionResults) + } + + companion object { + const val ALERT_ID_FIELD = "id" + const val SCHEMA_VERSION_FIELD = "schema_version" + const val ALERT_VERSION_FIELD = "version" + const val USER_FIELD = "user" + const val TRIGGER_NAME_FIELD = "trigger_name" + const val STATE_FIELD = "state" + const val START_TIME_FIELD = "start_time" + const val END_TIME_FIELD = "end_time" + const val ACKNOWLEDGED_TIME_FIELD = "acknowledged_time" + const val ERROR_MESSAGE_FIELD = "error_message" + const val SEVERITY_FIELD = "severity" + const val ACTION_EXECUTION_RESULTS_FIELD = "action_execution_results" + const val NO_ID = "" + const val NO_VERSION = Versions.NOT_FOUND + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, version: Long = NO_VERSION): BaseAlert { + lateinit var id: String + var schemaVersion = NO_SCHEMA_VERSION + var version: Long = Versions.NOT_FOUND + var user: User? = null + lateinit var triggerName: String + lateinit var state: Alert.State + lateinit var startTime: Instant + lateinit var severity: String + var endTime: Instant? = null + var acknowledgedTime: Instant? = null + var errorMessage: String? 
= null + val actionExecutionResults: MutableList = mutableListOf() + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + USER_FIELD -> user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + ALERT_ID_FIELD -> id = xcp.text() + ALERT_VERSION_FIELD -> version = xcp.longValue() + SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + TRIGGER_NAME_FIELD -> triggerName = xcp.text() + STATE_FIELD -> state = Alert.State.valueOf(xcp.text()) + ERROR_MESSAGE_FIELD -> errorMessage = xcp.textOrNull() + SEVERITY_FIELD -> severity = xcp.text() + ACTION_EXECUTION_RESULTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actionExecutionResults.add(ActionExecutionResult.parse(xcp)) + } + } + START_TIME_FIELD -> startTime = requireNotNull(xcp.instant()) + END_TIME_FIELD -> endTime = requireNotNull(xcp.instant()) + ACKNOWLEDGED_TIME_FIELD -> acknowledgedTime = xcp.instant() + } + } + + return BaseAlert( + id = id, + startTime = requireNotNull(startTime), + endTime = endTime, + state = requireNotNull(state), + version = version, + errorMessage = errorMessage, + actionExecutionResults = actionExecutionResults, + schemaVersion = schemaVersion, + user = user, + triggerName = requireNotNull(triggerName), + severity = severity, + acknowledgedTime = acknowledgedTime + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Alert { + return Alert(sin) + } + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, true) + } + + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { + return createXContentBuilder(builder, false) + } + + fun createXContentBuilder(builder: XContentBuilder, secure: Boolean): XContentBuilder { + if (!secure) { + builder.optionalUserField(USER_FIELD, user) + } + builder + .field(ALERT_ID_FIELD, id) + .field(ALERT_VERSION_FIELD, version) + .field(SCHEMA_VERSION_FIELD, schemaVersion) + .field(TRIGGER_NAME_FIELD, triggerName) + .field(STATE_FIELD, state) + .field(ERROR_MESSAGE_FIELD, errorMessage) + .field(SEVERITY_FIELD, severity) + .field(ACTION_EXECUTION_RESULTS_FIELD, actionExecutionResults.toTypedArray()) + .field(START_TIME_FIELD, startTime) + .field(END_TIME_FIELD, endTime) + .field(ACKNOWLEDGED_TIME_FIELD, acknowledgedTime) + return builder + } + + open fun asTemplateArg(): Map { + return mapOf( + ACKNOWLEDGED_TIME_FIELD to acknowledgedTime?.toEpochMilli(), + ALERT_ID_FIELD to id, + ALERT_VERSION_FIELD to version, + END_TIME_FIELD to endTime?.toEpochMilli(), + ERROR_MESSAGE_FIELD to errorMessage, + SEVERITY_FIELD to severity, + START_TIME_FIELD to startTime.toEpochMilli(), + STATE_FIELD to state.toString(), + TRIGGER_NAME_FIELD to triggerName + ) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt new file mode 100644 index 00000000..39ea4fbc --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTrigger.kt @@ -0,0 +1,153 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import 
org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class BucketLevelTrigger( + override val id: String = UUIDs.base64UUID(), + override val name: String, + override val severity: String, + val bucketSelector: BucketSelectorExtAggregationBuilder, + override val actions: List +) : Trigger { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readString(), // name + sin.readString(), // severity + BucketSelectorExtAggregationBuilder(sin), // condition + sin.readList(::Action) // actions + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(BUCKET_LEVEL_TRIGGER_FIELD) + .field(ID_FIELD, id) + .field(NAME_FIELD, name) + .field(SEVERITY_FIELD, severity) + .startObject(CONDITION_FIELD) + bucketSelector.internalXContent(builder, params) + builder.endObject() + .field(ACTIONS_FIELD, actions.toTypedArray()) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return BUCKET_LEVEL_TRIGGER_FIELD + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeString(severity) + bucketSelector.writeTo(out) + out.writeCollection(actions) + } + + fun asTemplateArg(): Map { + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + SEVERITY_FIELD to severity, + ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + PARENT_BUCKET_PATH to getParentBucketPath(), + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to bucketSelector.script.idOrCode, + LANG_FIELD to bucketSelector.script.lang + ) + ) + ) + } + + fun getParentBucketPath(): String { + return bucketSelector.parentBucketPath + } + + companion object { + const val BUCKET_LEVEL_TRIGGER_FIELD = "bucket_level_trigger" + const val CONDITION_FIELD = "condition" + const val PARENT_BUCKET_PATH = "parentBucketPath" + const val SCRIPT_FIELD = "script" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(BUCKET_LEVEL_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): BucketLevelTrigger { + var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified + lateinit var name: String + lateinit var severity: String + val actions: MutableList = mutableListOf() + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + lateinit var bucketSelector: BucketSelectorExtAggregationBuilder + + while 
(xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + + xcp.nextToken() + when (fieldName) { + ID_FIELD -> id = xcp.text() + NAME_FIELD -> name = xcp.text() + SEVERITY_FIELD -> severity = xcp.text() + CONDITION_FIELD -> { + // Using the trigger id as the name in the bucket selector since it is validated for uniqueness within Monitors. + // The contents of the trigger definition are round-tripped through parse and toXContent during Monitor creation + // ensuring that the id is available here in the version of the Monitor object that will be executed, even if the + // user submitted a custom trigger id after the condition definition. + bucketSelector = BucketSelectorExtAggregationBuilder.parse(id, xcp) + } + ACTIONS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actions.add(Action.parse(xcp)) + } + } + } + } + + return BucketLevelTrigger( + id = requireNotNull(id) { "Trigger id is null." }, + name = requireNotNull(name) { "Trigger name is null" }, + severity = requireNotNull(severity) { "Trigger severity is null" }, + bucketSelector = requireNotNull(bucketSelector) { "Trigger condition is null" }, + actions = requireNotNull(actions) { "Trigger actions are null" } + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): BucketLevelTrigger { + return BucketLevelTrigger(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt new file mode 100644 index 00000000..34328ca2 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerRunResult.kt @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +data class BucketLevelTriggerRunResult( + override var triggerName: String, + override var error: Exception? 
= null, + var aggregationResultBuckets: Map, + var actionResultsMap: MutableMap> = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readException() as Exception?, // error + sin.readMap(StreamInput::readString, ::AggregationResultBucket), + sin.readMap() as MutableMap> + ) + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder + .field(AGG_RESULT_BUCKETS, aggregationResultBuckets) + .field(ACTIONS_RESULTS, actionResultsMap as Map) + } + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeMap(aggregationResultBuckets, StreamOutput::writeString) { + valueOut: StreamOutput, aggResultBucket: AggregationResultBucket -> + aggResultBucket.writeTo(valueOut) + } + out.writeMap(actionResultsMap as Map) + } + + companion object { + const val AGG_RESULT_BUCKETS = "agg_result_buckets" + const val ACTIONS_RESULTS = "action_results" + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return BucketLevelTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTrigger.kt new file mode 100644 index 00000000..c56ce856 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTrigger.kt @@ -0,0 +1,150 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.script.Script +import java.io.IOException + +data class ChainedAlertTrigger( + override val id: String = UUIDs.base64UUID(), + override val name: String, + override val severity: String, + override val actions: List, + val condition: Script +) : Trigger { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readString(), // name + sin.readString(), // severity + sin.readList(::Action), // actions + Script(sin) + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(CHAINED_ALERT_TRIGGER_FIELD) + .field(ID_FIELD, id) + .field(NAME_FIELD, name) + .field(SEVERITY_FIELD, severity) + .startObject(CONDITION_FIELD) + .field(SCRIPT_FIELD, condition) + .endObject() + .field(ACTIONS_FIELD, actions.toTypedArray()) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return CHAINED_ALERT_TRIGGER_FIELD + } + + /** Returns a representation 
of the trigger suitable for passing into painless and mustache scripts. */ + fun asTemplateArg(): Map { + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + SEVERITY_FIELD to severity, + ACTIONS_FIELD to actions.map { it.asTemplateArg() } + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeString(severity) + out.writeCollection(actions) + condition.writeTo(out) + } + + companion object { + const val CHAINED_ALERT_TRIGGER_FIELD = "chained_alert_trigger" + const val CONDITION_FIELD = "condition" + const val SCRIPT_FIELD = "script" + const val QUERY_IDS_FIELD = "query_ids" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(CHAINED_ALERT_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): ChainedAlertTrigger { + var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified + lateinit var name: String + lateinit var severity: String + lateinit var condition: Script + val actions: MutableList = mutableListOf() + + if (xcp.currentToken() != XContentParser.Token.START_OBJECT && xcp.currentToken() != XContentParser.Token.FIELD_NAME) { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) + } + + // If the parser began on START_OBJECT, move to the next token so that the while loop enters on + // the fieldName (or END_OBJECT if it's empty). + if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken() + + while (xcp.currentToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + + xcp.nextToken() + when (fieldName) { + ID_FIELD -> id = xcp.text() + NAME_FIELD -> name = xcp.text() + SEVERITY_FIELD -> severity = xcp.text() + CONDITION_FIELD -> { + xcp.nextToken() + condition = Script.parse(xcp) + require(condition.lang == Script.DEFAULT_SCRIPT_LANG) { + "Invalid script language. Allowed languages are [${Script.DEFAULT_SCRIPT_LANG}]" + } + xcp.nextToken() + } + ACTIONS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actions.add(Action.parse(xcp)) + } + } + } + xcp.nextToken() + } + + return ChainedAlertTrigger( + name = requireNotNull(name) { "Trigger name is null" }, + severity = requireNotNull(severity) { "Trigger severity is null" }, + condition = requireNotNull(condition) { "Trigger condition is null" }, + actions = requireNotNull(actions) { "Trigger actions are null" }, + id = requireNotNull(id) { "Trigger id is null." 
} + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ChainedAlertTrigger { + return ChainedAlertTrigger(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt new file mode 100644 index 00000000..015762cf --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedAlertTriggerRunResult.kt @@ -0,0 +1,69 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +data class ChainedAlertTriggerRunResult( + override var triggerName: String, + var triggered: Boolean, + override var error: Exception?, + var actionResults: MutableMap = mutableMapOf(), + val associatedAlertIds: Set +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap, + associatedAlertIds = sin.readStringList().toSet() + ) + + override fun alertError(): AlertError? { + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggered", triggered) + .field("action_results", actionResults as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + out.writeStringCollection(associatedAlertIds) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return ChainedAlertTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedMonitorFindings.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedMonitorFindings.kt new file mode 100644 index 00000000..a1b6435a --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ChainedMonitorFindings.kt @@ -0,0 +1,104 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.commons.utils.validateId +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.util.Collections + 
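+// A minimal usage sketch (illustrative only; the monitor ids below are hypothetical). A non-null monitorId
+// takes precedence over monitorIds for backwards compatibility, so either form below can be used:
+//   ChainedMonitorFindings(monitorId = "delegate-monitor-id")
+//   ChainedMonitorFindings(monitorIds = listOf("monitor-id-1", "monitor-id-2"))
+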
+/** + * Context passed in delegate monitor to filter data matched by a list of monitors based on the findings of the given monitor ids. + */ +// TODO - Remove the class and move the monitorId to Delegate (as a chainedMonitorId property) if this class won't be updated by adding new properties +data class ChainedMonitorFindings( + val monitorId: String? = null, + val monitorIds: List = emptyList() // if monitorId field is non-null it would be given precendence for BWC +) : BaseModel { + + init { + require(!(monitorId.isNullOrBlank() && monitorIds.isEmpty())) { + "at least one of fields, 'monitorIds' and 'monitorId' should be provided" + } + if (monitorId != null && monitorId.isBlank()) { + validateId(monitorId) + } else { + monitorIds.forEach { validateId(it) } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readOptionalString(), // monitorId + Collections.unmodifiableList(sin.readStringList()) + ) + + @Suppress("UNCHECKED_CAST") + fun asTemplateArg(): Map { + return mapOf( + MONITOR_ID_FIELD to monitorId, + MONITOR_IDS_FIELD to monitorIds + ) as Map + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeOptionalString(monitorId) + out.writeStringCollection(monitorIds) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(MONITOR_ID_FIELD, monitorId) + .field(MONITOR_IDS_FIELD, monitorIds) + .endObject() + return builder + } + + companion object { + const val MONITOR_ID_FIELD = "monitor_id" + const val MONITOR_IDS_FIELD = "monitor_ids" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ChainedMonitorFindings { + var monitorId: String? = null + val monitorIds = mutableListOf() + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + MONITOR_ID_FIELD -> { + if (!xcp.currentToken().equals(XContentParser.Token.VALUE_NULL)) { + monitorId = xcp.text() + } + } + + MONITOR_IDS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + monitorIds.add(xcp.text()) + } + } + } + } + return ChainedMonitorFindings(monitorId, monitorIds) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ChainedMonitorFindings { + return ChainedMonitorFindings(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt new file mode 100644 index 00000000..a11214e6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInput.kt @@ -0,0 +1,380 @@ +package org.opensearch.commons.alerting.model + +import org.apache.commons.validator.routines.UrlValidator +import org.apache.http.client.utils.URIBuilder +import org.opensearch.common.CheckedFunction +import org.opensearch.commons.utils.CLUSTER_NAME_REGEX +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import 
org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.net.URI +import java.net.URISyntaxException + +val ILLEGAL_PATH_PARAMETER_CHARACTERS = arrayOf(':', '"', '+', '\\', '|', '?', '#', '>', '<', ' ') + +/** + * This is a data class for a URI type of input for Monitors specifically for local clusters. + */ +data class ClusterMetricsInput( + var path: String, + var pathParams: String = "", + var url: String, + var clusters: List = listOf() +) : Input { + val clusterMetricType: ClusterMetricType + val constructedUri: URI + + // Verify parameters are valid during creation + init { + require(validateFields()) { + "The uri.api_type field, uri.path field, or uri.uri field must be defined." + } + + // Create an UrlValidator that only accepts "http" and "https" as valid scheme and allows local URLs. + val urlValidator = UrlValidator(arrayOf("http", "https"), UrlValidator.ALLOW_LOCAL_URLS) + + // Build url field by field if not provided as whole. + constructedUri = toConstructedUri() + + require(urlValidator.isValid(constructedUri.toString())) { + "Invalid URI constructed from the path and path_params inputs, or the url input." + } + + if (url.isNotEmpty() && validateFieldsNotEmpty()) { + require(constructedUri == constructUrlFromInputs()) { + "The provided URL and URI fields form different URLs." + } + } + + require(constructedUri.host.lowercase() == SUPPORTED_HOST) { + "Only host '$SUPPORTED_HOST' is supported." + } + require(constructedUri.port == SUPPORTED_PORT) { + "Only port '$SUPPORTED_PORT' is supported." + } + + if (clusters.isNotEmpty()) { + require(clusters.all { CLUSTER_NAME_REGEX.matches(it) }) { + "Cluster names are not valid." + } + } + + clusterMetricType = findApiType(constructedUri.path) + this.parseEmptyFields() + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // path + sin.readString(), // path params + sin.readString() // url + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .startObject(URI_FIELD) + .field(API_TYPE_FIELD, clusterMetricType) + .field(PATH_FIELD, path) + .field(PATH_PARAMS_FIELD, pathParams) + .field(URL_FIELD, url) + .field(CLUSTERS_FIELD, clusters) + .endObject() + .endObject() + } + + override fun name(): String { + return URI_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeString(clusterMetricType.toString()) + out.writeString(path) + out.writeString(pathParams) + out.writeString(url) + out.writeStringArray(clusters.toTypedArray()) + } + + companion object { + const val SUPPORTED_SCHEME = "http" + const val SUPPORTED_HOST = "localhost" + const val SUPPORTED_PORT = 9200 + + const val API_TYPE_FIELD = "api_type" + const val PATH_FIELD = "path" + const val PATH_PARAMS_FIELD = "path_params" + const val URL_FIELD = "url" + const val URI_FIELD = "uri" + const val CLUSTERS_FIELD = "clusters" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(Input::class.java, ParseField(URI_FIELD), CheckedFunction { parseInner(it) }) + + /** + * This parse function uses [XContentParser] to parse JSON input and store corresponding fields to create a [ClusterMetricsInput] object + */ + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): ClusterMetricsInput { + var path = "" + var pathParams = "" + var url = "" + val clusters = mutableListOf() + + 
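+            // Illustrative input shape (field values are examples only); this parser reads the object
+            // nested under the "uri" input field, e.g.:
+            //   { "path": "/_cluster/health", "path_params": "", "url": "", "clusters": [] }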
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + PATH_FIELD -> path = xcp.text() + PATH_PARAMS_FIELD -> pathParams = xcp.text() + URL_FIELD -> url = xcp.text() + CLUSTERS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) clusters.add(xcp.text()) + } + } + } + return ClusterMetricsInput(path, pathParams, url, clusters) + } + } + + /** + * Constructs the [URI] using either the provided [url], or the + * supported scheme, host, and port and provided [path]+[pathParams]. + * @return The [URI] constructed from [url] if it's defined; + * otherwise a [URI] constructed from the provided [URI] fields. + */ + private fun toConstructedUri(): URI { + return if (url.isEmpty()) { + constructUrlFromInputs() + } else { + try { + URIBuilder(url).build() + } catch (e: URISyntaxException) { + throw IllegalArgumentException("Invalid URL syntax.") + } + } + } + + /** + * Isolates just the path parameters from the [ClusterMetricsInput] URI. + * @return The path parameters portion of the [ClusterMetricsInput] URI. + * @throws [IllegalArgumentException] if the [ClusterMetricType] requires path parameters, but none are supplied; + * or when path parameters are provided for an [ClusterMetricType] that does not use path parameters. + */ + fun parsePathParams(): String { + val path = this.constructedUri.path + val apiType = this.clusterMetricType + + var pathParams: String + if (this.pathParams.isNotEmpty()) { + pathParams = this.pathParams + } else { + val prependPath = if (apiType.supportsPathParams) apiType.prependPath else apiType.defaultPath + pathParams = path.removePrefix(prependPath) + pathParams = pathParams.removeSuffix(apiType.appendPath) + } + + if (pathParams.isNotEmpty()) { + pathParams = pathParams.trim('/') + ILLEGAL_PATH_PARAMETER_CHARACTERS.forEach { character -> + if (pathParams.contains(character)) { + throw IllegalArgumentException( + "The provided path parameters contain invalid characters or spaces. Please omit: " + ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString(" ") + ) + } + } + } + + if (apiType.requiresPathParams && pathParams.isEmpty()) { + throw IllegalArgumentException("The API requires path parameters.") + } + if (!apiType.supportsPathParams && pathParams.isNotEmpty()) { + throw IllegalArgumentException("The API does not use path parameters.") + } + + return pathParams + } + + /** + * Examines the path of a [ClusterMetricsInput] to determine which API is being called. + * @param uriPath The path to examine. + * @return The [ClusterMetricType] associated with the [ClusterMetricsInput] monitor. + * @throws [IllegalArgumentException] when the API to call cannot be determined from the URI. 
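+     * For example, a path of "/_cluster/health" or "/_cluster/health/my-index" (an illustrative index name)
+     * resolves to [ClusterMetricType.CLUSTER_HEALTH].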
+ */ + private fun findApiType(uriPath: String): ClusterMetricType { + var apiType = ClusterMetricType.BLANK + ClusterMetricType.values() + .filter { option -> option != ClusterMetricType.BLANK } + .forEach { option -> + if (uriPath.startsWith(option.prependPath) || uriPath.startsWith(option.defaultPath)) { + apiType = option + } + } + if (apiType.isBlank()) { + throw IllegalArgumentException("The API could not be determined from the provided URI.") + } + return apiType + } + + /** + * Constructs a [URI] from the supported scheme, host, and port, and the provided [path], and [pathParams]. + * @return The constructed [URI]. + */ + private fun constructUrlFromInputs(): URI { + /** + * this try-catch block is required due to a httpcomponents 5.1.x library issue + * it auto encodes path params in the url. + */ + return try { + val formattedPath = if (path.startsWith("/") || path.isBlank()) path else "/$path" + val formattedPathParams = if (pathParams.startsWith("/") || pathParams.isBlank()) pathParams else "/$pathParams" + val uriBuilder = URIBuilder("$SUPPORTED_SCHEME://$SUPPORTED_HOST:$SUPPORTED_PORT$formattedPath$formattedPathParams") + uriBuilder.build() + } catch (ex: URISyntaxException) { + val uriBuilder = URIBuilder() + .setScheme(SUPPORTED_SCHEME) + .setHost(SUPPORTED_HOST) + .setPort(SUPPORTED_PORT) + .setPath(path + pathParams) + try { + uriBuilder.build() + } catch (e: URISyntaxException) { + throw IllegalArgumentException("Invalid URL syntax.") + } + } + } + + /** + * If [url] field is empty, populates it with [constructedUri]. + * If [path] and [pathParams] are empty, populates them with values from [url]. + */ + private fun parseEmptyFields() { + if (pathParams.isEmpty()) { + pathParams = this.parsePathParams() + } + if (path.isEmpty()) { + path = if (pathParams.isEmpty()) clusterMetricType.defaultPath else clusterMetricType.prependPath + } + if (url.isEmpty()) { + url = constructedUri.toString() + } + } + + /** + * Helper function to confirm at least [url], or required URI component fields are defined. + * @return TRUE if at least either [url] or the other components are provided; otherwise FALSE. + */ + private fun validateFields(): Boolean { + return url.isNotEmpty() || validateFieldsNotEmpty() + } + + /** + * Confirms that required URI component fields are defined. + * Only validating path for now, as that's the only required field. + * @return TRUE if all those fields are defined; otherwise FALSE. + */ + private fun validateFieldsNotEmpty(): Boolean { + return path.isNotEmpty() + } + + /** + * An enum class to quickly reference various supported API. 
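+     * For example, [CLUSTER_HEALTH] accepts an optional path parameter (such as an index name), while
+     * [CAT_SNAPSHOTS] is the only type that requires one (such as a snapshot repository name).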
+ */ + enum class ClusterMetricType( + val defaultPath: String, + val prependPath: String, + val appendPath: String, + val supportsPathParams: Boolean, + val requiresPathParams: Boolean + ) { + BLANK("", "", "", false, false), + CAT_INDICES( + "/_cat/indices", + "/_cat/indices", + "", + true, + false + ), + CAT_PENDING_TASKS( + "/_cat/pending_tasks", + "/_cat/pending_tasks", + "", + false, + false + ), + CAT_RECOVERY( + "/_cat/recovery", + "/_cat/recovery", + "", + true, + false + ), + CAT_SHARDS( + "/_cat/shards", + "/_cat/shards", + "", + true, + false + ), + CAT_SNAPSHOTS( + "/_cat/snapshots", + "/_cat/snapshots", + "", + true, + true + ), + CAT_TASKS( + "/_cat/tasks", + "/_cat/tasks", + "", + false, + false + ), + CLUSTER_HEALTH( + "/_cluster/health", + "/_cluster/health", + "", + true, + false + ), + CLUSTER_SETTINGS( + "/_cluster/settings", + "/_cluster/settings", + "", + false, + false + ), + CLUSTER_STATS( + "/_cluster/stats", + "/_cluster/stats", + "", + true, + false + ), + NODES_STATS( + "/_nodes/stats", + "/_nodes", + "", + false, + false + ); + + /** + * @return TRUE if the [ClusterMetricType] is [BLANK]; otherwise FALSE. + */ + fun isBlank(): Boolean { + return this === BLANK + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt new file mode 100644 index 00000000..d3af9be3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsTriggerRunResult.kt @@ -0,0 +1,110 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +data class ClusterMetricsTriggerRunResult( + override var triggerName: String, + override var triggered: Boolean, + override var error: Exception?, + override var actionResults: MutableMap = mutableMapOf(), + var clusterTriggerResults: List = listOf() +) : QueryLevelTriggerRunResult( + triggerName = triggerName, + error = error, + triggered = triggered, + actionResults = actionResults +) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap, + clusterTriggerResults = sin.readList((ClusterTriggerResult)::readFrom) + ) + + override fun alertError(): AlertError? 
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + builder + .field(TRIGGERED_FIELD, triggered) + .field(ACTION_RESULTS_FIELD, actionResults as Map) + .startArray(CLUSTER_RESULTS_FIELD) + clusterTriggerResults.forEach { it.toXContent(builder, params) } + return builder.endArray() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + clusterTriggerResults.forEach { it.writeTo(out) } + } + + companion object { + const val TRIGGERED_FIELD = "triggered" + const val ACTION_RESULTS_FIELD = "action_results" + const val CLUSTER_RESULTS_FIELD = "cluster_results" + } + + data class ClusterTriggerResult( + val cluster: String, + val triggered: Boolean + ) : ToXContentObject, Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + cluster = sin.readString(), + triggered = sin.readBoolean() + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .startObject(cluster) + .field(TRIGGERED_FIELD, triggered) + .endObject() + .endObject() + } + + override fun writeTo(out: StreamOutput) { + out.writeString(cluster) + out.writeBoolean(triggered) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ClusterTriggerResult { + return ClusterTriggerResult(sin) + } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt new file mode 100644 index 00000000..45c007e3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Comment.kt @@ -0,0 +1,165 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.alerting.util.optionalUsernameField +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import java.io.IOException +import java.time.Instant + +data class Comment( + val id: String = NO_ID, + val entityId: String = NO_ID, + val entityType: String, + val content: String, + val createdTime: Instant, + val lastUpdatedTime: Instant?, + val user: User? 
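+    // the comment's author; nullable, since user information may not be attached to the comment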
+) : Writeable, ToXContent { + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + entityId = sin.readString(), + entityType = sin.readString(), + content = sin.readString(), + createdTime = sin.readInstant(), + lastUpdatedTime = sin.readOptionalInstant(), + user = if (sin.readBoolean()) User(sin) else null + ) + + constructor( + entityId: String, + entityType: String, + content: String, + createdTime: Instant, + user: User? + ) : this ( + entityId = entityId, + entityType = entityType, + content = content, + createdTime = createdTime, + lastUpdatedTime = null, + user = user + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(entityId) + out.writeString(entityType) + out.writeString(content) + out.writeInstant(createdTime) + out.writeOptionalInstant(lastUpdatedTime) + out.writeBoolean(user != null) + user?.writeTo(out) + } + + fun asTemplateArg(): Map { + return mapOf( + _ID to id, + ENTITY_ID_FIELD to entityId, + ENTITY_TYPE_FIELD to entityType, + COMMENT_CONTENT_FIELD to content, + COMMENT_CREATED_TIME_FIELD to createdTime, + COMMENT_LAST_UPDATED_TIME_FIELD to lastUpdatedTime, + COMMENT_USER_FIELD to user?.name + ) + } + + // used to create the Comment JSON object for an API response (displayed to user) + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, false) + } + + // used to create the Comment JSON object for indexing a doc into an index (not displayed to user) + fun toXContentWithUser(builder: XContentBuilder): XContentBuilder { + return createXContentBuilder(builder, true) + } + + private fun createXContentBuilder(builder: XContentBuilder, includeFullUser: Boolean): XContentBuilder { + builder.startObject() + .field(ENTITY_ID_FIELD, entityId) + .field(ENTITY_TYPE_FIELD, entityType) + .field(COMMENT_CONTENT_FIELD, content) + .optionalTimeField(COMMENT_CREATED_TIME_FIELD, createdTime) + .optionalTimeField(COMMENT_LAST_UPDATED_TIME_FIELD, lastUpdatedTime) + + if (includeFullUser) { + // if we're storing a Comment into an internal index, include full User + builder.optionalUserField(COMMENT_USER_FIELD, user) + } else { + // if we're displaying the Comment as part of an API call response, only include username + builder.optionalUsernameField(COMMENT_USER_FIELD, user) + } + + builder.endObject() + return builder + } + + companion object { + const val ENTITY_ID_FIELD = "entity_id" + const val ENTITY_TYPE_FIELD = "entity_type" + const val COMMENT_CONTENT_FIELD = "content" + const val COMMENT_CREATED_TIME_FIELD = "created_time" + const val COMMENT_LAST_UPDATED_TIME_FIELD = "last_updated_time" + const val COMMENT_USER_FIELD = "user" + const val NO_ID = "" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID): Comment { + lateinit var entityId: String + lateinit var entityType: String + var content = "" + lateinit var createdTime: Instant + var lastUpdatedTime: Instant? = null + var user: User? 
= null + + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ENTITY_ID_FIELD -> entityId = xcp.text() + ENTITY_TYPE_FIELD -> entityType = xcp.text() + COMMENT_CONTENT_FIELD -> content = xcp.text() + COMMENT_CREATED_TIME_FIELD -> createdTime = requireNotNull(xcp.instant()) + COMMENT_LAST_UPDATED_TIME_FIELD -> lastUpdatedTime = xcp.instant() + COMMENT_USER_FIELD -> + user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + User.parse(xcp) + } + } + } + + return Comment( + id = id, + entityId = entityId, + entityType = entityType, + content = content, + createdTime = createdTime, + lastUpdatedTime = lastUpdatedTime, + user = user + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Comment { + return Comment(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/CompositeInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/CompositeInput.kt new file mode 100644 index 00000000..0f1e3e12 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/CompositeInput.kt @@ -0,0 +1,85 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class CompositeInput( + val sequence: Sequence +) : WorkflowInput { + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + Sequence(sin) + ) + + fun asTemplateArg(): Map { + return mapOf( + SEQUENCE_FIELD to sequence + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + sequence.writeTo(out) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(COMPOSITE_INPUT_FIELD) + .field(SEQUENCE_FIELD, sequence) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return COMPOSITE_INPUT_FIELD + } + + fun getMonitorIds(): List { + return sequence.delegates.map { delegate -> delegate.monitorId } + } + + companion object { + const val COMPOSITE_INPUT_FIELD = "composite_input" + const val SEQUENCE_FIELD = "sequence" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + WorkflowInput::class.java, + ParseField(COMPOSITE_INPUT_FIELD), + CheckedFunction { CompositeInput.parse(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): CompositeInput { + var sequence = Sequence(emptyList()) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + SEQUENCE_FIELD -> { + sequence = Sequence.parse(xcp) + } + } + } + + return CompositeInput(sequence) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): CompositeInput { + return CompositeInput(sin) + } + } +} diff --git 
a/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt new file mode 100644 index 00000000..f0a56a86 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/CorrelationAlert.kt @@ -0,0 +1,142 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +class CorrelationAlert : BaseAlert { + + // CorrelationAlert-specific properties + val correlatedFindingIds: List + val correlationRuleId: String + val correlationRuleName: String + + constructor( + correlatedFindingIds: List, + correlationRuleId: String, + correlationRuleName: String, + id: String, + version: Long, + schemaVersion: Int, + user: User?, + triggerName: String, + state: Alert.State, + startTime: Instant, + endTime: Instant?, + acknowledgedTime: Instant?, + errorMessage: String?, + severity: String, + actionExecutionResults: List + ) : super( + id = id, + version = version, + schemaVersion = schemaVersion, + user = user, + triggerName = triggerName, + state = state, + startTime = startTime, + endTime = endTime, + acknowledgedTime = acknowledgedTime, + errorMessage = errorMessage, + severity = severity, + actionExecutionResults = actionExecutionResults + ) { + this.correlatedFindingIds = correlatedFindingIds + this.correlationRuleId = correlationRuleId + this.correlationRuleName = correlationRuleName + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : super(sin) { + correlatedFindingIds = sin.readStringList() + correlationRuleId = sin.readString() + correlationRuleName = sin.readString() + } + + // Override to include CorrelationAlert specific fields + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startArray(CORRELATED_FINDING_IDS) + correlatedFindingIds.forEach { id -> + builder.value(id) + } + builder.endArray() + .field(CORRELATION_RULE_ID, correlationRuleId) + .field(CORRELATION_RULE_NAME, correlationRuleName) + super.toXContentWithUser(builder) + builder.endObject() + return builder + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeStringCollection(correlatedFindingIds) + out.writeString(correlationRuleId) + out.writeString(correlationRuleName) + } + override fun asTemplateArg(): Map { + val superTemplateArgs = super.asTemplateArg() + val correlationSpecificArgs = mapOf( + CORRELATED_FINDING_IDS to correlatedFindingIds, + CORRELATION_RULE_ID to correlationRuleId, + CORRELATION_RULE_NAME to correlationRuleName + ) + return superTemplateArgs + correlationSpecificArgs + } + companion object { + const val CORRELATED_FINDING_IDS = "correlated_finding_ids" + const val CORRELATION_RULE_ID = "correlation_rule_id" + const val CORRELATION_RULE_NAME = "correlation_rule_name" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): CorrelationAlert { + // Parse additional CorrelationAlert-specific fields + val correlatedFindingIds: MutableList = mutableListOf() + var correlationRuleId: 
String? = null + var correlationRuleName: String? = null + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + CORRELATED_FINDING_IDS -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + correlatedFindingIds.add(xcp.text()) + } + } + CORRELATION_RULE_ID -> correlationRuleId = xcp.text() + CORRELATION_RULE_NAME -> correlationRuleName = xcp.text() + } + } + + val unifiedAlert = parse(xcp, version) + return CorrelationAlert( + correlatedFindingIds = correlatedFindingIds, + correlationRuleId = requireNotNull(correlationRuleId), + correlationRuleName = requireNotNull(correlationRuleName), + id = requireNotNull(unifiedAlert.id), + version = requireNotNull(unifiedAlert.version), + schemaVersion = requireNotNull(unifiedAlert.schemaVersion), + user = unifiedAlert.user, + triggerName = requireNotNull(unifiedAlert.triggerName), + state = requireNotNull(unifiedAlert.state), + startTime = requireNotNull(unifiedAlert.startTime), + endTime = unifiedAlert.endTime, + acknowledgedTime = unifiedAlert.acknowledgedTime, + errorMessage = unifiedAlert.errorMessage, + severity = requireNotNull(unifiedAlert.severity), + actionExecutionResults = unifiedAlert.actionExecutionResults + ) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt new file mode 100644 index 00000000..7e995b53 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DataSources.kt @@ -0,0 +1,220 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class DataSources( + /** Configures a custom query index name for the monitor. Creates a new index if index with given name not present.*/ + val queryIndex: String = ScheduledJob.DOC_LEVEL_QUERIES_INDEX, + + /** Configures a custom index to store findings for a monitor. Creates a new index if index with given name not present. + * If index is pre-existing, mapping is updated*/ + val findingsIndex: String = ".opensearch-alerting-finding-history-write", // AlertIndices.FINDING_HISTORY_WRITE_INDEX + + /** Configures a custom index pattern for findingsIndex alias.*/ + val findingsIndexPattern: String? = "<.opensearch-alerting-finding-history-{now/d}-1>", // AlertIndices.FINDING_HISTORY_INDEX_PATTERN + + /** Configures a custom index to store alerts for a monitor. Creates a new index if index with given name not present. + * If index is pre-existing, mapping is updated. */ + val alertsIndex: String = ".opendistro-alerting-alerts", // AlertIndices.ALERT_INDEX + + /** Configures a custom index alias to store historic alerts for a monitor.*/ + val alertsHistoryIndex: String? 
= ".opendistro-alerting-alert-history-write", // AlertIndices.ALERT_HISTORY_WRITE_INDEX + + /** Configures a custom index pattern for alertHistoryIndex alias.*/ + val alertsHistoryIndexPattern: String? = "<.opendistro-alerting-alert-history-{now/d}-1>", // AlertIndices.ALERT_HISTORY_INDEX_PATTERN + + /** Configures a custom index alias to store comments associated with alerts.*/ + + val commentsIndex: String? = DEFAULT_COMMENTS_INDEX, // CommentsIndices.COMMENTS_HISTORY_WRITE_INDEX + + /** Configures a custom index pattern for commentsIndex alias.*/ + val commentsIndexPattern: String? = DEFAULT_COMMENTS_INDEX_PATTERN, // CommentsIndices.COMMENTS_HISTORY_INDEX_PATTERN + + /** Configures custom mappings by field type for query index. + * Custom query index mappings are configurable, only if a custom query index is configured too. */ + val queryIndexMappingsByType: Map> = mapOf(), + + /** Configures flag to enable or disable creating and storing findings. */ + val findingsEnabled: Boolean? = false + +) : Writeable, ToXContentObject { + + init { + require(queryIndex.isNotEmpty()) { + "Query index cannot be empty" + } + require(findingsIndex.isNotEmpty()) { + "Findings index cannot be empty" + } + require(alertsIndex.isNotEmpty()) { + "Alerts index cannot be empty" + } + if (queryIndexMappingsByType.isNotEmpty()) { + require(queryIndex != ScheduledJob.DOC_LEVEL_QUERIES_INDEX) { + "Custom query index mappings are configurable only if a custom query index is configured too." + } + require( + queryIndexMappingsByType.size == 1 && + queryIndexMappingsByType.containsKey("text") && + queryIndexMappingsByType.get("text")?.size == 1 && + queryIndexMappingsByType.get("text")!!.containsKey("analyzer") + ) { + "Custom query index mappings are currently configurable only for 'text' fields and mapping parameter can only be 'analyzer'" + } + } + } + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + queryIndex = sin.readString(), + findingsIndex = sin.readString(), + findingsIndexPattern = sin.readOptionalString(), + alertsIndex = sin.readString(), + alertsHistoryIndex = sin.readOptionalString(), + alertsHistoryIndexPattern = sin.readOptionalString(), + commentsIndex = sin.readOptionalString(), + commentsIndexPattern = sin.readOptionalString(), + queryIndexMappingsByType = sin.readMap() as Map>, + findingsEnabled = sin.readOptionalBoolean() + ) + + constructor( + queryIndex: String, + findingsIndex: String, + findingsIndexPattern: String?, + alertsIndex: String, + alertsHistoryIndex: String?, + alertsHistoryIndexPattern: String?, + queryIndexMappingsByType: Map>, + findingsEnabled: Boolean? 
+ ) : this( + queryIndex = queryIndex, + findingsIndex = findingsIndex, + findingsIndexPattern = findingsIndexPattern, + alertsIndex = alertsIndex, + alertsHistoryIndex = alertsHistoryIndex, + alertsHistoryIndexPattern = alertsHistoryIndexPattern, + commentsIndex = DEFAULT_COMMENTS_INDEX, + commentsIndexPattern = DEFAULT_COMMENTS_INDEX_PATTERN, + queryIndexMappingsByType = queryIndexMappingsByType, + findingsEnabled = findingsEnabled + ) + + @Suppress("UNCHECKED_CAST") + fun asTemplateArg(): Map { + return mapOf( + QUERY_INDEX_FIELD to queryIndex, + FINDINGS_INDEX_FIELD to findingsIndex, + FINDINGS_INDEX_PATTERN_FIELD to findingsIndexPattern, + ALERTS_INDEX_FIELD to alertsIndex, + ALERTS_HISTORY_INDEX_FIELD to alertsHistoryIndex, + ALERTS_HISTORY_INDEX_PATTERN_FIELD to alertsHistoryIndexPattern, + COMMENTS_INDEX_FIELD to commentsIndex, + COMMENTS_INDEX_PATTERN_FIELD to commentsIndexPattern, + QUERY_INDEX_MAPPINGS_BY_TYPE to queryIndexMappingsByType, + FINDINGS_ENABLED_FIELD to findingsEnabled + ) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + builder.field(QUERY_INDEX_FIELD, queryIndex) + builder.field(FINDINGS_INDEX_FIELD, findingsIndex) + builder.field(FINDINGS_INDEX_PATTERN_FIELD, findingsIndexPattern) + builder.field(ALERTS_INDEX_FIELD, alertsIndex) + builder.field(ALERTS_HISTORY_INDEX_FIELD, alertsHistoryIndex) + builder.field(ALERTS_HISTORY_INDEX_PATTERN_FIELD, alertsHistoryIndexPattern) + builder.field(COMMENTS_INDEX_FIELD, commentsIndex) + builder.field(COMMENTS_INDEX_PATTERN_FIELD, commentsIndexPattern) + builder.field(QUERY_INDEX_MAPPINGS_BY_TYPE, queryIndexMappingsByType as Map) + builder.field(FINDINGS_ENABLED_FIELD, findingsEnabled) + builder.endObject() + return builder + } + + companion object { + const val QUERY_INDEX_FIELD = "query_index" + const val FINDINGS_INDEX_FIELD = "findings_index" + const val FINDINGS_INDEX_PATTERN_FIELD = "findings_index_pattern" + const val ALERTS_INDEX_FIELD = "alerts_index" + const val ALERTS_HISTORY_INDEX_FIELD = "alerts_history_index" + const val ALERTS_HISTORY_INDEX_PATTERN_FIELD = "alerts_history_index_pattern" + const val COMMENTS_INDEX_FIELD = "comments_index" + const val COMMENTS_INDEX_PATTERN_FIELD = "comments_index_pattern" + const val QUERY_INDEX_MAPPINGS_BY_TYPE = "query_index_mappings_by_type" + const val FINDINGS_ENABLED_FIELD = "findings_enabled" + + const val DEFAULT_COMMENTS_INDEX = ".opensearch-alerting-comments-history-write" + const val DEFAULT_COMMENTS_INDEX_PATTERN = "<.opensearch-alerting-comments-history-{now/d}-1>" + + @JvmStatic + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + fun parse(xcp: XContentParser): DataSources { + var queryIndex = "" + var findingsIndex = "" + var findingsIndexPattern = "" + var alertsIndex = "" + var alertsHistoryIndex = "" + var alertsHistoryIndexPattern = "" + var commentsIndex = "" + var commentsIndexPattern = "" + var queryIndexMappingsByType: Map> = mapOf() + var findingsEnabled = false + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + QUERY_INDEX_FIELD -> queryIndex = xcp.text() + FINDINGS_INDEX_FIELD -> findingsIndex = xcp.text() + FINDINGS_INDEX_PATTERN_FIELD -> findingsIndexPattern = xcp.text() + ALERTS_INDEX_FIELD -> alertsIndex = xcp.text() + ALERTS_HISTORY_INDEX_FIELD -> alertsHistoryIndex = 
xcp.text() + ALERTS_HISTORY_INDEX_PATTERN_FIELD -> alertsHistoryIndexPattern = xcp.text() + COMMENTS_INDEX_FIELD -> commentsIndex = xcp.text() + COMMENTS_INDEX_PATTERN_FIELD -> commentsIndexPattern = xcp.text() + QUERY_INDEX_MAPPINGS_BY_TYPE -> queryIndexMappingsByType = xcp.map() as Map> + FINDINGS_ENABLED_FIELD -> findingsEnabled = xcp.booleanValue() + } + } + return DataSources( + queryIndex = queryIndex, + findingsIndex = findingsIndex, + findingsIndexPattern = findingsIndexPattern, + alertsIndex = alertsIndex, + alertsHistoryIndex = alertsHistoryIndex, + alertsHistoryIndexPattern = alertsHistoryIndexPattern, + commentsIndex = commentsIndex, + commentsIndexPattern = commentsIndexPattern, + queryIndexMappingsByType = queryIndexMappingsByType, + findingsEnabled = findingsEnabled + ) + } + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(queryIndex) + out.writeString(findingsIndex) + out.writeOptionalString(findingsIndexPattern) + out.writeString(alertsIndex) + out.writeOptionalString(alertsHistoryIndex) + out.writeOptionalString(alertsHistoryIndexPattern) + out.writeOptionalString(commentsIndex) + out.writeOptionalString(commentsIndexPattern) + out.writeMap(queryIndexMappingsByType as Map) + out.writeOptionalBoolean(findingsEnabled) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Delegate.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Delegate.kt new file mode 100644 index 00000000..e32ae78d --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Delegate.kt @@ -0,0 +1,120 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.commons.utils.validateId +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** + * Each underlying monitors defined in the composite monitor sequence input. + * They are executed sequentially in the order mentioned. + * Optionally accepts chained findings context. + * */ +data class Delegate( + /** + * Defines the order of the monitor in delegate list + */ + val order: Int, + /** + * Id of the monitor + */ + val monitorId: String, + /** + * Keeps the track of the previously executed monitor in a chain list. + * Used for pre-filtering by getting the findings doc ids for the given monitor + */ + val chainedMonitorFindings: ChainedMonitorFindings? 
= null +) : BaseModel { + + init { + validateId(monitorId) + validateOrder(order) + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + order = sin.readInt(), + monitorId = sin.readString(), + chainedMonitorFindings = if (sin.readBoolean()) { + ChainedMonitorFindings(sin) + } else { + null + } + ) + + fun asTemplateArg(): Map { + return mapOf( + ORDER_FIELD to order, + MONITOR_ID_FIELD to monitorId + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeInt(order) + out.writeString(monitorId) + out.writeBoolean(chainedMonitorFindings != null) + chainedMonitorFindings?.writeTo(out) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(ORDER_FIELD, order) + .field(MONITOR_ID_FIELD, monitorId) + if (chainedMonitorFindings != null) { + builder.field(CHAINED_FINDINGS_FIELD, chainedMonitorFindings) + } + builder.endObject() + return builder + } + + companion object { + const val ORDER_FIELD = "order" + const val MONITOR_ID_FIELD = "monitor_id" + const val CHAINED_FINDINGS_FIELD = "chained_monitor_findings" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Delegate { + lateinit var monitorId: String + var order = 0 + var chainedMonitorFindings: ChainedMonitorFindings? = null + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ORDER_FIELD -> { + order = xcp.intValue() + validateOrder(order) + } + MONITOR_ID_FIELD -> { + monitorId = xcp.text() + validateId(monitorId) + } + CHAINED_FINDINGS_FIELD -> { + chainedMonitorFindings = ChainedMonitorFindings.parse(xcp) + } + } + } + return Delegate(order, monitorId, chainedMonitorFindings) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Delegate { + return Delegate(sin) + } + + fun validateOrder(order: Int) { + require(order > 0) { "Invalid delgate order" } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt new file mode 100644 index 00000000..3193ee57 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInput.kt @@ -0,0 +1,117 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class DocLevelMonitorInput( + val description: String = NO_DESCRIPTION, + val indices: List, + val queries: List +) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // description + sin.readStringList(), // indices + sin.readList(::DocLevelQuery) // docLevelQueries + ) + + override fun asTemplateArg(): Map { + return mapOf( + DESCRIPTION_FIELD to description, + INDICES_FIELD to indices, + QUERIES_FIELD to queries.map { it.asTemplateArg() } + ) + } + + override fun name(): 
String { + return DOC_LEVEL_INPUT_FIELD + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(description) + out.writeStringCollection(indices) + out.writeCollection(queries) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(DOC_LEVEL_INPUT_FIELD) + .field(DESCRIPTION_FIELD, description) + .field(INDICES_FIELD, indices.toTypedArray()) + .field(QUERIES_FIELD, queries.toTypedArray()) + .endObject() + .endObject() + return builder + } + + companion object { + const val DESCRIPTION_FIELD = "description" + const val INDICES_FIELD = "indices" + const val DOC_LEVEL_INPUT_FIELD = "doc_level_input" + const val QUERIES_FIELD = "queries" + + const val NO_DESCRIPTION = "" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Input::class.java, + ParseField(DOC_LEVEL_INPUT_FIELD), + CheckedFunction { parse(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): DocLevelMonitorInput { + var description: String = NO_DESCRIPTION + val indices: MutableList = mutableListOf() + val docLevelQueries: MutableList = mutableListOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + DESCRIPTION_FIELD -> description = xcp.text() + INDICES_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + indices.add(xcp.text()) + } + } + QUERIES_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + docLevelQueries.add(DocLevelQuery.parse(xcp)) + } + } + } + } + + return DocLevelMonitorInput(description = description, indices = indices, queries = docLevelQueries) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): DocLevelMonitorInput { + return DocLevelMonitorInput(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt new file mode 100644 index 00000000..ebba6bf9 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocLevelQuery.kt @@ -0,0 +1,194 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.lang.IllegalArgumentException +import java.util.UUID + +data class DocLevelQuery( + val id: String = UUID.randomUUID().toString(), + val name: String, + val fields: List, + val query: String, + val tags: List = mutableListOf(), + val queryFieldNames: List = mutableListOf() +) : BaseModel { + + init { + // Ensure the name and tags have valid characters + validateQueryName(name) + for (tag in tags) { + validateQueryTag(tag) + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id 
+ sin.readString(), // name + sin.readStringList(), // fields + sin.readString(), // query + sin.readStringList(), // tags, + sin.readStringList() // fieldsBeingQueried + ) + + fun asTemplateArg(): Map { + return mapOf( + QUERY_ID_FIELD to id, + NAME_FIELD to name, + FIELDS_FIELD to fields, + QUERY_FIELD to query, + TAGS_FIELD to tags, + QUERY_FIELD_NAMES_FIELD to queryFieldNames + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeStringCollection(fields) + out.writeString(query) + out.writeStringCollection(tags) + out.writeStringCollection(queryFieldNames) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(QUERY_ID_FIELD, id) + .field(NAME_FIELD, name) + .field(FIELDS_FIELD, fields.toTypedArray()) + .field(QUERY_FIELD, query) + .field(TAGS_FIELD, tags.toTypedArray()) + .field(QUERY_FIELD_NAMES_FIELD, queryFieldNames.toTypedArray()) + .endObject() + return builder + } + + companion object { + const val QUERY_ID_FIELD = "id" + const val NAME_FIELD = "name" + const val FIELDS_FIELD = "fields" + const val QUERY_FIELD = "query" + const val TAGS_FIELD = "tags" + const val QUERY_FIELD_NAMES_FIELD = "query_field_names" + const val NO_ID = "" + val INVALID_CHARACTERS: List = listOf(" ", "[", "]", "{", "}", "(", ")") + val QUERY_NAME_REGEX = "^.{1,256}$".toRegex() // regex to restrict string length between 1 - 256 chars + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): DocLevelQuery { + var id: String = UUID.randomUUID().toString() + lateinit var query: String + lateinit var name: String + val tags: MutableList = mutableListOf() + val fields: MutableList = mutableListOf() + val queryFieldNames: MutableList = mutableListOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + QUERY_ID_FIELD -> id = xcp.text() + NAME_FIELD -> { + name = xcp.text() + validateQueryName(name) + } + + QUERY_FIELD -> query = xcp.text() + TAGS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val tag = xcp.text() + validateQueryTag(tag) + tags.add(tag) + } + } + + FIELDS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val field = xcp.text() + fields.add(field) + } + } + + QUERY_FIELD_NAMES_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val field = xcp.text() + queryFieldNames.add(field) + } + } + } + } + + return DocLevelQuery( + id = id, + name = name, + fields = fields, + query = query, + tags = tags, + queryFieldNames = queryFieldNames + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): DocLevelQuery { + return DocLevelQuery(sin) + } + + private fun validateQueryTag(stringVal: String) { + for (inValidChar in INVALID_CHARACTERS) { + if (stringVal.contains(inValidChar)) { + throw IllegalArgumentException( + "The query tag, $stringVal, contains an invalid character: [' 
','[',']','{','}','(',')']" + ) + } + } + } + private fun validateQueryName(stringVal: String) { + if (!stringVal.matches(QUERY_NAME_REGEX)) { + throw IllegalArgumentException("The query name, $stringVal, should be between 1 - 256 characters.") + } + } + } + + // constructor for java plugins' convenience to optionally avoid passing empty list for 'fieldsBeingQueried' field + constructor( + id: String, + name: String, + fields: MutableList, + query: String, + tags: MutableList + ) : this( + id = id, + name = name, + fields = fields, + query = query, + tags = tags, + queryFieldNames = emptyList() + ) +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt new file mode 100644 index 00000000..a1f8b617 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTrigger.kt @@ -0,0 +1,169 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.script.Script +import java.io.IOException + +data class DocumentLevelTrigger( + override val id: String = UUIDs.base64UUID(), + override val name: String, + override val severity: String, + override val actions: List, + val condition: Script +) : Trigger { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readString(), // name + sin.readString(), // severity + sin.readList(::Action), // actions + Script(sin) + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(DOCUMENT_LEVEL_TRIGGER_FIELD) + .field(ID_FIELD, id) + .field(NAME_FIELD, name) + .field(SEVERITY_FIELD, severity) + .startObject(CONDITION_FIELD) + .field(SCRIPT_FIELD, condition) + .endObject() + .field(ACTIONS_FIELD, actions.toTypedArray()) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return DOCUMENT_LEVEL_TRIGGER_FIELD + } + + /** Returns a representation of the trigger suitable for passing into painless and mustache scripts. 
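+     * The map mirrors the fields written below: the trigger id, name, severity, the rendered actions,
+     * and the condition's script source and language.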
*/ + fun asTemplateArg(): Map { + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + SEVERITY_FIELD to severity, + ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to condition.idOrCode, + LANG_FIELD to condition.lang + ) + ) + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeString(severity) + out.writeCollection(actions) + condition.writeTo(out) + } + + companion object { + const val DOCUMENT_LEVEL_TRIGGER_FIELD = "document_level_trigger" + const val CONDITION_FIELD = "condition" + const val SCRIPT_FIELD = "script" + const val QUERY_IDS_FIELD = "query_ids" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(DOCUMENT_LEVEL_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): DocumentLevelTrigger { + var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified + lateinit var name: String + lateinit var severity: String + lateinit var condition: Script + val queryIds: MutableList = mutableListOf() + val actions: MutableList = mutableListOf() + + if (xcp.currentToken() != XContentParser.Token.START_OBJECT && xcp.currentToken() != XContentParser.Token.FIELD_NAME) { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) + } + + // If the parser began on START_OBJECT, move to the next token so that the while loop enters on + // the fieldName (or END_OBJECT if it's empty). + if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken() + + while (xcp.currentToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + + xcp.nextToken() + when (fieldName) { + ID_FIELD -> id = xcp.text() + NAME_FIELD -> name = xcp.text() + SEVERITY_FIELD -> severity = xcp.text() + CONDITION_FIELD -> { + xcp.nextToken() + condition = Script.parse(xcp) + require(condition.lang == Script.DEFAULT_SCRIPT_LANG) { + "Invalid script language. Allowed languages are [${Script.DEFAULT_SCRIPT_LANG}]" + } + xcp.nextToken() + } + QUERY_IDS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + queryIds.add(xcp.text()) + } + } + ACTIONS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actions.add(Action.parse(xcp)) + } + } + } + xcp.nextToken() + } + + return DocumentLevelTrigger( + name = requireNotNull(name) { "Trigger name is null" }, + severity = requireNotNull(severity) { "Trigger severity is null" }, + condition = requireNotNull(condition) { "Trigger condition is null" }, + actions = requireNotNull(actions) { "Trigger actions are null" }, + id = requireNotNull(id) { "Trigger id is null." 
} + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): DocumentLevelTrigger { + return DocumentLevelTrigger(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt new file mode 100644 index 00000000..1acb354b --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerRunResult.kt @@ -0,0 +1,82 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException + +data class DocumentLevelTriggerRunResult( + override var triggerName: String, + var triggeredDocs: List, + override var error: Exception?, + var actionResultsMap: MutableMap> = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggeredDocs = sin.readStringList(), + actionResultsMap = readActionResults(sin) + ) + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggeredDocs", triggeredDocs as List) + .field("action_results", actionResultsMap as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeStringCollection(triggeredDocs) + out.writeInt(actionResultsMap.size) + actionResultsMap.forEach { (alert, actionResults) -> + out.writeString(alert) + out.writeInt(actionResults.size) + actionResults.forEach { (id, result) -> + out.writeString(id) + result.writeTo(out) + } + } + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return DocumentLevelTriggerRunResult(sin) + } + + @JvmStatic + fun readActionResults(sin: StreamInput): MutableMap> { + val actionResultsMapReconstruct: MutableMap> = mutableMapOf() + val size = sin.readInt() + var idx = 0 + while (idx < size) { + val alert = sin.readString() + val actionResultsSize = sin.readInt() + val actionRunResultElem = mutableMapOf() + var i = 0 + while (i < actionResultsSize) { + val actionId = sin.readString() + val actionResult = ActionRunResult.readFrom(sin) + actionRunResultElem[actionId] = actionResult + ++i + } + actionResultsMapReconstruct[alert] = actionRunResultElem + ++idx + } + return actionResultsMapReconstruct + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Finding.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Finding.kt new file mode 100644 index 00000000..d6436f8b --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Finding.kt @@ -0,0 +1,187 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.instant +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import 
org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken +import java.io.IOException +import java.time.Instant + +/** + * A wrapper of the log event that enriches the event by also including information about the monitor it triggered. + */ +class Finding( + val id: String = NO_ID, + val relatedDocIds: List, + val correlatedDocIds: List = listOf(), + val monitorId: String, + val monitorName: String, + val index: String, + val docLevelQueries: List, + val timestamp: Instant, + /** + * Keeps the track of the workflow-monitor exact execution. + * Used for filtering the data when chaining monitors in a workflow. + */ + val executionId: String? = null +) : Writeable, ToXContent { + + constructor( + id: String = NO_ID, + relatedDocIds: List, + monitorId: String, + monitorName: String, + index: String, + docLevelQueries: List, + timestamp: Instant + ) : this ( + id = id, + relatedDocIds = relatedDocIds, + monitorId = monitorId, + monitorName = monitorName, + index = index, + docLevelQueries = docLevelQueries, + timestamp = timestamp, + executionId = null + ) + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + relatedDocIds = sin.readStringList(), + correlatedDocIds = sin.readStringList(), + monitorId = sin.readString(), + monitorName = sin.readString(), + index = sin.readString(), + docLevelQueries = sin.readList((DocLevelQuery)::readFrom), + timestamp = sin.readInstant(), + executionId = sin.readOptionalString() + ) + + fun asTemplateArg(): Map { + return mapOf( + FINDING_ID_FIELD to id, + RELATED_DOC_IDS_FIELD to relatedDocIds, + CORRELATED_DOC_IDS_FIELD to correlatedDocIds, + MONITOR_ID_FIELD to monitorId, + MONITOR_NAME_FIELD to monitorName, + INDEX_FIELD to index, + QUERIES_FIELD to docLevelQueries, + TIMESTAMP_FIELD to timestamp.toEpochMilli(), + EXECUTION_ID_FIELD to executionId + ) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(FINDING_ID_FIELD, id) + .field(RELATED_DOC_IDS_FIELD, relatedDocIds) + .field(CORRELATED_DOC_IDS_FIELD, correlatedDocIds) + .field(MONITOR_ID_FIELD, monitorId) + .field(MONITOR_NAME_FIELD, monitorName) + .field(INDEX_FIELD, index) + .field(QUERIES_FIELD, docLevelQueries.toTypedArray()) + .field(TIMESTAMP_FIELD, timestamp.toEpochMilli()) + .field(EXECUTION_ID_FIELD, executionId) + builder.endObject() + return builder + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeStringCollection(relatedDocIds) + out.writeStringCollection(correlatedDocIds) + out.writeString(monitorId) + out.writeString(monitorName) + out.writeString(index) + out.writeCollection(docLevelQueries) + out.writeInstant(timestamp) + out.writeOptionalString(executionId) + } + + companion object { + const val FINDING_ID_FIELD = "id" + const val RELATED_DOC_IDS_FIELD = "related_doc_ids" + const val CORRELATED_DOC_IDS_FIELD = "correlated_doc_ids" + const val MONITOR_ID_FIELD = "monitor_id" + const val MONITOR_NAME_FIELD = "monitor_name" + const val INDEX_FIELD = "index" + const val QUERIES_FIELD = "queries" + const val TIMESTAMP_FIELD = "timestamp" + const val EXECUTION_ID_FIELD = "execution_id" + const val NO_ID = "" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser): Finding { + var id: String = NO_ID + 
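+            // accumulators for the parsed fields; monitorId, monitorName, index, and timestamp are
+            // lateinit, so a source document missing any of them fails during parsing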
val relatedDocIds: MutableList = mutableListOf() + val correlatedDocIds: MutableList = mutableListOf() + lateinit var monitorId: String + lateinit var monitorName: String + lateinit var index: String + val queries: MutableList = mutableListOf() + lateinit var timestamp: Instant + var executionId: String? = null + + ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + FINDING_ID_FIELD -> id = xcp.text() + RELATED_DOC_IDS_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + relatedDocIds.add(xcp.text()) + } + } + CORRELATED_DOC_IDS_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + correlatedDocIds.add(xcp.text()) + } + } + MONITOR_ID_FIELD -> monitorId = xcp.text() + MONITOR_NAME_FIELD -> monitorName = xcp.text() + INDEX_FIELD -> index = xcp.text() + QUERIES_FIELD -> { + ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + queries.add(DocLevelQuery.parse(xcp)) + } + } + TIMESTAMP_FIELD -> { + timestamp = requireNotNull(xcp.instant()) + } + EXECUTION_ID_FIELD -> executionId = xcp.textOrNull() + } + } + + return Finding( + id = id, + relatedDocIds = relatedDocIds, + correlatedDocIds = correlatedDocIds, + monitorId = monitorId, + monitorName = monitorName, + index = index, + docLevelQueries = queries, + timestamp = timestamp, + executionId = executionId + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Finding { + return Finding(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/FindingDocument.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/FindingDocument.kt new file mode 100644 index 00000000..202693fd --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/FindingDocument.kt @@ -0,0 +1,87 @@ +package org.opensearch.commons.alerting.model + +import org.apache.logging.log4j.LogManager +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +private val log = LogManager.getLogger(FindingDocument::class.java) + +class FindingDocument( + val index: String, + val id: String, + val found: Boolean, + val document: String +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + index = sin.readString(), + id = sin.readString(), + found = sin.readBoolean(), + document = sin.readString() + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(INDEX_FIELD, index) + .field(FINDING_DOCUMENT_ID_FIELD, id) + .field(FOUND_FIELD, found) + .field(DOCUMENT_FIELD, document) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(index) + out.writeString(id) + out.writeBoolean(found) + out.writeString(document) + } + + 
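+    // Note: [parse] only reads the `found` flag and document source from the body; the document id and
+    // index are supplied by the caller (defaulting to NO_ID and NO_INDEX).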
companion object { + const val INDEX_FIELD = "index" + const val FINDING_DOCUMENT_ID_FIELD = "id" + const val FOUND_FIELD = "found" + const val DOCUMENT_FIELD = "document" + const val NO_ID = "" + const val NO_INDEX = "" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, index: String = NO_INDEX): FindingDocument { + var found = false + var document: String = "" + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + FOUND_FIELD -> found = xcp.booleanValue() + DOCUMENT_FIELD -> document = xcp.text() + } + } + + return FindingDocument( + index = index, + id = id, + found = found, + document = document + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): FindingDocument { + return FindingDocument(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/FindingWithDocs.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/FindingWithDocs.kt new file mode 100644 index 00000000..057e2214 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/FindingWithDocs.kt @@ -0,0 +1,78 @@ +package org.opensearch.commons.alerting.model + +import org.apache.logging.log4j.LogManager +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +private val log = LogManager.getLogger(Finding::class.java) + +class FindingWithDocs( + val finding: Finding, + val documents: List +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + finding = Finding.readFrom(sin), + documents = sin.readList((FindingDocument)::readFrom) + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + finding.writeTo(out) + out.writeCollection(documents) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(FINDING_FIELD, finding) + .field(DOCUMENTS_FIELD, documents) + builder.endObject() + return builder + } + + companion object { + const val FINDING_FIELD = "finding" + const val DOCUMENTS_FIELD = "document_list" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): FindingWithDocs { + lateinit var finding: Finding + val documents: MutableList = mutableListOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + FINDING_FIELD -> finding = Finding.parse(xcp) + DOCUMENTS_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + documents.add(FindingDocument.parse(xcp)) + } + } + } + } + + return FindingWithDocs( + finding = finding, + documents = documents + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): FindingWithDocs { + return 
FindingWithDocs(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt new file mode 100644 index 00000000..8872b525 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/IndexExecutionContext.kt @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +data class IndexExecutionContext( + val queries: List, + val lastRunContext: MutableMap, // previous execution + val updatedLastRunContext: MutableMap, // without sequence numbers + val indexName: String, + val concreteIndexName: String, + val updatedIndexNames: List, + val concreteIndexNames: List, + val conflictingFields: List, + val docIds: List? = emptyList() +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + queries = sin.readList { DocLevelQuery(sin) }, + lastRunContext = sin.readMap() as MutableMap, + updatedLastRunContext = sin.readMap() as MutableMap, + indexName = sin.readString(), + concreteIndexName = sin.readString(), + updatedIndexNames = sin.readStringList(), + concreteIndexNames = sin.readStringList(), + conflictingFields = sin.readStringList(), + docIds = sin.readOptionalStringList() + ) + + override fun writeTo(out: StreamOutput?) { + out!!.writeCollection(queries) + out.writeMap(lastRunContext) + out.writeMap(updatedLastRunContext) + out.writeString(indexName) + out.writeString(concreteIndexName) + out.writeStringCollection(updatedIndexNames) + out.writeStringCollection(concreteIndexNames) + out.writeStringCollection(conflictingFields) + out.writeOptionalStringCollection(docIds) + } + + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + builder!!.startObject() + .field("queries", queries) + .field("last_run_context", lastRunContext) + .field("updated_last_run_context", updatedLastRunContext) + .field("index_name", indexName) + .field("concrete_index_name", concreteIndexName) + .field("udpated_index_names", updatedIndexNames) + .field("concrete_index_names", concreteIndexNames) + .field("conflicting_fields", conflictingFields) + .field("doc_ids", docIds) + .endObject() + return builder + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt new file mode 100644 index 00000000..3846cea6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Input.kt @@ -0,0 +1,72 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.model.ClusterMetricsInput.Companion.URI_FIELD +import org.opensearch.commons.alerting.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD +import org.opensearch.commons.alerting.model.SearchInput.Companion.SEARCH_FIELD +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput.Companion.REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD +import 
org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput.Companion.REMOTE_MONITOR_INPUT_FIELD +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +interface Input : BaseModel { + + enum class Type(val value: String) { + DOCUMENT_LEVEL_INPUT(DOC_LEVEL_INPUT_FIELD), + CLUSTER_METRICS_INPUT(URI_FIELD), + SEARCH_INPUT(SEARCH_FIELD), + REMOTE_MONITOR_INPUT(REMOTE_MONITOR_INPUT_FIELD), + REMOTE_DOC_LEVEL_MONITOR_INPUT(REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD); + + override fun toString(): String { + return value + } + } + + companion object { + + @Throws(IOException::class) + fun parse(xcp: XContentParser): Input { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val input = if (xcp.currentName() == Type.SEARCH_INPUT.value) { + SearchInput.parseInner(xcp) + } else if (xcp.currentName() == Type.CLUSTER_METRICS_INPUT.value) { + ClusterMetricsInput.parseInner(xcp) + } else if (xcp.currentName() == Type.DOCUMENT_LEVEL_INPUT.value) { + DocLevelMonitorInput.parse(xcp) + } else if (xcp.currentName() == Type.REMOTE_MONITOR_INPUT.value) { + RemoteMonitorInput.parse(xcp) + } else { + RemoteDocLevelMonitorInput.parse(xcp) + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + return input + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Input { + return when (val type = sin.readEnum(Input.Type::class.java)) { + Type.DOCUMENT_LEVEL_INPUT -> DocLevelMonitorInput(sin) + Type.CLUSTER_METRICS_INPUT -> ClusterMetricsInput(sin) + Type.SEARCH_INPUT -> SearchInput(sin) + Type.REMOTE_MONITOR_INPUT -> RemoteMonitorInput(sin) + Type.REMOTE_DOC_LEVEL_MONITOR_INPUT -> RemoteDocLevelMonitorInput(sin) + // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns + // enum can be null in Java + else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") + } + } + } + + fun name(): String + + /** Returns a representation of the schedule suitable for passing into painless and mustache scripts. 
*/ + fun asTemplateArg(): Map = emptyMap() +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt new file mode 100644 index 00000000..51cb0d9f --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Monitor.kt @@ -0,0 +1,366 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.alerting.util.IndexUtils.Companion.MONITOR_MAX_INPUTS +import org.opensearch.commons.alerting.util.IndexUtils.Companion.MONITOR_MAX_TRIGGERS +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION +import org.opensearch.commons.alerting.util.IndexUtils.Companion.supportedClusterMetricsSettings +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.isBucketLevelMonitor +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.authuser.User +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant +import java.util.regex.Pattern + +data class Monitor( + override val id: String = NO_ID, + override val version: Long = NO_VERSION, + override val name: String, + override val enabled: Boolean, + override val schedule: Schedule, + override val lastUpdateTime: Instant, + override val enabledTime: Instant?, + // TODO: Check how this behaves during rolling upgrade/multi-version cluster + // Can read/write and parsing break if it's done from an old -> new version of the plugin? + val monitorType: String, + val user: User?, + val schemaVersion: Int = NO_SCHEMA_VERSION, + val inputs: List, + val triggers: List, + val uiMetadata: Map, + val dataSources: DataSources = DataSources(), + val owner: String? = "alerting" +) : ScheduledJob { + + override val type = MONITOR_TYPE + + init { + // Ensure that trigger ids are unique within a monitor + val triggerIds = mutableSetOf() + triggers.forEach { trigger -> + // NoOpTrigger is only used in "Monitor Error Alerts" as a placeholder + require(trigger !is NoOpTrigger) + + require(triggerIds.add(trigger.id)) { "Duplicate trigger id: ${trigger.id}. Trigger ids must be unique." 
} + // Verify Trigger type based on Monitor type + when (monitorType) { + MonitorType.QUERY_LEVEL_MONITOR.value -> + require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } + MonitorType.BUCKET_LEVEL_MONITOR.value -> + require(trigger is BucketLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } + MonitorType.CLUSTER_METRICS_MONITOR.value -> + require(trigger is QueryLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } + MonitorType.DOC_LEVEL_MONITOR.value -> + require(trigger is DocumentLevelTrigger) { "Incompatible trigger [${trigger.id}] for monitor type [$monitorType]" } + } + } + if (enabled) { + requireNotNull(enabledTime) + } else { + require(enabledTime == null) + } + require(inputs.size <= MONITOR_MAX_INPUTS) { "Monitors can only have $MONITOR_MAX_INPUTS search input." } + require(triggers.size <= MONITOR_MAX_TRIGGERS) { "Monitors can only support up to $MONITOR_MAX_TRIGGERS triggers." } + if (this.isBucketLevelMonitor()) { + inputs.forEach { input -> + require(input is SearchInput) { "Unsupported input [$input] for Monitor" } + // TODO: Keeping query validation simple for now, only term aggregations have full support for the "group by" on the + // initial release. Should either add tests for other aggregation types or add validation to prevent using them. + require(input.query.aggregations() != null && !input.query.aggregations().aggregatorFactories.isEmpty()) { + "At least one aggregation is required for the input [$input]" + } + } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + version = sin.readLong(), + name = sin.readString(), + enabled = sin.readBoolean(), + schedule = Schedule.readFrom(sin), + lastUpdateTime = sin.readInstant(), + enabledTime = sin.readOptionalInstant(), + monitorType = sin.readString(), + user = if (sin.readBoolean()) { + User(sin) + } else { + null + }, + schemaVersion = sin.readInt(), + inputs = sin.readList((Input)::readFrom), + triggers = sin.readList((Trigger)::readFrom), + uiMetadata = suppressWarning(sin.readMap()), + dataSources = if (sin.readBoolean()) { + DataSources(sin) + } else { + DataSources() + }, + owner = sin.readOptionalString() + ) + + // This enum classifies different Monitors + // This is different from 'type' which denotes the Scheduled Job type + enum class MonitorType(val value: String) { + QUERY_LEVEL_MONITOR("query_level_monitor"), + BUCKET_LEVEL_MONITOR("bucket_level_monitor"), + CLUSTER_METRICS_MONITOR("cluster_metrics_monitor"), + DOC_LEVEL_MONITOR("doc_level_monitor"); + + override fun toString(): String { + return value + } + } + + /** Returns a representation of the monitor suitable for passing into painless and mustache scripts. 
*/ + fun asTemplateArg(): Map { + return mapOf( + _ID to id, + _VERSION to version, + NAME_FIELD to name, + ENABLED_FIELD to enabled, + MONITOR_TYPE_FIELD to monitorType.toString(), + ENABLED_TIME_FIELD to enabledTime?.toEpochMilli(), + LAST_UPDATE_TIME_FIELD to lastUpdateTime.toEpochMilli(), + SCHEDULE_FIELD to schedule.asTemplateArg(), + INPUTS_FIELD to inputs.map { it.asTemplateArg() } + ) + } + + fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, params, false) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, params, true) + } + + private fun createXContentBuilder(builder: XContentBuilder, params: ToXContent.Params, secure: Boolean): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(type) + builder.field(TYPE_FIELD, type) + .field(SCHEMA_VERSION_FIELD, schemaVersion) + .field(NAME_FIELD, name) + .field(MONITOR_TYPE_FIELD, monitorType) + + if (!secure) { + builder.optionalUserField(USER_FIELD, user) + } + + builder.field(ENABLED_FIELD, enabled) + .optionalTimeField(ENABLED_TIME_FIELD, enabledTime) + .field(SCHEDULE_FIELD, schedule) + .field(INPUTS_FIELD, inputs.toTypedArray()) + .field(TRIGGERS_FIELD, triggers.toTypedArray()) + .optionalTimeField(LAST_UPDATE_TIME_FIELD, lastUpdateTime) + if (uiMetadata.isNotEmpty()) builder.field(UI_METADATA_FIELD, uiMetadata) + builder.field(DATA_SOURCES_FIELD, dataSources) + builder.field(OWNER_FIELD, owner) + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + override fun fromDocument(id: String, version: Long): Monitor = copy(id = id, version = version) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeString(name) + out.writeBoolean(enabled) + if (schedule is CronSchedule) { + out.writeEnum(Schedule.TYPE.CRON) + } else { + out.writeEnum(Schedule.TYPE.INTERVAL) + } + schedule.writeTo(out) + out.writeInstant(lastUpdateTime) + out.writeOptionalInstant(enabledTime) + out.writeString(monitorType) + out.writeBoolean(user != null) + user?.writeTo(out) + out.writeInt(schemaVersion) + // Outputting type with each Input so that the generic Input.readFrom() can read it + out.writeVInt(inputs.size) + inputs.forEach { + if (it is SearchInput) { + out.writeEnum(Input.Type.SEARCH_INPUT) + } else if (it is DocLevelMonitorInput) { + out.writeEnum(Input.Type.DOCUMENT_LEVEL_INPUT) + } else { + out.writeEnum(Input.Type.REMOTE_DOC_LEVEL_MONITOR_INPUT) + } + it.writeTo(out) + } + // Outputting type with each Trigger so that the generic Trigger.readFrom() can read it + out.writeVInt(triggers.size) + triggers.forEach { + when (it) { + is BucketLevelTrigger -> out.writeEnum(Trigger.Type.BUCKET_LEVEL_TRIGGER) + is DocumentLevelTrigger -> out.writeEnum(Trigger.Type.DOCUMENT_LEVEL_TRIGGER) + is RemoteMonitorTrigger -> out.writeEnum(Trigger.Type.REMOTE_MONITOR_TRIGGER) + else -> out.writeEnum(Trigger.Type.QUERY_LEVEL_TRIGGER) + } + it.writeTo(out) + } + out.writeMap(uiMetadata) + out.writeBoolean(dataSources != null) // for backward compatibility with pre-existing monitors which don't have datasources field + dataSources.writeTo(out) + out.writeOptionalString(owner) + } + + companion object { + const val MONITOR_TYPE = "monitor" + const val TYPE_FIELD = "type" + const val MONITOR_TYPE_FIELD = "monitor_type" 
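// A minimal sketch of the Writeable round-trip implemented by writeTo and the StreamInput
// constructor above, assuming the standard OpenSearch BytesStreamOutput test helper
// (org.opensearch.common.io.stream.BytesStreamOutput). The monitor name, index pattern and
// painless source below are hypothetical illustration values, not part of this API.
//
// import java.time.Instant
// import java.time.temporal.ChronoUnit
// import org.opensearch.common.io.stream.BytesStreamOutput
// import org.opensearch.index.query.QueryBuilders
// import org.opensearch.script.Script
// import org.opensearch.search.builder.SearchSourceBuilder
val monitor = Monitor(
    name = "errors-monitor",
    enabled = true,
    schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES),
    lastUpdateTime = Instant.now(),
    enabledTime = Instant.now(), // required when enabled == true (see the init block)
    monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value,
    user = null,
    inputs = listOf(SearchInput(listOf("logs-*"), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))),
    triggers = listOf(
        QueryLevelTrigger(
            name = "above-threshold",
            severity = "1",
            actions = emptyList(),
            condition = Script("ctx.results[0].hits.total.value > 0")
        )
    ),
    uiMetadata = mapOf()
)
// writeTo tags every input and trigger with its enum type so the generic readFrom helpers can rebuild them.
val out = BytesStreamOutput()
monitor.writeTo(out)
val copy = Monitor(out.bytes().streamInput())
check(copy.name == monitor.name && copy.schedule == monitor.schedule)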
+ const val SCHEMA_VERSION_FIELD = "schema_version" + const val NAME_FIELD = "name" + const val USER_FIELD = "user" + const val ENABLED_FIELD = "enabled" + const val SCHEDULE_FIELD = "schedule" + const val TRIGGERS_FIELD = "triggers" + const val NO_ID = "" + const val NO_VERSION = 1L + const val INPUTS_FIELD = "inputs" + const val LAST_UPDATE_TIME_FIELD = "last_update_time" + const val UI_METADATA_FIELD = "ui_metadata" + const val DATA_SOURCES_FIELD = "data_sources" + const val ENABLED_TIME_FIELD = "enabled_time" + const val OWNER_FIELD = "owner" + val MONITOR_TYPE_PATTERN = Pattern.compile("[a-zA-Z0-9_]{5,25}") + + // This is defined here instead of in ScheduledJob to avoid having the ScheduledJob class know about all + // the different subclasses and creating circular dependencies + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + ScheduledJob::class.java, + ParseField(MONITOR_TYPE), + CheckedFunction { parse(it) } + ) + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Monitor { + var name: String? = null + // Default to QUERY_LEVEL_MONITOR to cover Monitors that existed before the addition of MonitorType + var monitorType: String = MonitorType.QUERY_LEVEL_MONITOR.toString() + var user: User? = null + var schedule: Schedule? = null + var lastUpdateTime: Instant? = null + var enabledTime: Instant? = null + var uiMetadata: Map = mapOf() + var enabled = true + var schemaVersion = NO_SCHEMA_VERSION + val triggers: MutableList = mutableListOf() + val inputs: MutableList = mutableListOf() + var dataSources = DataSources() + var owner = "alerting" + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + NAME_FIELD -> name = xcp.text() + MONITOR_TYPE_FIELD -> { + monitorType = xcp.text() + val matcher = MONITOR_TYPE_PATTERN.matcher(monitorType) + val find = matcher.matches() + if (!find) { + throw IllegalStateException("Monitor type should follow pattern ${MONITOR_TYPE_PATTERN.pattern()}") + } + } + USER_FIELD -> user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + ENABLED_FIELD -> enabled = xcp.booleanValue() + SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) + INPUTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val input = Input.parse(xcp) + if (input is ClusterMetricsInput) { + supportedClusterMetricsSettings?.validateApiType(input) + } + inputs.add(input) + } + } + TRIGGERS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + triggers.add(Trigger.parse(xcp)) + } + } + ENABLED_TIME_FIELD -> enabledTime = xcp.instant() + LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() + UI_METADATA_FIELD -> uiMetadata = xcp.map() + DATA_SOURCES_FIELD -> dataSources = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + DataSources() + } else { + DataSources.parse(xcp) + } + OWNER_FIELD -> owner = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) owner else xcp.text() + else -> { + xcp.skipChildren() + } + } + } + + if (enabled && 
enabledTime == null) { + enabledTime = Instant.now() + } else if (!enabled) { + enabledTime = null + } + return Monitor( + id, + version, + requireNotNull(name) { "Monitor name is null" }, + enabled, + requireNotNull(schedule) { "Monitor schedule is null" }, + lastUpdateTime ?: Instant.now(), + enabledTime, + monitorType, + user, + schemaVersion, + inputs.toList(), + triggers.toList(), + uiMetadata, + dataSources, + owner + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Monitor? { + return Monitor(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt new file mode 100644 index 00000000..a90f3cc3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorMetadata.kt @@ -0,0 +1,197 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.model.Monitor.Companion.NO_ID +import org.opensearch.commons.alerting.util.instant +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.index.seqno.SequenceNumbers +import java.io.IOException +import java.time.Instant + +data class MonitorMetadata( + val id: String, + val seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, + val primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + val monitorId: String, + val lastActionExecutionTimes: List, + val lastRunContext: Map, + // Maps (sourceIndex + monitorId) --> concreteQueryIndex + val sourceToQueryIndexMapping: MutableMap = mutableMapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + seqNo = sin.readLong(), + primaryTerm = sin.readLong(), + monitorId = sin.readString(), + lastActionExecutionTimes = sin.readList(ActionExecutionTime.Companion::readFrom), + lastRunContext = Monitor.suppressWarning(sin.readMap()), + sourceToQueryIndexMapping = sin.readMap() as MutableMap + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(seqNo) + out.writeLong(primaryTerm) + out.writeString(monitorId) + out.writeCollection(lastActionExecutionTimes) + out.writeMap(lastRunContext) + out.writeMap(sourceToQueryIndexMapping as MutableMap) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(MONITOR_ID_FIELD, monitorId) + .field(LAST_ACTION_EXECUTION_FIELD, lastActionExecutionTimes.toTypedArray()) + if (lastRunContext.isNotEmpty()) builder.field(LAST_RUN_CONTEXT_FIELD, lastRunContext) + if (sourceToQueryIndexMapping.isNotEmpty()) { + builder.field(SOURCE_TO_QUERY_INDEX_MAP_FIELD, sourceToQueryIndexMapping as MutableMap) + } + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "metadata" + const 
val MONITOR_ID_FIELD = "monitor_id" + const val LAST_ACTION_EXECUTION_FIELD = "last_action_execution_times" + const val LAST_RUN_CONTEXT_FIELD = "last_run_context" + const val SOURCE_TO_QUERY_INDEX_MAP_FIELD = "source_to_query_index_mapping" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse( + xcp: XContentParser, + id: String = NO_ID, + seqNo: Long = SequenceNumbers.UNASSIGNED_SEQ_NO, + primaryTerm: Long = SequenceNumbers.UNASSIGNED_PRIMARY_TERM + ): MonitorMetadata { + lateinit var monitorId: String + val lastActionExecutionTimes = mutableListOf() + var lastRunContext: Map = mapOf() + var sourceToQueryIndexMapping: MutableMap = mutableMapOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + MONITOR_ID_FIELD -> monitorId = xcp.text() + LAST_ACTION_EXECUTION_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + lastActionExecutionTimes.add(ActionExecutionTime.parse(xcp)) + } + } + LAST_RUN_CONTEXT_FIELD -> lastRunContext = xcp.map() + SOURCE_TO_QUERY_INDEX_MAP_FIELD -> sourceToQueryIndexMapping = xcp.map() as MutableMap + } + } + + return MonitorMetadata( + if (id != NO_ID) id else "$monitorId-metadata", + seqNo = seqNo, + primaryTerm = primaryTerm, + monitorId = monitorId, + lastActionExecutionTimes = lastActionExecutionTimes, + lastRunContext = lastRunContext, + sourceToQueryIndexMapping = sourceToQueryIndexMapping + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): MonitorMetadata { + return MonitorMetadata(sin) + } + + /** workflowMetadataId is used as key for monitor metadata in the case when the workflow execution happens + so the monitor lastRunContext (in the case of doc level monitor) is not interfering with the monitor execution + WorkflowMetadataId will be either workflowId-metadata (when executing the workflow as it is scheduled) + or timestampWithUUID-metadata (when a workflow is executed in a dry-run mode) + In the case of temp workflow, doc level monitors must have lastRunContext created from scratch + That's why we are using workflowMetadataId - in order to ensure that the doc level monitor metadata is created from scratch + **/ + fun getId(monitor: Monitor, workflowMetadataId: String? = null): String { + return if (workflowMetadataId.isNullOrEmpty()) { "${monitor.id}-metadata" } + // WorkflowMetadataId already contains -metadata suffix + else { "$workflowMetadataId-${monitor.id}-metadata" } + } + } +} + +/** + * A value object containing action execution time. 
+ */ +data class ActionExecutionTime( + val actionId: String, + val executionTime: Instant +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // actionId + sin.readInstant() // executionTime + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(ACTION_ID_FIELD, actionId) + .field(EXECUTION_TIME_FIELD, executionTime) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(actionId) + out.writeInstant(executionTime) + } + + companion object { + const val ACTION_ID_FIELD = "action_id" + const val EXECUTION_TIME_FIELD = "execution_time" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ActionExecutionTime { + lateinit var actionId: String + lateinit var executionTime: Instant + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ACTION_ID_FIELD -> actionId = xcp.text() + EXECUTION_TIME_FIELD -> executionTime = xcp.instant()!! + } + } + + return ActionExecutionTime( + actionId, + executionTime + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionExecutionTime { + return ActionExecutionTime(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt new file mode 100644 index 00000000..d403313b --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/MonitorRunResult.kt @@ -0,0 +1,215 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.apache.logging.log4j.LogManager +import org.opensearch.OpenSearchException +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +data class MonitorRunResult( + val monitorName: String, + val periodStart: Instant, + val periodEnd: Instant, + val error: Exception? 
= null, + val inputResults: InputRunResults = InputRunResults(), + val triggerResults: Map = mapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + sin.readString(), // monitorName + sin.readInstant(), // periodStart + sin.readInstant(), // periodEnd + sin.readException(), // error + InputRunResults.readFrom(sin), // inputResults + suppressWarning(sin.readMap()) as Map // triggerResults + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("monitor_name", monitorName) + .optionalTimeField("period_start", periodStart) + .optionalTimeField("period_end", periodEnd) + .field("error", error?.message) + .field("input_results", inputResults) + .field("trigger_results", triggerResults) + .endObject() + } + + /** Returns error information to store in the Alert. Currently it's just the stack trace but it can be more */ + fun alertError(): AlertError? { + if (error != null) { + return AlertError(Instant.now(), "Failed running monitor:\n${error.userErrorMessage()}") + } + + if (inputResults.error != null) { + return AlertError(Instant.now(), "Failed fetching inputs:\n${inputResults.error.userErrorMessage()}") + } + return null + } + + fun scriptContextError(trigger: Trigger): Exception? { + return error ?: inputResults.error ?: triggerResults[trigger.id]?.error + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): MonitorRunResult { + return MonitorRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(monitorName) + out.writeInstant(periodStart) + out.writeInstant(periodEnd) + out.writeException(error) + inputResults.writeTo(out) + out.writeMap(triggerResults) + } +} + +data class InputRunResults( + val results: List> = listOf(), + val error: Exception? = null, + val aggTriggersAfterKey: MutableMap? = null +) : Writeable, ToXContent { + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("results", results) + .field("error", error?.message) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeVInt(results.size) + for (map in results) { + out.writeMap(map) + } + out.writeException(error) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): InputRunResults { + val count = sin.readVInt() // count + val list = mutableListOf>() + for (i in 0 until count) { + list.add(suppressWarning(sin.readMap())) // result(map) + } + val error = sin.readException() // error + return InputRunResults(list, error) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } + + fun afterKeysPresent(): Boolean { + aggTriggersAfterKey?.forEach { + if (it.value.afterKey != null && !it.value.lastPage) { + return true + } + } + return false + } +} + +data class TriggerAfterKey(val afterKey: Map?, val lastPage: Boolean) + +data class ActionRunResult( + val actionId: String, + val actionName: String, + val output: Map, + val throttled: Boolean = false, + val executionTime: Instant? = null, + val error: Exception? 
= null +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // actionId + sin.readString(), // actionName + suppressWarning(sin.readMap()), // output + sin.readBoolean(), // throttled + sin.readOptionalInstant(), // executionTime + sin.readException() // error + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field("id", actionId) + .field("name", actionName) + .field("output", output) + .field("throttled", throttled) + .optionalTimeField("executionTime", executionTime) + .field("error", error?.message) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(actionId) + out.writeString(actionName) + out.writeMap(output) + out.writeBoolean(throttled) + out.writeOptionalInstant(executionTime) + out.writeException(error) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionRunResult { + return ActionRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + } +} + +private val logger = LogManager.getLogger(MonitorRunResult::class.java) + +/** Constructs an error message from an exception suitable for human consumption. */ +fun Throwable.userErrorMessage(): String { + return when { + this is ScriptException -> this.scriptStack.joinToString(separator = "\n", limit = 100) + this is OpenSearchException -> this.detailedMessage + this.message != null -> { + logger.info("Internal error: ${this.message}. See the opensearch.log for details", this) + this.message!! + } + else -> { + logger.info("Unknown Internal error. See the OpenSearch log for details.", this) + "Unknown Internal error. See the OpenSearch log for details." 
+ } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/NoOpTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/NoOpTrigger.kt new file mode 100644 index 00000000..3ffacb6e --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/NoOpTrigger.kt @@ -0,0 +1,78 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class NoOpTrigger( + override val id: String = UUIDs.base64UUID(), + override val name: String = "NoOp trigger", + override val severity: String = "", + override val actions: List = listOf() +) : Trigger { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this() + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(NOOP_TRIGGER_FIELD) + .field(ID_FIELD, id) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return NOOP_TRIGGER_FIELD + } + + fun asTemplateArg(): Map { + return mapOf() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + } + + companion object { + const val ID_FIELD = "id" + const val NOOP_TRIGGER_FIELD = "noop_trigger" + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(NOOP_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): NoOpTrigger { + var id = UUIDs.base64UUID() + if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken() + if (xcp.currentName() == ID_FIELD) { + xcp.nextToken() + id = xcp.text() + xcp.nextToken() + } + if (xcp.currentToken() != XContentParser.Token.END_OBJECT) { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) + } + return NoOpTrigger(id = id) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): NoOpTrigger { + return NoOpTrigger(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt new file mode 100644 index 00000000..a88ef9b6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTrigger.kt @@ -0,0 +1,189 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.common.UUIDs +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import 
org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.script.Script +import java.io.IOException + +data class QueryLevelTrigger( + override val id: String = UUIDs.base64UUID(), + override val name: String, + override val severity: String, + override val actions: List, + val condition: Script +) : Trigger { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // id + sin.readString(), // name + sin.readString(), // severity + sin.readList(::Action), // actions + Script(sin) // condition + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(QUERY_LEVEL_TRIGGER_FIELD) + .field(ID_FIELD, id) + .field(NAME_FIELD, name) + .field(SEVERITY_FIELD, severity) + .startObject(CONDITION_FIELD) + .field(SCRIPT_FIELD, condition) + .endObject() + .field(ACTIONS_FIELD, actions.toTypedArray()) + .endObject() + .endObject() + return builder + } + + override fun name(): String { + return QUERY_LEVEL_TRIGGER_FIELD + } + + /** Returns a representation of the trigger suitable for passing into painless and mustache scripts. */ + fun asTemplateArg(): Map { + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + SEVERITY_FIELD to severity, + ACTIONS_FIELD to actions.map { it.asTemplateArg() }, + CONDITION_FIELD to mapOf( + SCRIPT_FIELD to mapOf( + SOURCE_FIELD to condition.idOrCode, + LANG_FIELD to condition.lang + ) + ) + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(name) + out.writeString(severity) + out.writeCollection(actions) + condition.writeTo(out) + } + + companion object { + const val QUERY_LEVEL_TRIGGER_FIELD = "query_level_trigger" + const val CONDITION_FIELD = "condition" + const val SCRIPT_FIELD = "script" + const val SOURCE_FIELD = "source" + const val LANG_FIELD = "lang" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + Trigger::class.java, + ParseField(QUERY_LEVEL_TRIGGER_FIELD), + CheckedFunction { parseInner(it) } + ) + + /** + * This parse method needs to account for both the old and new Trigger format. + * In the old format, only one Trigger existed (which is now QueryLevelTrigger) and it was + * not a named object. + * + * The parse() method in the Trigger interface needs to consume the outer START_OBJECT to be able + * to infer whether it is dealing with the old or new Trigger format. This means that the currentToken at + * the time this parseInner method is called could differ based on which format is being dealt with. + * + * Old Format + * ---------- + * { + * "id": ..., + * ^ + * Current token starts here + * "name" ..., + * ... + * } + * + * New Format + * ---------- + * { + * "query_level_trigger": { + * "id": ..., ^ Current token starts here + * "name": ..., + * ... + * } + * } + * + * It isn't typically conventional but this parse method will account for both START_OBJECT + * and FIELD_NAME as the starting token to cover both cases. 
+ */ + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): QueryLevelTrigger { + var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified + lateinit var name: String + lateinit var severity: String + lateinit var condition: Script + val actions: MutableList = mutableListOf() + + if (xcp.currentToken() != XContentParser.Token.START_OBJECT && xcp.currentToken() != XContentParser.Token.FIELD_NAME) { + XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation) + } + + // If the parser began on START_OBJECT, move to the next token so that the while loop enters on + // the fieldName (or END_OBJECT if it's empty). + if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken() + + while (xcp.currentToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + + xcp.nextToken() + when (fieldName) { + ID_FIELD -> id = xcp.text() + NAME_FIELD -> name = xcp.text() + SEVERITY_FIELD -> severity = xcp.text() + CONDITION_FIELD -> { + xcp.nextToken() + condition = Script.parse(xcp) + require(condition.lang == Script.DEFAULT_SCRIPT_LANG) { + "Invalid script language. Allowed languages are [${Script.DEFAULT_SCRIPT_LANG}]" + } + xcp.nextToken() + } + ACTIONS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + actions.add(Action.parse(xcp)) + } + } + } + xcp.nextToken() + } + + return QueryLevelTrigger( + name = requireNotNull(name) { "Trigger name is null" }, + severity = requireNotNull(severity) { "Trigger severity is null" }, + condition = requireNotNull(condition) { "Trigger condition is null" }, + actions = requireNotNull(actions) { "Trigger actions are null" }, + id = requireNotNull(id) { "Trigger id is null." } + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): QueryLevelTrigger { + return QueryLevelTrigger(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt new file mode 100644 index 00000000..101d0067 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerRunResult.kt @@ -0,0 +1,66 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.script.ScriptException +import java.io.IOException +import java.time.Instant + +open class QueryLevelTriggerRunResult( + override var triggerName: String, + open var triggered: Boolean, + override var error: Exception?, + open var actionResults: MutableMap = mutableMapOf() +) : TriggerRunResult(triggerName, error) { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + triggerName = sin.readString(), + error = sin.readException(), + triggered = sin.readBoolean(), + actionResults = sin.readMap() as MutableMap + ) + + override fun alertError(): AlertError? 
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + for (actionResult in actionResults.values) { + if (actionResult.error != null) { + return AlertError(Instant.now(), "Failed running action:\n${actionResult.error.userErrorMessage()}") + } + } + return null + } + + override fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + if (error is ScriptException) error = Exception((error as ScriptException).toJsonString(), error) + return builder + .field("triggered", triggered) + .field("action_results", actionResults as Map) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + super.writeTo(out) + out.writeBoolean(triggered) + out.writeMap(actionResults as Map) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): TriggerRunResult { + return QueryLevelTriggerRunResult(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt new file mode 100644 index 00000000..3d08c095 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Schedule.kt @@ -0,0 +1,376 @@ +package org.opensearch.commons.alerting.model + +import com.cronutils.model.CronType +import com.cronutils.model.definition.CronDefinitionBuilder +import com.cronutils.model.time.ExecutionTime +import com.cronutils.parser.CronParser +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.DateTimeException +import java.time.Duration +import java.time.Instant +import java.time.ZoneId +import java.time.ZonedDateTime +import java.time.temporal.ChronoUnit +import java.time.zone.ZoneRulesException +import java.util.Locale + +sealed class Schedule : BaseModel { + enum class TYPE { CRON, INTERVAL } + companion object { + const val CRON_FIELD = "cron" + const val EXPRESSION_FIELD = "expression" + const val TIMEZONE_FIELD = "timezone" + const val PERIOD_FIELD = "period" + const val INTERVAL_FIELD = "interval" + const val UNIT_FIELD = "unit" + + val cronParser = CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.UNIX)) + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Schedule { + var expression: String? = null + var timezone: ZoneId? = null + var interval: Int? = null + var unit: ChronoUnit? = null + var schedule: Schedule? = null + var type: TYPE? = null + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldname = xcp.currentName() + xcp.nextToken() + // If the type field has already been set the customer has provide more than one type of schedule. 
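// A minimal parsing sketch for the two schedule shapes handled below, assuming the standard
// OpenSearch XContent helpers; their package locations differ across versions, so treat the
// commented imports as assumptions rather than exact paths.
//
//   {"period": {"interval": 5, "unit": "MINUTES"}}
//   {"cron":   {"expression": "0 11 * * *", "timezone": "America/Los_Angeles"}}
//
// import org.opensearch.common.xcontent.LoggingDeprecationHandler
// import org.opensearch.common.xcontent.XContentType
// import org.opensearch.core.xcontent.NamedXContentRegistry
fun parseScheduleJson(json: String): Schedule {
    val parser = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, json)
    parser.nextToken() // Schedule.parse expects the parser to already be positioned on START_OBJECT
    return Schedule.parse(parser)
}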
+ if (type != null) { + throw IllegalArgumentException("You can only specify one type of schedule.") + } + when (fieldname) { + CRON_FIELD -> { + type = TYPE.CRON + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val cronFieldName = xcp.currentName() + xcp.nextToken() + when (cronFieldName) { + EXPRESSION_FIELD -> expression = xcp.textOrNull() + TIMEZONE_FIELD -> timezone = getTimeZone(xcp.text()) + } + } + } + PERIOD_FIELD -> { + type = TYPE.INTERVAL + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val cronFieldName = xcp.currentName() + xcp.nextToken() + when (cronFieldName) { + INTERVAL_FIELD -> interval = xcp.intValue() + UNIT_FIELD -> unit = ChronoUnit.valueOf(xcp.text().uppercase(Locale.getDefault())) + } + } + } + else -> { + throw IllegalArgumentException("Invalid field: [$fieldname] found in schedule.") + } + } + } + if (type == TYPE.CRON) { + schedule = CronSchedule( + requireNotNull(expression) { "Expression in cron schedule is null." }, + requireNotNull(timezone) { "Timezone in cron schedule is null." } + ) + } else if (type == TYPE.INTERVAL) { + schedule = IntervalSchedule( + requireNotNull(interval) { "Interval in period schedule is null." }, + requireNotNull(unit) { "Unit in period schedule is null." } + ) + } + return requireNotNull(schedule) { "Schedule is null." } + } + + @JvmStatic + @Throws(IllegalArgumentException::class) + private fun getTimeZone(timeZone: String): ZoneId { + try { + return ZoneId.of(timeZone) + } catch (zre: ZoneRulesException) { + throw IllegalArgumentException("Timezone $timeZone is not supported") + } catch (dte: DateTimeException) { + throw IllegalArgumentException("Timezone $timeZone is not supported") + } + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Schedule { + val type = sin.readEnum(Schedule.TYPE::class.java) + if (type == Schedule.TYPE.CRON) { + return CronSchedule(sin) + } else { + return IntervalSchedule(sin) + } + } + } + + /** + * @param enabledTime is used in IntervalSchedule to calculate next time to execute the schedule. + */ + abstract fun nextTimeToExecute(enabledTime: Instant): Duration? + + /** + * @param expectedPreviousExecutionTime is the calculated previous execution time that should always be correct, + * the first time this is called the value passed in is the enabledTime which acts as the expectedPreviousExecutionTime + */ + abstract fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? + + /** + * Returns the start and end time for this schedule starting at the given start time (if provided). + * If not, the start time is assumed to be the last time the Schedule would have executed (if it's a Cron schedule) + * or [Instant.now] if it's an interval schedule. + * + * If this is a schedule that runs only once this function will return [Instant.now] for both start and end time. + */ + abstract fun getPeriodStartingAt(startTime: Instant?): Pair + + /** + * Returns the start and end time for this schedule ending at the given end time (if provided). + * If not, the end time is assumed to be the next time the Schedule would have executed (if it's a Cron schedule) + * or [Instant.now] if it's an interval schedule. + * + * If this is a schedule that runs only once this function will return [Instant.now] for both start and end time. 
+ */ + abstract fun getPeriodEndingAt(endTime: Instant?): Pair + + abstract fun runningOnTime(lastExecutionTime: Instant?): Boolean + + /** Returns a representation of the schedule suitable for passing into painless and mustache scripts. */ + abstract fun asTemplateArg(): Map +} + +/** + * @param testInstant Normally this not be set and it should only be used in unit test to control time. + */ +data class CronSchedule( + val expression: String, + val timezone: ZoneId, + // visible for testing + @Transient val testInstant: Instant? = null +) : Schedule() { + @Transient + val executionTime: ExecutionTime = ExecutionTime.forCron(cronParser.parse(expression)) + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // expression + sin.readZoneId() // timezone + ) + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): CronSchedule { + return CronSchedule(sin) + } + } + + /* + * @param enabledTime is not used in CronSchedule. + */ + override fun nextTimeToExecute(enabledTime: Instant): Duration? { + val zonedDateTime = ZonedDateTime.ofInstant(testInstant ?: Instant.now(), timezone) + val timeToNextExecution = executionTime.timeToNextExecution(zonedDateTime) + return timeToNextExecution.orElse(null) + } + + override fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? { + val zonedDateTime = ZonedDateTime.ofInstant(expectedPreviousExecutionTime ?: testInstant ?: Instant.now(), timezone) + val nextExecution = executionTime.nextExecution(zonedDateTime) + return nextExecution.orElse(null)?.toInstant() + } + + override fun getPeriodStartingAt(startTime: Instant?): Pair { + val realStartTime = if (startTime != null) { + startTime + } else { + // Probably the first time we're running. 
Try to figure out the last execution time + val lastExecutionTime = executionTime.lastExecution(ZonedDateTime.now(timezone)) + // This shouldn't happen unless the cron is configured to run only once, which our current cron syntax doesn't support + if (!lastExecutionTime.isPresent) { + val currentTime = Instant.now() + return Pair(currentTime, currentTime) + } + lastExecutionTime.get().toInstant() + } + val zonedDateTime = ZonedDateTime.ofInstant(realStartTime, timezone) + val newEndTime = executionTime.nextExecution(zonedDateTime).orElse(null) + return Pair(realStartTime, newEndTime?.toInstant() ?: realStartTime) + } + + override fun getPeriodEndingAt(endTime: Instant?): Pair { + val realEndTime = if (endTime != null) { + endTime + } else { + val nextExecutionTime = executionTime.nextExecution(ZonedDateTime.now(timezone)) + // This shouldn't happen unless the cron is configured to run only once which our current cron syntax doesn't support + if (!nextExecutionTime.isPresent) { + val currentTime = Instant.now() + return Pair(currentTime, currentTime) + } + nextExecutionTime.get().toInstant() + } + val zonedDateTime = ZonedDateTime.ofInstant(realEndTime, timezone) + val newStartTime = executionTime.lastExecution(zonedDateTime).orElse(null) + return Pair(newStartTime?.toInstant() ?: realEndTime, realEndTime) + } + + override fun runningOnTime(lastExecutionTime: Instant?): Boolean { + if (lastExecutionTime == null) { + return true + } + + val zonedDateTime = ZonedDateTime.ofInstant(testInstant ?: Instant.now(), timezone) + val expectedExecutionTime = executionTime.lastExecution(zonedDateTime) + + if (!expectedExecutionTime.isPresent) { + // At this point we know lastExecutionTime is not null, this should never happen. + // If expected execution time is null, we shouldn't have executed the ScheduledJob. + return false + } + val actualExecutionTime = ZonedDateTime.ofInstant(lastExecutionTime, timezone) + + return ChronoUnit.SECONDS.between(expectedExecutionTime.get(), actualExecutionTime) == 0L + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(CRON_FIELD) + .field(EXPRESSION_FIELD, expression) + .field(TIMEZONE_FIELD, timezone.id) + .endObject() + .endObject() + return builder + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(expression) + out.writeZoneId(timezone) + } + + override fun asTemplateArg(): Map = + mapOf( + CRON_FIELD to mapOf( + EXPRESSION_FIELD to expression, + TIMEZONE_FIELD to timezone.toString() + ) + ) +} + +data class IntervalSchedule( + val interval: Int, + val unit: ChronoUnit, + // visible for testing + @Transient val testInstant: Instant? 
= null +) : Schedule() { + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInt(), // interval + sin.readEnum(ChronoUnit::class.java) // unit + ) + companion object { + @Transient + private val SUPPORTED_UNIT = listOf(ChronoUnit.MINUTES, ChronoUnit.HOURS, ChronoUnit.DAYS) + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): IntervalSchedule { + return IntervalSchedule(sin) + } + } + + init { + if (!SUPPORTED_UNIT.contains(unit)) { + throw IllegalArgumentException("Unit $unit is not supported; expected one of $SUPPORTED_UNIT") + } + + if (interval <= 0) { + throw IllegalArgumentException("Interval is not allowed to be 0 or negative") + } + } + + @Transient + private val intervalInMills = Duration.of(interval.toLong(), unit).toMillis() + + override fun nextTimeToExecute(enabledTime: Instant): Duration? { + val enabledTimeEpochMillis = enabledTime.toEpochMilli() + + val currentTime = testInstant ?: Instant.now() + val delta = currentTime.toEpochMilli() - enabledTimeEpochMillis + // Remainder of the Delta time is how much we have already spent waiting. + // We need to subtract remainder of that time from the interval time to get remaining schedule time to wait. + val remainingScheduleTime = intervalInMills - delta.rem(intervalInMills) + return Duration.of(remainingScheduleTime, ChronoUnit.MILLIS) + } + + override fun getExpectedNextExecutionTime(enabledTime: Instant, expectedPreviousExecutionTime: Instant?): Instant? { + val expectedPreviousExecutionTimeEpochMillis = (expectedPreviousExecutionTime ?: enabledTime).toEpochMilli() + // We still need to calculate the delta even when using expectedPreviousExecutionTime because the initial value passed in + // is the enabledTime (which also happens with cluster/node restart) + val currentTime = testInstant ?: Instant.now() + val delta = currentTime.toEpochMilli() - expectedPreviousExecutionTimeEpochMillis + // Remainder of the Delta time is how much we have already spent waiting. + // We need to subtract remainder of that time from the interval time to get remaining schedule time to wait. + val remainingScheduleTime = intervalInMills - delta.rem(intervalInMills) + return Instant.ofEpochMilli(currentTime.toEpochMilli() + remainingScheduleTime) + } + + override fun getPeriodStartingAt(startTime: Instant?): Pair<Instant, Instant> { + val realStartTime = startTime ?: Instant.now() + val newEndTime = realStartTime.plusMillis(intervalInMills) + return Pair(realStartTime, newEndTime) + } + + override fun getPeriodEndingAt(endTime: Instant?): Pair<Instant, Instant> { + val realEndTime = endTime ?: Instant.now() + val newStartTime = realEndTime.minusMillis(intervalInMills) + return Pair(newStartTime, realEndTime) + } + + override fun runningOnTime(lastExecutionTime: Instant?): Boolean { + if (lastExecutionTime == null) { + return true + } + + // Make sure the lastExecutionTime is less than interval time.
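// A worked example of the interval arithmetic implemented by nextTimeToExecute and
// runningOnTime above; the concrete values are hypothetical.
//   interval = 10 MINUTES          -> intervalInMills = 600_000
//   enabledTime was 25 minutes ago -> delta = 1_500_000 ms
//   delta % intervalInMills        -> 300_000 ms already spent in the current window
//   remaining wait                 -> 600_000 - 300_000 = 300_000 ms, i.e. 5 minutes
val tenMinutes = IntervalSchedule(interval = 10, unit = ChronoUnit.MINUTES)
val wait = tenMinutes.nextTimeToExecute(Instant.now().minus(25, ChronoUnit.MINUTES)) // ~Duration.ofMinutes(5)
val onTime = tenMinutes.runningOnTime(Instant.now().minus(3, ChronoUnit.MINUTES)) // true: 0 < 180_000 < 600_000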
+ val delta = ChronoUnit.MILLIS.between(lastExecutionTime, testInstant ?: Instant.now()) + return 0 < delta && delta < intervalInMills + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(PERIOD_FIELD) + .field(INTERVAL_FIELD, interval) + .field(UNIT_FIELD, unit.name) + .endObject() + .endObject() + return builder + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeInt(interval) + out.writeEnum(unit) + } + + override fun asTemplateArg(): Map = + mapOf( + PERIOD_FIELD to mapOf( + INTERVAL_FIELD to interval, + UNIT_FIELD to unit.toString() + ) + ) +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/ScheduledJob.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/ScheduledJob.kt new file mode 100644 index 00000000..cf8417c2 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/ScheduledJob.kt @@ -0,0 +1,85 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +interface ScheduledJob : BaseModel { + + fun toXContentWithType(builder: XContentBuilder): XContentBuilder = toXContent(builder, XCONTENT_WITH_TYPE) + + companion object { + /** The name of the ElasticSearch index in which we store jobs */ + const val SCHEDULED_JOBS_INDEX = ".opendistro-alerting-config" + const val DOC_LEVEL_QUERIES_INDEX = ".opensearch-alerting-queries" + + const val NO_ID = "" + + const val NO_VERSION = 1L + + private val XCONTENT_WITH_TYPE = ToXContent.MapParams(mapOf("with_type" to "true")) + + /** + * This function parses the job, delegating to the specific subtype parser registered in the [XContentParser.getXContentRegistry] + * at runtime. Each concrete job subclass is expected to register a parser in this registry. + * The Job's json representation is expected to be of the form: + * { "" : { } } + * + * If the job comes from an OpenSearch index it's [id] and [version] can also be supplied. + */ + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): ScheduledJob { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val job = xcp.namedObject(ScheduledJob::class.java, xcp.currentName(), null) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + return job.fromDocument(id, version) + } + + /** + * This function parses the job, but expects the type to be passed in. This is for the specific + * use case in sweeper where we first want to check if the job is allowed to be swept before + * trying to fully parse it. If you need to parse a job, you most likely want to use + * the above parse function. 
+ */ + @Throws(IOException::class) + fun parse(xcp: XContentParser, type: String, id: String = NO_ID, version: Long = NO_VERSION): ScheduledJob { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val job = xcp.namedObject(ScheduledJob::class.java, type, null) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + return job.fromDocument(id, version) + } + } + + /** The id of the job in the [SCHEDULED_JOBS_INDEX] or [NO_ID] if not persisted */ + val id: String + + /** The version of the job in the [SCHEDULED_JOBS_INDEX] or [NO_VERSION] if not persisted */ + val version: Long + + /** The name of the job */ + val name: String + + /** The type of the job */ + val type: String + + /** Controls whether the job will be scheduled or not */ + val enabled: Boolean + + /** The schedule for running the job */ + val schedule: Schedule + + /** The last time the job was updated */ + val lastUpdateTime: Instant + + /** The time the job was enabled */ + val enabledTime: Instant? + + /** Copy constructor for persisted jobs */ + fun fromDocument(id: String, version: Long): ScheduledJob +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt new file mode 100644 index 00000000..99a5cb8d --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/SearchInput.kt @@ -0,0 +1,96 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.search.builder.SearchSourceBuilder +import java.io.IOException + +data class SearchInput(val indices: List, val query: SearchSourceBuilder) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readStringList(), // indices + SearchSourceBuilder(sin) // query + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .startObject(SEARCH_FIELD) + .field(INDICES_FIELD, indices.toTypedArray()) + .field(QUERY_FIELD, query) + .endObject() + .endObject() + } + + override fun name(): String { + return SEARCH_FIELD + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeStringCollection(indices) + query.writeTo(out) + } + + companion object { + const val INDICES_FIELD = "indices" + const val QUERY_FIELD = "query" + const val SEARCH_FIELD = "search" + + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(Input::class.java, ParseField("search"), CheckedFunction { parseInner(it) }) + + @JvmStatic + @Throws(IOException::class) + fun parseInner(xcp: XContentParser): SearchInput { + val indices = mutableListOf() + lateinit var searchSourceBuilder: SearchSourceBuilder + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + INDICES_FIELD -> { + XContentParserUtils.ensureExpectedToken( + 
XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + indices.add(xcp.text()) + } + } + QUERY_FIELD -> { + searchSourceBuilder = SearchSourceBuilder.fromXContent(xcp, false) + } + } + } + + return SearchInput( + indices, + requireNotNull(searchSourceBuilder) { "SearchInput query is null" } + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): SearchInput { + return SearchInput(sin) + } + } + + override fun asTemplateArg(): Map = + mapOf( + SEARCH_FIELD to mapOf( + INDICES_FIELD to indices, + QUERY_FIELD to query.toString() + ) + ) +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Sequence.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Sequence.kt new file mode 100644 index 00000000..e1f6150d --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Sequence.kt @@ -0,0 +1,75 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** Delegate monitors passed as input for composite monitors. */ +data class Sequence( + val delegates: List +) : BaseModel { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readList(::Delegate) + ) + + fun asTemplateArg(): Map { + return mapOf( + DELEGATES_FIELD to delegates + ) + } + + companion object { + const val SEQUENCE_FIELD = "sequence" + const val DELEGATES_FIELD = "delegates" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Sequence { + val delegates: MutableList = mutableListOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + DELEGATES_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + delegates.add(Delegate.parse(xcp)) + } + } + } + } + return Sequence(delegates) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): DocLevelMonitorInput { + return DocLevelMonitorInput(sin) + } + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeCollection(delegates) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(DELEGATES_FIELD, delegates.toTypedArray()) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Table.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Table.kt new file mode 100644 index 00000000..56d8a575 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Table.kt @@ -0,0 +1,44 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import java.io.IOException + +data class Table( + val sortOrder: String, + val sortString: 
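For reference, a minimal sketch of constructing the `SearchInput` defined above; the index pattern is hypothetical and the query is an ordinary `SearchSourceBuilder`:

```kotlin
import org.opensearch.commons.alerting.model.SearchInput
import org.opensearch.index.query.QueryBuilders
import org.opensearch.search.builder.SearchSourceBuilder

val searchInput = SearchInput(
    indices = listOf("logs-*"),
    query = SearchSourceBuilder().query(QueryBuilders.matchAllQuery())
)
// toXContent wraps this as: {"search": {"indices": ["logs-*"], "query": { ... }}}
```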
String, + val missing: String?, + val size: Int, + val startIndex: Int, + val searchString: String? +) : Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sortOrder = sin.readString(), + sortString = sin.readString(), + missing = sin.readOptionalString(), + size = sin.readInt(), + startIndex = sin.readInt(), + searchString = sin.readOptionalString() + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(sortOrder) + out.writeString(sortString) + out.writeOptionalString(missing) + out.writeInt(size) + out.writeInt(startIndex) + out.writeOptionalString(searchString) + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Table { + return Table(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt new file mode 100644 index 00000000..7cfb9f41 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Trigger.kt @@ -0,0 +1,81 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +interface Trigger : BaseModel { + + enum class Type(val value: String) { + DOCUMENT_LEVEL_TRIGGER(DocumentLevelTrigger.DOCUMENT_LEVEL_TRIGGER_FIELD), + QUERY_LEVEL_TRIGGER(QueryLevelTrigger.QUERY_LEVEL_TRIGGER_FIELD), + BUCKET_LEVEL_TRIGGER(BucketLevelTrigger.BUCKET_LEVEL_TRIGGER_FIELD), + NOOP_TRIGGER(NoOpTrigger.NOOP_TRIGGER_FIELD), + CHAINED_ALERT_TRIGGER(ChainedAlertTrigger.CHAINED_ALERT_TRIGGER_FIELD), + REMOTE_MONITOR_TRIGGER(RemoteMonitorTrigger.REMOTE_MONITOR_TRIGGER_FIELD); + + override fun toString(): String { + return value + } + } + + companion object { + const val ID_FIELD = "id" + const val NAME_FIELD = "name" + const val SEVERITY_FIELD = "severity" + const val ACTIONS_FIELD = "actions" + + @Throws(IOException::class) + fun parse(xcp: XContentParser): Trigger { + val trigger: Trigger + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) + val triggerTypeNames = Type.values().map { it.toString() } + if (triggerTypeNames.contains(xcp.currentName())) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + trigger = xcp.namedObject(Trigger::class.java, xcp.currentName(), null) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + } else { + // Infer the old Trigger (now called QueryLevelTrigger) when it is not defined as a named + // object to remain backwards compatible when parsing the old format + trigger = QueryLevelTrigger.parseInner(xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.currentToken(), xcp) + } + return trigger + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Trigger { + return when (val type = sin.readEnum(Trigger.Type::class.java)) { + Type.QUERY_LEVEL_TRIGGER -> QueryLevelTrigger(sin) + Type.BUCKET_LEVEL_TRIGGER -> BucketLevelTrigger(sin) + 
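`Trigger.parse` above accepts both a named-object form and the legacy unnamed form, which is inferred as a `QueryLevelTrigger` for backwards compatibility. A sketch of the two shapes; the `query_level_trigger` key and the inner fields are assumptions based on the subtypes referenced in the `Type` enum, not spelled out in this hunk:

```kotlin
// Named form: the outer key selects the registered trigger parser.
val namedTriggerJson = """
    {"query_level_trigger": {"name": "errors-high", "severity": "1", "condition": { }, "actions": [ ]}}
""".trimIndent()

// Legacy form without a type key: parsed as a QueryLevelTrigger.
val legacyTriggerJson = """
    {"name": "errors-high", "severity": "1", "condition": { }, "actions": [ ]}
""".trimIndent()
```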
Type.DOCUMENT_LEVEL_TRIGGER -> DocumentLevelTrigger(sin) + Type.CHAINED_ALERT_TRIGGER -> ChainedAlertTrigger(sin) + Type.REMOTE_MONITOR_TRIGGER -> RemoteMonitorTrigger(sin) + // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns + // enum can be null in Java + else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") + } + } + } + + /** The id of the Trigger in the [SCHEDULED_JOBS_INDEX] */ + val id: String + + /** The name of the Trigger */ + val name: String + + /** The severity of the Trigger, used to classify the subsequent Alert */ + val severity: String + + /** The actions executed if the Trigger condition evaluates to true */ + val actions: List + + fun name(): String +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt new file mode 100644 index 00000000..84efde39 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/TriggerRunResult.kt @@ -0,0 +1,55 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.time.Instant + +abstract class TriggerRunResult( + open var triggerName: String, + open var error: Exception? = null +) : Writeable, ToXContent { + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field("name", triggerName) + + internalXContent(builder, params) + val msg = error?.message + + builder.field("error", msg) + .endObject() + return builder + } + + abstract fun internalXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder + + /** Returns error information to store in the Alert. Currently it's just the stack trace but it can be more */ + open fun alertError(): AlertError? 
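`Trigger.readFrom` above relies on the writer emitting the `Trigger.Type` tag immediately before the trigger body (the `Workflow.writeTo` added later in this diff follows the same convention). A small illustrative helper showing that pairing:

```kotlin
import org.opensearch.commons.alerting.model.Trigger
import org.opensearch.core.common.io.stream.StreamOutput

// Illustrative only: write the enum tag that Trigger.readFrom consumes, then the trigger itself.
fun writeTagged(out: StreamOutput, type: Trigger.Type, trigger: Trigger) {
    out.writeEnum(type)
    trigger.writeTo(out)
}
```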
{ + if (error != null) { + return AlertError(Instant.now(), "Failed evaluating trigger:\n${error!!.userErrorMessage()}") + } + return null + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(triggerName) + out.writeException(error) + } + + companion object { + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/Workflow.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/Workflow.kt new file mode 100644 index 00000000..d0e57e63 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/Workflow.kt @@ -0,0 +1,304 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.CheckedFunction +import org.opensearch.commons.alerting.util.IndexUtils.Companion.NO_SCHEMA_VERSION +import org.opensearch.commons.alerting.util.IndexUtils.Companion.WORKFLOW_MAX_INPUTS +import org.opensearch.commons.alerting.util.IndexUtils.Companion._ID +import org.opensearch.commons.alerting.util.IndexUtils.Companion._VERSION +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.commons.alerting.util.optionalUserField +import org.opensearch.commons.authuser.User +import org.opensearch.core.ParseField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant +import java.util.Locale + +data class Workflow( + override val id: String = NO_ID, + override val version: Long = NO_VERSION, + override val name: String, + override val enabled: Boolean, + override val schedule: Schedule, + override val lastUpdateTime: Instant, + override val enabledTime: Instant?, + // TODO: Check how this behaves during rolling upgrade/multi-version cluster + // Can read/write and parsing break if it's done from an old -> new version of the plugin? + val workflowType: WorkflowType, + val user: User?, + val schemaVersion: Int = NO_SCHEMA_VERSION, + val inputs: List, + val owner: String? = DEFAULT_OWNER, + val triggers: List, + val auditDelegateMonitorAlerts: Boolean? = true +) : ScheduledJob { + override val type = WORKFLOW_TYPE + + init { + if (enabled) { + requireNotNull(enabledTime) + } else { + require(enabledTime == null) + } + require(inputs.size <= WORKFLOW_MAX_INPUTS) { "Workflows can only have $WORKFLOW_MAX_INPUTS search input." } + triggers.forEach { trigger -> + run { + require(trigger is ChainedAlertTrigger) { "Incompatible trigger [${trigger.name}] for workflow. 
" } + } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + version = sin.readLong(), + name = sin.readString(), + enabled = sin.readBoolean(), + schedule = Schedule.readFrom(sin), + lastUpdateTime = sin.readInstant(), + enabledTime = sin.readOptionalInstant(), + workflowType = sin.readEnum(WorkflowType::class.java), + user = if (sin.readBoolean()) { + User(sin) + } else { + null + }, + schemaVersion = sin.readInt(), + inputs = sin.readList((WorkflowInput)::readFrom), + owner = sin.readOptionalString(), + triggers = sin.readList((Trigger)::readFrom), + auditDelegateMonitorAlerts = sin.readOptionalBoolean() + ) + + // This enum classifies different workflows + // This is different from 'type' which denotes the Scheduled Job type + enum class WorkflowType(val value: String) { + COMPOSITE("composite"); + + override fun toString(): String { + return value + } + } + + /** Returns a representation of the workflow suitable for passing into painless and mustache scripts. */ + fun asTemplateArg(): Map { + return mapOf(_ID to id, _VERSION to version, NAME_FIELD to name, ENABLED_FIELD to enabled) + } + + fun toXContentWithUser(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, params, false) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return createXContentBuilder(builder, params, true) + } + + private fun createXContentBuilder( + builder: XContentBuilder, + params: ToXContent.Params, + secure: Boolean + ): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(type) + builder.field(TYPE_FIELD, type) + .field(SCHEMA_VERSION_FIELD, schemaVersion) + .field(NAME_FIELD, name) + .field(WORKFLOW_TYPE_FIELD, workflowType) + + if (!secure) { + builder.optionalUserField(USER_FIELD, user) + } + + builder.field(ENABLED_FIELD, enabled) + .optionalTimeField(ENABLED_TIME_FIELD, enabledTime) + .field(SCHEDULE_FIELD, schedule) + .field(INPUTS_FIELD, inputs.toTypedArray()) + .field(TRIGGERS_FIELD, triggers.toTypedArray()) + .optionalTimeField(LAST_UPDATE_TIME_FIELD, lastUpdateTime) + builder.field(OWNER_FIELD, owner) + if (auditDelegateMonitorAlerts != null) { + builder.field(AUDIT_DELEGATE_MONITOR_ALERTS_FIELD, auditDelegateMonitorAlerts) + } + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + override fun fromDocument(id: String, version: Long): Workflow = copy(id = id, version = version) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeLong(version) + out.writeString(name) + out.writeBoolean(enabled) + if (schedule is CronSchedule) { + out.writeEnum(Schedule.TYPE.CRON) + } else { + out.writeEnum(Schedule.TYPE.INTERVAL) + } + schedule.writeTo(out) + out.writeInstant(lastUpdateTime) + out.writeOptionalInstant(enabledTime) + out.writeEnum(workflowType) + out.writeBoolean(user != null) + user?.writeTo(out) + out.writeInt(schemaVersion) + // Outputting type with each Input so that the generic Input.readFrom() can read it + out.writeVInt(inputs.size) + inputs.forEach { + if (it is CompositeInput) out.writeEnum(WorkflowInput.Type.COMPOSITE_INPUT) + it.writeTo(out) + } + // Outputting type with each Trigger so that the generic Trigger.readFrom() can read it + out.writeOptionalString(owner) + out.writeVInt(triggers.size) + triggers.forEach { + when (it) { + is ChainedAlertTrigger -> 
out.writeEnum(Trigger.Type.CHAINED_ALERT_TRIGGER) + else -> throw IOException("Unsupported trigger type for workflow") + } + it.writeTo(out) + } + out.writeOptionalBoolean(auditDelegateMonitorAlerts) + } + + companion object { + const val WORKFLOW_TYPE = "workflow" + const val TYPE_FIELD = "type" + const val WORKFLOW_TYPE_FIELD = "workflow_type" + const val SCHEMA_VERSION_FIELD = "schema_version" + const val NAME_FIELD = "name" + const val USER_FIELD = "user" + const val ENABLED_FIELD = "enabled" + const val SCHEDULE_FIELD = "schedule" + const val NO_ID = "" + const val NO_VERSION = 1L + const val INPUTS_FIELD = "inputs" + const val LAST_UPDATE_TIME_FIELD = "last_update_time" + const val ENABLED_TIME_FIELD = "enabled_time" + const val TRIGGERS_FIELD = "triggers" + const val OWNER_FIELD = "owner" + const val AUDIT_DELEGATE_MONITOR_ALERTS_FIELD = "audit_delegate_monitor_alerts" + + // This is defined here instead of in ScheduledJob to avoid having the ScheduledJob class know about all + // the different subclasses and creating circular dependencies + val XCONTENT_REGISTRY = NamedXContentRegistry.Entry( + ScheduledJob::class.java, + ParseField(WORKFLOW_TYPE), + CheckedFunction { parse(it) } + ) + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser, id: String = NO_ID, version: Long = NO_VERSION): Workflow { + var name: String? = null + var workflowType: String = WorkflowType.COMPOSITE.toString() + var user: User? = null + var schedule: Schedule? = null + var lastUpdateTime: Instant? = null + var enabledTime: Instant? = null + var enabled = true + var schemaVersion = NO_SCHEMA_VERSION + val inputs: MutableList = mutableListOf() + val triggers: MutableList = mutableListOf() + var owner = DEFAULT_OWNER + var auditDelegateMonitorAlerts = true + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + SCHEMA_VERSION_FIELD -> schemaVersion = xcp.intValue() + NAME_FIELD -> name = xcp.text() + WORKFLOW_TYPE_FIELD -> { + workflowType = xcp.text() + val allowedTypes = WorkflowType.values().map { it.value } + if (!allowedTypes.contains(workflowType)) { + throw IllegalStateException("Workflow type should be one of $allowedTypes") + } + } + USER_FIELD -> { + user = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else User.parse(xcp) + } + ENABLED_FIELD -> enabled = xcp.booleanValue() + SCHEDULE_FIELD -> schedule = Schedule.parse(xcp) + Monitor.TRIGGERS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + triggers.add(Trigger.parse(xcp)) + } + } + INPUTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val input = WorkflowInput.parse(xcp) + inputs.add(input) + } + } + ENABLED_TIME_FIELD -> enabledTime = xcp.instant() + LAST_UPDATE_TIME_FIELD -> lastUpdateTime = xcp.instant() + AUDIT_DELEGATE_MONITOR_ALERTS_FIELD -> auditDelegateMonitorAlerts = xcp.booleanValue() + OWNER_FIELD -> { + owner = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) owner else xcp.text() + } + else -> { + xcp.skipChildren() + } + } + } + + if (enabled && enabledTime == null) { + enabledTime = Instant.now() + } else if 
(!enabled) { + enabledTime = null + } + return Workflow( + id, + version, + requireNotNull(name) { "Workflow name is null" }, + enabled, + requireNotNull(schedule) { "Workflow schedule is null" }, + lastUpdateTime ?: Instant.now(), + enabledTime, + WorkflowType.valueOf(workflowType.uppercase(Locale.ROOT)), + user, + schemaVersion, + inputs.toList(), + owner, + triggers, + auditDelegateMonitorAlerts + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Workflow? { + return Workflow(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): MutableMap { + return map as MutableMap + } + + private const val DEFAULT_OWNER = "alerting" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowInput.kt new file mode 100644 index 00000000..d9fd2129 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowInput.kt @@ -0,0 +1,48 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +interface WorkflowInput : BaseModel { + + enum class Type(val value: String) { + COMPOSITE_INPUT(CompositeInput.COMPOSITE_INPUT_FIELD); + + override fun toString(): String { + return value + } + } + + companion object { + + @Throws(IOException::class) + fun parse(xcp: XContentParser): WorkflowInput { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, xcp.nextToken(), xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.nextToken(), xcp) + val input = if (xcp.currentName() == Type.COMPOSITE_INPUT.value) { + CompositeInput.parse(xcp) + } else { + throw IllegalStateException("Unexpected input type when reading Input") + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + return input + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowInput { + return when (val type = sin.readEnum(Type::class.java)) { + Type.COMPOSITE_INPUT -> CompositeInput(sin) + // This shouldn't be reachable but ensuring exhaustiveness as Kotlin warns + // enum can be null in Java + else -> throw IllegalStateException("Unexpected input [$type] when reading Trigger") + } + } + } + + fun name(): String +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt new file mode 100644 index 00000000..48deaed6 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowMetadata.kt @@ -0,0 +1,106 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.commons.alerting.util.instant +import org.opensearch.commons.alerting.util.optionalTimeField +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser 
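A hedged sketch of a composite-workflow document in the shape `Workflow.parse` consumes. Keys whose constants are not visible in this hunk (`composite_input`, `chained_alert_trigger`, and the schedule's `period`/`interval`/`unit`) are assumptions based on the referenced classes, and the nested bodies are elided:

```kotlin
val workflowJson = """
    {
      "name": "audit-workflow",
      "workflow_type": "composite",
      "enabled": true,
      "schedule": { "period": { "interval": 5, "unit": "MINUTES" } },
      "inputs": [ { "composite_input": { "sequence": { "delegates": [ ] } } } ],
      "triggers": [ { "chained_alert_trigger": { } } ],
      "audit_delegate_monitor_alerts": true
    }
""".trimIndent()
```

Per the `init` block earlier in this file, every trigger must be a chained-alert trigger and the number of inputs is capped at `WORKFLOW_MAX_INPUTS`.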
+import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.Instant + +data class WorkflowMetadata( + val id: String, + val workflowId: String, + val monitorIds: List, + val latestRunTime: Instant, + val latestExecutionId: String +) : Writeable, ToXContent { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + id = sin.readString(), + workflowId = sin.readString(), + monitorIds = sin.readStringList(), + latestRunTime = sin.readInstant(), + latestExecutionId = sin.readString() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(id) + out.writeString(workflowId) + out.writeStringCollection(monitorIds) + out.writeInstant(latestRunTime) + out.writeString(latestExecutionId) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + if (params.paramAsBoolean("with_type", false)) builder.startObject(METADATA) + builder.field(WORKFLOW_ID_FIELD, workflowId) + .field(MONITOR_IDS_FIELD, monitorIds) + .optionalTimeField(LATEST_RUN_TIME, latestRunTime) + .field(LATEST_EXECUTION_ID, latestExecutionId) + if (params.paramAsBoolean("with_type", false)) builder.endObject() + return builder.endObject() + } + + companion object { + const val METADATA = "workflow_metadata" + const val WORKFLOW_ID_FIELD = "workflow_id" + const val MONITOR_IDS_FIELD = "monitor_ids" + const val LATEST_RUN_TIME = "latest_run_time" + const val LATEST_EXECUTION_ID = "latest_execution_id" + + @JvmStatic + @JvmOverloads + @Throws(IOException::class) + fun parse(xcp: XContentParser): WorkflowMetadata { + lateinit var workflowId: String + var monitorIds = mutableListOf() + lateinit var latestRunTime: Instant + lateinit var latestExecutionId: String + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + WORKFLOW_ID_FIELD -> workflowId = xcp.text() + MONITOR_IDS_FIELD -> { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + monitorIds.add(xcp.text()) + } + } + LATEST_RUN_TIME -> latestRunTime = xcp.instant()!! + LATEST_EXECUTION_ID -> latestExecutionId = xcp.text() + } + } + return WorkflowMetadata( + id = "$workflowId-metadata", + workflowId = workflowId, + monitorIds = monitorIds, + latestRunTime = latestRunTime, + latestExecutionId = latestExecutionId + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowMetadata { + return WorkflowMetadata(sin) + } + + fun getId(workflowId: String? 
= null) = "$workflowId-metadata" + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt new file mode 100644 index 00000000..d478315e --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunContext.kt @@ -0,0 +1,55 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder + +data class WorkflowRunContext( + // In case of dry run it's random generated id, while in other cases it's workflowId + val workflowId: String, + val workflowMetadataId: String, + val chainedMonitorId: String?, + val matchingDocIdsPerIndex: Map>, + val auditDelegateMonitorAlerts: Boolean +) : Writeable, ToXContentObject { + companion object { + fun readFrom(sin: StreamInput): WorkflowRunContext { + return WorkflowRunContext(sin) + } + } + + constructor(sin: StreamInput) : this( + sin.readString(), + sin.readString(), + sin.readOptionalString(), + sin.readMap() as Map>, + sin.readBoolean() + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeString(workflowMetadataId) + out.writeOptionalString(chainedMonitorId) + out.writeMap(matchingDocIdsPerIndex) + out.writeBoolean(auditDelegateMonitorAlerts) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params?): XContentBuilder { + builder.startObject() + .field("workflow_id", workflowId) + .field("workflow_metadata_id", workflowMetadataId) + .field("chained_monitor_id", chainedMonitorId) + .field("matching_doc_ids_per_index", matchingDocIdsPerIndex) + .field("audit_delegate_monitor_alerts", auditDelegateMonitorAlerts) + .endObject() + return builder + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt new file mode 100644 index 00000000..1b5fe3d8 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/WorkflowRunResult.kt @@ -0,0 +1,82 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.lang.Exception +import java.time.Instant + +data class WorkflowRunResult( + val workflowId: String, + val workflowName: String, + val monitorRunResults: List> = mutableListOf(), + val executionStartTime: Instant, + var executionEndTime: Instant? = null, + val executionId: String, + val error: Exception? 
= null, + val triggerResults: Map = mapOf() +) : Writeable, ToXContent { + + @Throws(IOException::class) + @Suppress("UNCHECKED_CAST") + constructor(sin: StreamInput) : this( + workflowId = sin.readString(), + workflowName = sin.readString(), + monitorRunResults = sin.readList> { s: StreamInput -> MonitorRunResult.readFrom(s) }, + executionStartTime = sin.readInstant(), + executionEndTime = sin.readOptionalInstant(), + executionId = sin.readString(), + error = sin.readException(), + triggerResults = suppressWarning(sin.readMap()) as Map + ) + + override fun writeTo(out: StreamOutput) { + out.writeString(workflowId) + out.writeString(workflowName) + out.writeList(monitorRunResults) + out.writeInstant(executionStartTime) + out.writeOptionalInstant(executionEndTime) + out.writeString(executionId) + out.writeException(error) + out.writeMap(triggerResults) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + builder.field("execution_id", executionId) + builder.field("workflow_name", workflowName) + builder.field("workflow_id", workflowId) + builder.field("trigger_results", triggerResults) + builder.startArray("monitor_run_results") + for (monitorResult in monitorRunResults) { + monitorResult.toXContent(builder, ToXContent.EMPTY_PARAMS) + } + builder.endArray() + .field("execution_start_time", executionStartTime) + .field("execution_end_time", executionEndTime) + .field("error", error?.message) + .endObject() + return builder + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): WorkflowRunResult { + return WorkflowRunResult(sin) + } + + @Suppress("UNCHECKED_CAST") + fun suppressWarning(map: MutableMap?): Map { + return map as Map + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt new file mode 100644 index 00000000..88d15210 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Action.kt @@ -0,0 +1,186 @@ +package org.opensearch.commons.alerting.model.action + +import org.opensearch.common.UUIDs +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import org.opensearch.script.Script +import java.io.IOException + +data class Action( + val name: String, + val destinationId: String, + val subjectTemplate: Script?, + val messageTemplate: Script, + val throttleEnabled: Boolean, + val throttle: Throttle?, + val id: String = UUIDs.base64UUID(), + val actionExecutionPolicy: ActionExecutionPolicy? 
= null +) : BaseModel { + + init { + if (subjectTemplate != null) { + require(subjectTemplate.lang == MUSTACHE) { "subject_template must be a mustache script" } + } + require(messageTemplate.lang == MUSTACHE) { "message_template must be a mustache script" } + + if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { + require(throttle == null) { "Throttle is currently not supported for per execution action scope" } + } + } + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readString(), // name + sin.readString(), // destinationId + sin.readOptionalWriteable(::Script), // subjectTemplate + Script(sin), // messageTemplate + sin.readBoolean(), // throttleEnabled + sin.readOptionalWriteable(::Throttle), // throttle + sin.readString(), // id + sin.readOptionalWriteable(::ActionExecutionPolicy) // actionExecutionPolicy + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val xContentBuilder = builder.startObject() + .field(ID_FIELD, id) + .field(NAME_FIELD, name) + .field(DESTINATION_ID_FIELD, destinationId) + .field(MESSAGE_TEMPLATE_FIELD, messageTemplate) + .field(THROTTLE_ENABLED_FIELD, throttleEnabled) + if (subjectTemplate != null) { + xContentBuilder.field(SUBJECT_TEMPLATE_FIELD, subjectTemplate) + } + if (throttle != null) { + xContentBuilder.field(THROTTLE_FIELD, throttle) + } + if (actionExecutionPolicy != null) { + xContentBuilder.field(ACTION_EXECUTION_POLICY_FIELD, actionExecutionPolicy) + } + return xContentBuilder.endObject() + } + + fun asTemplateArg(): Map { + return mapOf( + ID_FIELD to id, + NAME_FIELD to name, + DESTINATION_ID_FIELD to destinationId, + THROTTLE_ENABLED_FIELD to throttleEnabled + ) + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(name) + out.writeString(destinationId) + if (subjectTemplate != null) { + out.writeBoolean(true) + subjectTemplate.writeTo(out) + } else { + out.writeBoolean(false) + } + messageTemplate.writeTo(out) + out.writeBoolean(throttleEnabled) + if (throttle != null) { + out.writeBoolean(true) + throttle.writeTo(out) + } else { + out.writeBoolean(false) + } + out.writeString(id) + if (actionExecutionPolicy != null) { + out.writeBoolean(true) + actionExecutionPolicy.writeTo(out) + } else { + out.writeBoolean(false) + } + } + + companion object { + const val ID_FIELD = "id" + const val NAME_FIELD = "name" + const val DESTINATION_ID_FIELD = "destination_id" + const val SUBJECT_TEMPLATE_FIELD = "subject_template" + const val MESSAGE_TEMPLATE_FIELD = "message_template" + const val THROTTLE_ENABLED_FIELD = "throttle_enabled" + const val THROTTLE_FIELD = "throttle" + const val ACTION_EXECUTION_POLICY_FIELD = "action_execution_policy" + const val MUSTACHE = "mustache" + const val SUBJECT = "subject" + const val MESSAGE = "message" + const val MESSAGE_ID = "messageId" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Action { + var id = UUIDs.base64UUID() // assign a default action id if one is not specified + lateinit var name: String + lateinit var destinationId: String + var subjectTemplate: Script? = null // subject template could be null for some destinations + lateinit var messageTemplate: Script + var throttleEnabled = false + var throttle: Throttle? = null + var actionExecutionPolicy: ActionExecutionPolicy? 
= null + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + ID_FIELD -> id = xcp.text() + NAME_FIELD -> name = xcp.textOrNull() + DESTINATION_ID_FIELD -> destinationId = xcp.textOrNull() + SUBJECT_TEMPLATE_FIELD -> { + subjectTemplate = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + Script.parse(xcp, Script.DEFAULT_TEMPLATE_LANG) + } + } + MESSAGE_TEMPLATE_FIELD -> messageTemplate = Script.parse(xcp, Script.DEFAULT_TEMPLATE_LANG) + THROTTLE_FIELD -> { + throttle = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) null else Throttle.parse(xcp) + } + THROTTLE_ENABLED_FIELD -> { + throttleEnabled = xcp.booleanValue() + } + ACTION_EXECUTION_POLICY_FIELD -> { + actionExecutionPolicy = if (xcp.currentToken() == XContentParser.Token.VALUE_NULL) { + null + } else { + ActionExecutionPolicy.parse(xcp) + } + } + else -> { + throw IllegalStateException("Unexpected field: $fieldName, while parsing action") + } + } + } + + if (throttleEnabled) { + requireNotNull(throttle, { "Action throttle enabled but not set throttle value" }) + } + + return Action( + requireNotNull(name) { "Action name is null" }, + requireNotNull(destinationId) { "Destination id is null" }, + subjectTemplate, + requireNotNull(messageTemplate) { "Action message template is null" }, + throttleEnabled, + throttle, + id = requireNotNull(id), + actionExecutionPolicy = actionExecutionPolicy + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Action { + return Action(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionPolicy.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionPolicy.kt new file mode 100644 index 00000000..024d9cb2 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionPolicy.kt @@ -0,0 +1,92 @@ +package org.opensearch.commons.alerting.model.action + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +data class ActionExecutionPolicy( + val actionExecutionScope: ActionExecutionScope +) : BaseModel { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this ( + ActionExecutionScope.readFrom(sin) // actionExecutionScope + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .field(ACTION_EXECUTION_SCOPE, actionExecutionScope) + return builder.endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + if (actionExecutionScope is PerAlertActionScope) { + out.writeEnum(ActionExecutionScope.Type.PER_ALERT) + } else { + out.writeEnum(ActionExecutionScope.Type.PER_EXECUTION) + } + actionExecutionScope.writeTo(out) + } + + companion object { + const val ACTION_EXECUTION_SCOPE = "action_execution_scope" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ActionExecutionPolicy { + lateinit var actionExecutionScope: ActionExecutionScope + + 
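Putting the `Action` contract above together: the message template must be a mustache script, and a throttle only takes effect when throttling is enabled. A minimal sketch with hypothetical names and a placeholder destination id:

```kotlin
import org.opensearch.commons.alerting.model.action.Action
import org.opensearch.commons.alerting.model.action.Throttle
import org.opensearch.script.Script
import org.opensearch.script.ScriptType
import java.time.temporal.ChronoUnit

val notifyAction = Action(
    name = "notify-oncall",
    destinationId = "destination-id-placeholder",
    subjectTemplate = Script(ScriptType.INLINE, Action.MUSTACHE, "Alert on {{ctx.monitor.name}}", emptyMap<String, Any>()),
    messageTemplate = Script(ScriptType.INLINE, Action.MUSTACHE, "Monitor {{ctx.monitor.name}} just triggered", emptyMap<String, Any>()),
    throttleEnabled = true,
    throttle = Throttle(value = 10, unit = ChronoUnit.MINUTES)
)
```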
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + ACTION_EXECUTION_SCOPE -> actionExecutionScope = ActionExecutionScope.parse(xcp) + } + } + + return ActionExecutionPolicy( + requireNotNull(actionExecutionScope) { "Action execution scope is null" } + ) + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionExecutionPolicy { + return ActionExecutionPolicy(sin) + } + + /** + * The default [ActionExecutionPolicy] configuration for Bucket-Level Monitors. + * + * If Query-Level Monitors integrate the use of [ActionExecutionPolicy] then a separate default configuration + * will need to be made depending on the desired behavior. + */ + fun getDefaultConfigurationForBucketLevelMonitor(): ActionExecutionPolicy { + val defaultActionExecutionScope = PerAlertActionScope( + actionableAlerts = setOf(AlertCategory.DEDUPED, AlertCategory.NEW) + ) + return ActionExecutionPolicy(actionExecutionScope = defaultActionExecutionScope) + } + + /** + * The default [ActionExecutionPolicy] configuration for Document-Level Monitors. + * + * If Query-Level Monitors integrate the use of [ActionExecutionPolicy] then a separate default configuration + * will need to be made depending on the desired behavior. + */ + fun getDefaultConfigurationForDocumentLevelMonitor(): ActionExecutionPolicy { + val defaultActionExecutionScope = PerAlertActionScope( + actionableAlerts = setOf(AlertCategory.DEDUPED, AlertCategory.NEW) + ) + return ActionExecutionPolicy(actionExecutionScope = defaultActionExecutionScope) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionScope.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionScope.kt new file mode 100644 index 00000000..4326f4b7 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/action/ActionExecutionScope.kt @@ -0,0 +1,174 @@ +package org.opensearch.commons.alerting.model.action + +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.lang.IllegalArgumentException + +sealed class ActionExecutionScope : BaseModel { + + enum class Type { PER_ALERT, PER_EXECUTION } + + companion object { + const val PER_ALERT_FIELD = "per_alert" + const val PER_EXECUTION_FIELD = "per_execution" + const val ACTIONABLE_ALERTS_FIELD = "actionable_alerts" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): ActionExecutionScope { + var type: Type? = null + var actionExecutionScope: ActionExecutionScope? 
= null + val alertFilter = mutableSetOf() + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + // If the type field has already been set, the user has provided more than one type of schedule + if (type != null) { + throw IllegalArgumentException("You can only specify one type of action execution scope.") + } + + when (fieldName) { + PER_ALERT_FIELD -> { + type = Type.PER_ALERT + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val perAlertFieldName = xcp.currentName() + xcp.nextToken() + when (perAlertFieldName) { + ACTIONABLE_ALERTS_FIELD -> { + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_ARRAY, + xcp.currentToken(), + xcp + ) + val allowedCategories = AlertCategory.values().map { it.toString() } + while (xcp.nextToken() != XContentParser.Token.END_ARRAY) { + val alertCategory = xcp.text() + if (!allowedCategories.contains(alertCategory)) { + throw IllegalStateException("Actionable alerts should be one of $allowedCategories") + } + alertFilter.add(AlertCategory.valueOf(alertCategory)) + } + } + else -> throw IllegalArgumentException( + "Invalid field [$perAlertFieldName] found in per alert action execution scope." + ) + } + } + } + PER_EXECUTION_FIELD -> { + type = Type.PER_EXECUTION + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + // empty while block + } + } + else -> throw IllegalArgumentException("Invalid field [$fieldName] found in action execution scope.") + } + } + + if (type == Type.PER_ALERT) { + actionExecutionScope = PerAlertActionScope(alertFilter) + } else if (type == Type.PER_EXECUTION) { + actionExecutionScope = PerExecutionActionScope() + } + + return requireNotNull(actionExecutionScope) { "Action execution scope is null." } + } + + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): ActionExecutionScope { + val type = sin.readEnum(ActionExecutionScope.Type::class.java) + return if (type == Type.PER_ALERT) { + PerAlertActionScope(sin) + } else { + PerExecutionActionScope(sin) + } + } + } + + abstract fun getExecutionScope(): Type +} + +data class PerAlertActionScope( + val actionableAlerts: Set +) : ActionExecutionScope() { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readSet { si -> si.readEnum(AlertCategory::class.java) } // alertFilter + ) + + override fun getExecutionScope(): Type = Type.PER_ALERT + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(PER_ALERT_FIELD) + .field(ACTIONABLE_ALERTS_FIELD, actionableAlerts.toTypedArray()) + .endObject() + return builder.endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeCollection(actionableAlerts) { o, v -> o.writeEnum(v) } + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): PerAlertActionScope { + return PerAlertActionScope(sin) + } + } +} + +class PerExecutionActionScope() : ActionExecutionScope() { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this() + + override fun hashCode(): Int { + return javaClass.hashCode() + } + + // Creating an equals method that just checks class type rather than reference since this is currently stateless. + // Otherwise, it would have been a dataclass which would have handled this. 
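The two scope variants serialize differently, per the parse and toXContent logic above. A brief sketch of both, using the constructors defined in this file:

```kotlin
import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy
import org.opensearch.commons.alerting.model.action.AlertCategory
import org.opensearch.commons.alerting.model.action.PerAlertActionScope
import org.opensearch.commons.alerting.model.action.PerExecutionActionScope

// {"action_execution_scope": {"per_alert": {"actionable_alerts": ["DEDUPED", "NEW"]}}}
val perAlertPolicy = ActionExecutionPolicy(PerAlertActionScope(setOf(AlertCategory.DEDUPED, AlertCategory.NEW)))

// {"action_execution_scope": {"per_execution": {}}}
val perExecutionPolicy = ActionExecutionPolicy(PerExecutionActionScope())
```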
+ override fun equals(other: Any?): Boolean { + if (this === other) return true + if (other?.javaClass != javaClass) return false + return true + } + + override fun getExecutionScope(): Type = Type.PER_EXECUTION + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + builder.startObject() + .startObject(PER_EXECUTION_FIELD) + .endObject() + return builder.endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + // body empty + } + + companion object { + @JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): PerExecutionActionScope { + return PerExecutionActionScope(sin) + } + } +} + +enum class AlertCategory { DEDUPED, NEW, COMPLETED } diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/action/Throttle.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Throttle.kt new file mode 100644 index 00000000..79bdb6e3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/action/Throttle.kt @@ -0,0 +1,87 @@ +package org.opensearch.commons.alerting.model.action + +import org.apache.commons.codec.binary.StringUtils +import org.opensearch.commons.notifications.model.BaseModel +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.time.temporal.ChronoUnit +import java.util.Locale + +data class Throttle( + val value: Int, + val unit: ChronoUnit +) : BaseModel { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this ( + sin.readInt(), // value + sin.readEnum(ChronoUnit::class.java) // unit + ) + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(VALUE_FIELD, value) + .field(UNIT_FIELD, unit.name) + .endObject() + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeInt(value) + out.writeEnum(unit) + } + + companion object { + const val VALUE_FIELD = "value" + const val UNIT_FIELD = "unit" + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Throttle { + var value: Int = 0 + var unit: ChronoUnit = ChronoUnit.MINUTES // only support MINUTES throttle unit currently + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + UNIT_FIELD -> { + val unitString = xcp.text().uppercase(Locale.ROOT) + require(StringUtils.equals(unitString, ChronoUnit.MINUTES.name), { "Only support MINUTES throttle unit currently" }) + unit = ChronoUnit.valueOf(unitString) + } + VALUE_FIELD -> { + val currentToken = xcp.currentToken() + require(currentToken != XContentParser.Token.VALUE_NULL, { "Throttle value can't be null" }) + when { + currentToken.isValue -> { + value = xcp.intValue() + require(value > 0, { "Can only set positive throttle period" }) + } + else -> { + XContentParserUtils.throwUnknownToken(currentToken, xcp.tokenLocation) + } + } + } + + else -> { + throw IllegalStateException("Unexpected field: $fieldName, while parsing action") + } + } + } + return Throttle(value = value, unit = requireNotNull(unit)) + } + + 
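The `Throttle.parse` logic above only accepts positive values and, currently, the MINUTES unit. A short sketch of the accepted document shape and the equivalent object:

```kotlin
import org.opensearch.commons.alerting.model.action.Throttle
import java.time.temporal.ChronoUnit

// {"value": 10, "unit": "MINUTES"} round-trips to:
val throttle = Throttle(value = 10, unit = ChronoUnit.MINUTES)
```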
@JvmStatic + @Throws(IOException::class) + fun readFrom(sin: StreamInput): Throttle { + return Throttle(sin) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt new file mode 100644 index 00000000..1e6184f3 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteDocLevelMonitorInput.kt @@ -0,0 +1,81 @@ +package org.opensearch.commons.alerting.model.remote.monitors + +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelMonitorInput.Companion.DOC_LEVEL_INPUT_FIELD +import org.opensearch.commons.alerting.model.Input +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.nio.ByteBuffer + +data class RemoteDocLevelMonitorInput(val input: BytesReference, val docLevelMonitorInput: DocLevelMonitorInput) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readBytesReference(), + DocLevelMonitorInput.readFrom(sin) + ) + + override fun asTemplateArg(): Map { + val bytes = input.toBytesRef().bytes + return mapOf( + RemoteDocLevelMonitorInput.INPUT_SIZE to bytes.size, + RemoteDocLevelMonitorInput.INPUT_FIELD to bytes, + DOC_LEVEL_INPUT_FIELD to docLevelMonitorInput + ) + } + + override fun name(): String { + return REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeBytesReference(input) + docLevelMonitorInput.writeTo(out) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val bytes = input.toBytesRef().bytes + return builder.startObject() + .startObject(REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD) + .field(RemoteMonitorInput.INPUT_SIZE, bytes.size) + .field(RemoteMonitorInput.INPUT_FIELD, bytes) + .field(DOC_LEVEL_INPUT_FIELD, docLevelMonitorInput) + .endObject() + .endObject() + } + + companion object { + const val INPUT_FIELD = "input" + const val INPUT_SIZE = "size" + const val REMOTE_DOC_LEVEL_MONITOR_INPUT_FIELD = "remote_doc_level_monitor_input" + + fun parse(xcp: XContentParser): RemoteDocLevelMonitorInput { + var bytes: ByteArray? = null + var size: Int = 0 + var docLevelMonitorInput: DocLevelMonitorInput? = null + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + + when (fieldName) { + RemoteMonitorInput.INPUT_FIELD -> bytes = xcp.binaryValue() + RemoteMonitorInput.INPUT_SIZE -> size = xcp.intValue() + Input.Type.DOCUMENT_LEVEL_INPUT.value -> { + docLevelMonitorInput = DocLevelMonitorInput.parse(xcp) + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, xcp.nextToken(), xcp) + } + } + } + val input = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size)) + return RemoteDocLevelMonitorInput(input, docLevelMonitorInput!!) 
+ } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt new file mode 100644 index 00000000..29a939ff --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorInput.kt @@ -0,0 +1,70 @@ +package org.opensearch.commons.alerting.model.remote.monitors + +import org.opensearch.commons.alerting.model.Input +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.nio.ByteBuffer + +data class RemoteMonitorInput(val input: BytesReference) : Input { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readBytesReference() + ) + + override fun asTemplateArg(): Map { + val bytes = input.toBytesRef().bytes + return mapOf( + INPUT_SIZE to bytes.size, + INPUT_FIELD to bytes + ) + } + + override fun name(): String { + return REMOTE_MONITOR_INPUT_FIELD + } + + override fun writeTo(out: StreamOutput) { + out.writeBytesReference(input) + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + val bytes = input.toBytesRef().bytes + return builder.startObject() + .startObject(REMOTE_MONITOR_INPUT_FIELD) + .field(INPUT_SIZE, bytes.size) + .field(INPUT_FIELD, bytes) + .endObject() + .endObject() + } + + companion object { + const val INPUT_FIELD = "input" + const val INPUT_SIZE = "size" + const val REMOTE_MONITOR_INPUT_FIELD = "remote_monitor_input" + + fun parse(xcp: XContentParser): RemoteMonitorInput { + var bytes: ByteArray? 
= null
+            var size: Int = 0
+
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp)
+            while (xcp.nextToken() != XContentParser.Token.END_OBJECT) {
+                val fieldName = xcp.currentName()
+                xcp.nextToken()
+
+                when (fieldName) {
+                    INPUT_FIELD -> bytes = xcp.binaryValue()
+                    INPUT_SIZE -> size = xcp.intValue()
+                }
+            }
+            val input = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size))
+            return RemoteMonitorInput(input)
+        }
+    }
+}
diff --git a/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt
new file mode 100644
index 00000000..0e89e5ba
--- /dev/null
+++ b/src/main/kotlin/org/opensearch/commons/alerting/model/remote/monitors/RemoteMonitorTrigger.kt
@@ -0,0 +1,126 @@
+package org.opensearch.commons.alerting.model.remote.monitors
+
+import org.opensearch.common.CheckedFunction
+import org.opensearch.common.UUIDs
+import org.opensearch.commons.alerting.model.Trigger
+import org.opensearch.commons.alerting.model.action.Action
+import org.opensearch.core.ParseField
+import org.opensearch.core.common.bytes.BytesReference
+import org.opensearch.core.common.io.stream.StreamInput
+import org.opensearch.core.common.io.stream.StreamOutput
+import org.opensearch.core.xcontent.NamedXContentRegistry
+import org.opensearch.core.xcontent.ToXContent
+import org.opensearch.core.xcontent.XContentBuilder
+import org.opensearch.core.xcontent.XContentParser
+import org.opensearch.core.xcontent.XContentParserUtils
+import java.io.IOException
+import java.nio.ByteBuffer
+
+data class RemoteMonitorTrigger(
+    override val id: String,
+    override val name: String,
+    override val severity: String,
+    override val actions: List<Action>,
+    val trigger: BytesReference
+) : Trigger {
+    @Throws(IOException::class)
+    constructor(sin: StreamInput) : this(
+        sin.readString(),
+        sin.readString(),
+        sin.readString(),
+        sin.readList(::Action),
+        sin.readBytesReference()
+    )
+
+    fun asTemplateArg(): Map<String, Any> {
+        val bytes = trigger.toBytesRef().bytes
+        return mapOf(
+            Trigger.ID_FIELD to id,
+            Trigger.NAME_FIELD to name,
+            Trigger.SEVERITY_FIELD to severity,
+            Trigger.ACTIONS_FIELD to actions.map { it.asTemplateArg() },
+            TRIGGER_SIZE to bytes.size,
+            TRIGGER_FIELD to bytes
+        )
+    }
+
+    override fun name(): String {
+        return REMOTE_MONITOR_TRIGGER_FIELD
+    }
+
+    override fun writeTo(out: StreamOutput) {
+        out.writeString(id)
+        out.writeString(name)
+        out.writeString(severity)
+        out.writeCollection(actions)
+        out.writeBytesReference(trigger)
+    }
+
+    override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder {
+        val bytes = trigger.toBytesRef().bytes
+        return builder.startObject()
+            .startObject(REMOTE_MONITOR_TRIGGER_FIELD)
+            .field(Trigger.ID_FIELD, id)
+            .field(Trigger.NAME_FIELD, name)
+            .field(Trigger.SEVERITY_FIELD, severity)
+            .field(Trigger.ACTIONS_FIELD, actions.toTypedArray())
+            .field(TRIGGER_SIZE, bytes.size)
+            .field(TRIGGER_FIELD, bytes)
+            .endObject()
+            .endObject()
+    }
+
+    companion object {
+        const val TRIGGER_FIELD = "trigger"
+        const val TRIGGER_SIZE = "size"
+        const val REMOTE_MONITOR_TRIGGER_FIELD = "remote_monitor_trigger"
+
+        val XCONTENT_REGISTRY = NamedXContentRegistry.Entry(
+            Trigger::class.java,
+            ParseField(REMOTE_MONITOR_TRIGGER_FIELD),
+            CheckedFunction { parseInner(it) }
+        )
+
+        fun parseInner(xcp: XContentParser): RemoteMonitorTrigger {
+            var id = UUIDs.base64UUID() // assign a default triggerId if one is not specified
+            lateinit var name: String
+            lateinit var severity: String
+            val actions: MutableList<Action> = mutableListOf()
+            var bytes: ByteArray? = null
+            var size: Int = 0
+
+            if (xcp.currentToken() != XContentParser.Token.START_OBJECT && xcp.currentToken() != XContentParser.Token.FIELD_NAME) {
+                XContentParserUtils.throwUnknownToken(xcp.currentToken(), xcp.tokenLocation)
+            }
+
+            // If the parser began on START_OBJECT, move to the next token so that the while loop enters on
+            // the fieldName (or END_OBJECT if it's empty).
+            if (xcp.currentToken() == XContentParser.Token.START_OBJECT) xcp.nextToken()
+            while (xcp.currentToken() != XContentParser.Token.END_OBJECT) {
+                val fieldName = xcp.currentName()
+                xcp.nextToken()
+
+                when (fieldName) {
+                    Trigger.ID_FIELD -> id = xcp.text()
+                    Trigger.NAME_FIELD -> name = xcp.text()
+                    Trigger.SEVERITY_FIELD -> severity = xcp.text()
+                    Trigger.ACTIONS_FIELD -> {
+                        XContentParserUtils.ensureExpectedToken(
+                            XContentParser.Token.START_ARRAY,
+                            xcp.currentToken(),
+                            xcp
+                        )
+                        while (xcp.nextToken() != XContentParser.Token.END_ARRAY) {
+                            actions.add(Action.parse(xcp))
+                        }
+                    }
+                    TRIGGER_FIELD -> bytes = xcp.binaryValue()
+                    TRIGGER_SIZE -> size = xcp.intValue()
+                }
+                xcp.nextToken()
+            }
+            val trigger = BytesReference.fromByteBuffer(ByteBuffer.wrap(bytes, 0, size))
+            return RemoteMonitorTrigger(id, name, severity, actions, trigger)
+        }
+    }
+}
diff --git a/src/main/kotlin/org/opensearch/commons/alerting/settings/SupportedClusterMetricsSettings.kt b/src/main/kotlin/org/opensearch/commons/alerting/settings/SupportedClusterMetricsSettings.kt
new file mode 100644
index 00000000..e414e3ee
--- /dev/null
+++ b/src/main/kotlin/org/opensearch/commons/alerting/settings/SupportedClusterMetricsSettings.kt
@@ -0,0 +1,7 @@
+package org.opensearch.commons.alerting.settings
+
+import org.opensearch.commons.alerting.model.ClusterMetricsInput
+
+interface SupportedClusterMetricsSettings {
+    fun validateApiType(clusterMetricsInput: ClusterMetricsInput)
+}
diff --git a/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt b/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt
new file mode 100644
index 00000000..312758f0
--- /dev/null
+++ b/src/main/kotlin/org/opensearch/commons/alerting/util/AlertingException.kt
@@ -0,0 +1,89 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.commons.alerting.util
+
+import org.apache.logging.log4j.LogManager
+import org.opensearch.OpenSearchException
+import org.opensearch.OpenSearchSecurityException
+import org.opensearch.OpenSearchStatusException
+import org.opensearch.core.common.Strings
+import org.opensearch.core.rest.RestStatus
+import org.opensearch.index.IndexNotFoundException
+import org.opensearch.index.engine.VersionConflictEngineException
+import org.opensearch.indices.InvalidIndexNameException
+
+private val log = LogManager.getLogger(AlertingException::class.java)
+
+/**
+ * Converts into a user friendly message.
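For illustration (not part of the patch): the typical call pattern the wrap helper defined just below is written for, where a transport action converts arbitrary failures into a user-friendly error before notifying its listener; runTransportAction and listener are hypothetical names.

// Illustrative only: hypothetical transport-action error handling built on AlertingException.wrap.
try {
    runTransportAction(request)
} catch (e: Exception) {
    listener.onFailure(AlertingException.wrap(e))
}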
+ */ +class AlertingException(message: String, val status: RestStatus, val ex: Exception) : OpenSearchException(message, ex) { + + override fun status(): RestStatus { + return status + } + + companion object { + @JvmStatic + fun wrap(ex: Exception): OpenSearchException { + log.error("Alerting error: $ex") + + var friendlyMsg = "Unknown error" + var status = RestStatus.INTERNAL_SERVER_ERROR + when (ex) { + is IndexNotFoundException -> { + status = ex.status() + friendlyMsg = "Configured indices are not found: ${ex.index}" + } + is OpenSearchSecurityException -> { + status = ex.status() + friendlyMsg = "User doesn't have permissions to execute this action. Contact administrator." + } + is OpenSearchStatusException -> { + status = ex.status() + friendlyMsg = ex.message as String + } + is IllegalArgumentException -> { + status = RestStatus.BAD_REQUEST + friendlyMsg = ex.message as String + } + is VersionConflictEngineException -> { + status = ex.status() + friendlyMsg = ex.message as String + } + is InvalidIndexNameException -> { + status = RestStatus.BAD_REQUEST + friendlyMsg = ex.message as String + } + else -> { + if (!Strings.isNullOrEmpty(ex.message)) { + friendlyMsg = ex.message as String + } + } + } + // Wrapping the origin exception as runtime to avoid it being formatted. + // Currently, alerting-kibana is using `error.root_cause.reason` as text in the toast message. + // Below logic is to set friendly message to error.root_cause.reason. + return AlertingException(friendlyMsg, status, Exception("${ex.javaClass.name}: ${ex.message}")) + } + + @JvmStatic + fun merge(vararg ex: AlertingException): AlertingException { + var friendlyMsg = "" + var unwrappedExceptionMsg = "" + ex.forEach { + if (friendlyMsg != "") { + friendlyMsg += ", ${it.message}" + unwrappedExceptionMsg += ", ${it.ex.message}" + } else { + friendlyMsg = it.message.orEmpty() + unwrappedExceptionMsg = "${it.ex.message}" + } + } + return AlertingException(friendlyMsg, ex.first().status, Exception(unwrappedExceptionMsg)) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt b/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt new file mode 100644 index 00000000..887e8430 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/alerting/util/IndexUtils.kt @@ -0,0 +1,104 @@ +package org.opensearch.commons.alerting.util + +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.settings.SupportedClusterMetricsSettings +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.bytes.BytesReference +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.time.Instant +import java.util.Locale + +class IndexUtils { + companion object { + /** + * This regex asserts that the string: + * The index does not start with an underscore _, hyphen -, or plus sign + + * The index does not contain two consecutive periods (e.g., `..`) + * The index does not contain any whitespace characters, commas, backslashes, forward slashes, asterisks, + * question marks, double quotes, less than or greater than signs, pipes, colons, or periods. 
+ * The length of the index must be between 1 and 255 characters + */ + val VALID_INDEX_NAME_REGEX = Regex("""^(?![_\-\+])(?!.*\.\.)[^\s,\\\/\*\?"<>|#:\.]{1,255}$""") + + /** + * This regex asserts that the string: + * The index pattern can start with an optional period + * The index pattern can contain lowercase letters, digits, underscores, hyphens, asterisks, and periods + * The length of the index pattern must be between 1 and 255 characters + */ + val INDEX_PATTERN_REGEX = Regex("""^(?=.{1,255}$)\.?[a-z0-9_\-\*\.]+$""") + + const val NO_SCHEMA_VERSION = 0 + + const val MONITOR_MAX_INPUTS = 1 + const val WORKFLOW_MAX_INPUTS = 1 + + const val MONITOR_MAX_TRIGGERS = 10 + + const val _ID = "_id" + const val _VERSION = "_version" + + const val _SEQ_NO = "_seq_no" + const val _PRIMARY_TERM = "_primary_term" + + var supportedClusterMetricsSettings: SupportedClusterMetricsSettings? = null + } +} + +fun Monitor.isBucketLevelMonitor(): Boolean = + isMonitorOfStandardType() && + Monitor.MonitorType.valueOf(this.monitorType.uppercase(Locale.ROOT)) == Monitor.MonitorType.BUCKET_LEVEL_MONITOR + +fun XContentBuilder.optionalUserField(name: String, user: User?): XContentBuilder { + if (user == null) { + return nullField(name) + } + return this.field(name, user) +} + +fun XContentBuilder.optionalUsernameField(name: String, user: User?): XContentBuilder { + if (user == null) { + return nullField(name) + } + return this.field(name, user.name) +} + +fun XContentBuilder.optionalTimeField(name: String, instant: Instant?): XContentBuilder { + if (instant == null) { + return nullField(name) + } + // second name as readableName should be different than first name + return this.timeField(name, "${name}_in_millis", instant.toEpochMilli()) +} + +fun XContentParser.instant(): Instant? { + return when { + currentToken() == XContentParser.Token.VALUE_NULL -> null + currentToken().isValue -> Instant.ofEpochMilli(longValue()) + else -> { + XContentParserUtils.throwUnknownToken(currentToken(), tokenLocation) + null // unreachable + } + } +} + +/** + * Extension function for ES 6.3 and above that duplicates the ES 6.2 XContentBuilder.string() method. + */ +fun XContentBuilder.string(): String = BytesReference.bytes(this).utf8ToString() + +fun Monitor.isMonitorOfStandardType(): Boolean { + val standardMonitorTypes = Monitor.MonitorType.values().map { it.value.uppercase(Locale.ROOT) }.toSet() + return standardMonitorTypes.contains(this.monitorType.uppercase(Locale.ROOT)) +} + +fun getBucketKeysHash(bucketKeys: List): String = bucketKeys.joinToString(separator = "#") + +/** + * Since buckets can have multi-value keys, this converts the bucket key values to a string that can be used + * as the key for a HashMap to easily retrieve [AggregationResultBucket] based on the bucket key values. 
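For illustration (not part of the patch): getBucketKeysHash simply joins the key values with '#', so a composite bucket key collapses to a single lookup key. The sample values are invented.

// Illustrative only: a multi-value bucket key becomes one '#'-joined map key.
val hash = getBucketKeysHash(listOf("us-east-1", "warn"))
check(hash == "us-east-1#warn")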
+ */ +fun AggregationResultBucket.getBucketKeysHash(): String = getBucketKeysHash(this.bucketKeys) diff --git a/src/main/kotlin/org/opensearch/commons/notifications/NotificationConstants.kt b/src/main/kotlin/org/opensearch/commons/notifications/NotificationConstants.kt index 344cf0b8..74fcc600 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/NotificationConstants.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/NotificationConstants.kt @@ -36,22 +36,23 @@ object NotificationConstants { const val NAME_TAG = "name" const val DESCRIPTION_TAG = "description" const val IS_ENABLED_TAG = "is_enabled" - const val FEATURE_LIST_TAG = "feature_list" const val TITLE_TAG = "title" const val SEVERITY_TAG = "severity" const val TAGS_TAG = "tags" const val URL_TAG = "url" const val HEADER_PARAMS_TAG = "header_params" + const val TOPIC_ARN_TAG = "topic_arn" + const val ROLE_ARN_TAG = "role_arn" + const val REGION_TAG = "region" const val HOST_TAG = "host" const val PORT_TAG = "port" const val METHOD_TAG = "method" const val FROM_ADDRESS_TAG = "from_address" const val UPDATED_TIME_TAG = "last_updated_time_ms" const val CREATED_TIME_TAG = "created_time_ms" - const val TENANT_TAG = "tenant" const val CONFIG_LIST_TAG = "config_list" const val EVENT_LIST_TAG = "event_list" - const val FEATURE_CONFIG_LIST_TAG = "feature_channel_list" + const val CHANNEL_LIST_TAG = "channel_list" const val DELETE_RESPONSE_LIST_TAG = "delete_response_list" const val FROM_INDEX_TAG = "from_index" const val MAX_ITEMS_TAG = "max_items" @@ -64,7 +65,7 @@ object NotificationConstants { const val TOTAL_HIT_RELATION_TAG = "total_hit_relation" const val QUERY_TAG = "query" const val COMPACT_TAG = "compact" - const val CONFIG_TYPE_LIST_TAG = "config_type_list" + const val ALLOWED_CONFIG_TYPE_LIST_TAG = "allowed_config_type_list" const val PLUGIN_FEATURES_TAG = "plugin_features" const val DEFAULT_MAX_ITEMS = 1000 diff --git a/src/main/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterface.kt b/src/main/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterface.kt index e82aaba8..38359f32 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterface.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterface.kt @@ -1,52 +1,30 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications -import org.opensearch.action.ActionListener import org.opensearch.client.node.NodeClient import org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT +import org.opensearch.commons.notifications.action.BaseResponse import org.opensearch.commons.notifications.action.CreateNotificationConfigRequest import org.opensearch.commons.notifications.action.CreateNotificationConfigResponse import org.opensearch.commons.notifications.action.DeleteNotificationConfigRequest import org.opensearch.commons.notifications.action.DeleteNotificationConfigResponse -import org.opensearch.commons.notifications.action.GetFeatureChannelListRequest -import org.opensearch.commons.notifications.action.GetFeatureChannelListResponse +import org.opensearch.commons.notifications.action.GetChannelListRequest +import org.opensearch.commons.notifications.action.GetChannelListResponse import org.opensearch.commons.notifications.action.GetNotificationConfigRequest import org.opensearch.commons.notifications.action.GetNotificationConfigResponse -import org.opensearch.commons.notifications.action.GetNotificationEventRequest -import org.opensearch.commons.notifications.action.GetNotificationEventResponse import org.opensearch.commons.notifications.action.GetPluginFeaturesRequest import org.opensearch.commons.notifications.action.GetPluginFeaturesResponse +import org.opensearch.commons.notifications.action.LegacyPublishNotificationRequest +import org.opensearch.commons.notifications.action.LegacyPublishNotificationResponse import org.opensearch.commons.notifications.action.NotificationsActions.CREATE_NOTIFICATION_CONFIG_ACTION_TYPE import org.opensearch.commons.notifications.action.NotificationsActions.DELETE_NOTIFICATION_CONFIG_ACTION_TYPE -import org.opensearch.commons.notifications.action.NotificationsActions.GET_FEATURE_CHANNEL_LIST_ACTION_TYPE +import org.opensearch.commons.notifications.action.NotificationsActions.GET_CHANNEL_LIST_ACTION_TYPE import org.opensearch.commons.notifications.action.NotificationsActions.GET_NOTIFICATION_CONFIG_ACTION_TYPE -import org.opensearch.commons.notifications.action.NotificationsActions.GET_NOTIFICATION_EVENT_ACTION_TYPE import org.opensearch.commons.notifications.action.NotificationsActions.GET_PLUGIN_FEATURES_ACTION_TYPE +import org.opensearch.commons.notifications.action.NotificationsActions.LEGACY_PUBLISH_NOTIFICATION_ACTION_TYPE import org.opensearch.commons.notifications.action.NotificationsActions.SEND_NOTIFICATION_ACTION_TYPE import org.opensearch.commons.notifications.action.NotificationsActions.UPDATE_NOTIFICATION_CONFIG_ACTION_TYPE import org.opensearch.commons.notifications.action.SendNotificationRequest @@ -56,6 +34,10 @@ import org.opensearch.commons.notifications.action.UpdateNotificationConfigRespo import org.opensearch.commons.notifications.model.ChannelMessage import org.opensearch.commons.notifications.model.EventSource import org.opensearch.commons.utils.SecureClientWrapper +import org.opensearch.commons.utils.recreateObject +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse +import org.opensearch.core.common.io.stream.Writeable /** * All the transport action plugin interfaces for the Notification plugin @@ -76,7 +58,7 @@ object NotificationsPluginInterface { client.execute( CREATE_NOTIFICATION_CONFIG_ACTION_TYPE, request, - listener + wrapActionListener(listener) { response -> recreateObject(response) { CreateNotificationConfigResponse(it) } 
}
         )
     }
@@ -94,7 +76,7 @@ object NotificationsPluginInterface {
         client.execute(
             UPDATE_NOTIFICATION_CONFIG_ACTION_TYPE,
             request,
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { UpdateNotificationConfigResponse(it) } }
         )
     }
@@ -112,7 +94,7 @@ object NotificationsPluginInterface {
         client.execute(
             DELETE_NOTIFICATION_CONFIG_ACTION_TYPE,
             request,
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { DeleteNotificationConfigResponse(it) } }
         )
     }
@@ -130,25 +112,7 @@ object NotificationsPluginInterface {
         client.execute(
             GET_NOTIFICATION_CONFIG_ACTION_TYPE,
             request,
-            listener
-        )
-    }
-
-    /**
-     * Get notification events.
-     * @param client Node client for making transport action
-     * @param request The request object
-     * @param listener The listener for getting response
-     */
-    fun getNotificationEvent(
-        client: NodeClient,
-        request: GetNotificationEventRequest,
-        listener: ActionListener<GetNotificationEventResponse>
-    ) {
-        client.execute(
-            GET_NOTIFICATION_EVENT_ACTION_TYPE,
-            request,
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { GetNotificationConfigResponse(it) } }
         )
     }
@@ -166,25 +130,25 @@ object NotificationsPluginInterface {
         client.execute(
             GET_PLUGIN_FEATURES_ACTION_TYPE,
             request,
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { GetPluginFeaturesResponse(it) } }
         )
     }
 
     /**
-     * Get notification channel configuration enabled for a feature.
+     * Get notification channel configuration.
      * @param client Node client for making transport action
      * @param request The request object
      * @param listener The listener for getting response
      */
-    fun getFeatureChannelList(
+    fun getChannelList(
         client: NodeClient,
-        request: GetFeatureChannelListRequest,
-        listener: ActionListener<GetFeatureChannelListResponse>
+        request: GetChannelListRequest,
+        listener: ActionListener<GetChannelListResponse>
     ) {
         client.execute(
-            GET_FEATURE_CHANNEL_LIST_ACTION_TYPE,
+            GET_CHANNEL_LIST_ACTION_TYPE,
             request,
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { GetChannelListResponse(it) } }
         )
     }
@@ -209,7 +173,49 @@ object NotificationsPluginInterface {
         wrapper.execute(
             SEND_NOTIFICATION_ACTION_TYPE,
             SendNotificationRequest(eventSource, channelMessage, channelIds, threadContext),
-            listener
+            wrapActionListener(listener) { response -> recreateObject(response) { SendNotificationResponse(it) } }
         )
     }
+
+    /**
+     * Publishes a notification using the legacy notification implementation. No REST API is exposed.
+     * This is an internal API only for the Alerting and Index Management plugins; other consumers should use [sendNotification].
+     * @param client Node client for making transport action
+     * @param request The legacy publish notification request
+     * @param listener The listener for getting response
+     */
+    fun publishLegacyNotification(
+        client: NodeClient,
+        request: LegacyPublishNotificationRequest,
+        listener: ActionListener<LegacyPublishNotificationResponse>
+    ) {
+        client.execute(
+            LEGACY_PUBLISH_NOTIFICATION_ACTION_TYPE,
+            request,
+            wrapActionListener(listener) { response -> recreateObject(response) { LegacyPublishNotificationResponse(it) } }
+        )
+    }
+
+    /**
+     * Wraps an action listener typed on a concrete response class with a newly created one typed on ActionResponse.
+     * This is required because the response may be loaded by a different classloader across plugins.
+     * The onResponse(ActionResponse) overload avoids a class cast exception and gives the caller a chance
+     * to recreate the response object.
+     */
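For illustration (not part of the patch): a caller-side sketch of the renamed getChannelList API from another plugin; the client and log references are hypothetical placeholders.

// Illustrative only: fetch the channel list over transport; the response is recreated if it
// crossed a plugin classloader boundary (see wrapActionListener below).
NotificationsPluginInterface.getChannelList(
    client, // a NodeClient available to the calling plugin
    GetChannelListRequest(),
    object : ActionListener<GetChannelListResponse> {
        override fun onResponse(response: GetChannelListResponse) {
            log.info("channel list: ${response.searchResult}")
        }

        override fun onFailure(e: Exception) {
            log.error("getChannelList failed", e)
        }
    }
)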
+    @Suppress("UNCHECKED_CAST")
+    private fun <Response : BaseResponse> wrapActionListener(
+        listener: ActionListener<Response>,
+        recreate: (Writeable) -> Response
+    ): ActionListener<Response> {
+        return object : ActionListener<ActionResponse> {
+            override fun onResponse(response: ActionResponse) {
+                val recreated = response as? Response ?: recreate(response)
+                listener.onResponse(recreated)
+            }
+
+            override fun onFailure(exception: java.lang.Exception) {
+                listener.onFailure(exception)
+            }
+        } as ActionListener<Response>
+    }
 }
diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/BaseResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/BaseResponse.kt
index 4bfa2271..ecbbf4e9 100644
--- a/src/main/kotlin/org/opensearch/commons/notifications/action/BaseResponse.kt
+++ b/src/main/kotlin/org/opensearch/commons/notifications/action/BaseResponse.kt
@@ -1,36 +1,14 @@
 /*
+ * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-/*
- * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- *
 */
 package org.opensearch.commons.notifications.action
 
-import org.opensearch.action.ActionResponse
-import org.opensearch.common.io.stream.StreamInput
-import org.opensearch.common.xcontent.ToXContentObject
-import org.opensearch.rest.RestStatus
+import org.opensearch.core.action.ActionResponse
+import org.opensearch.core.common.io.stream.StreamInput
+import org.opensearch.core.rest.RestStatus
+import org.opensearch.core.xcontent.ToXContentObject
 import java.io.IOException
 
 /**
diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequest.kt
index e62e92dd..dea36ef0 100644
--- a/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequest.kt
+++ b/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequest.kt
@@ -1,47 +1,25 @@
 /*
+ * Copyright OpenSearch Contributors
  * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-/*
- * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * or in the "license" file accompanying this file.
This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TAG import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateId +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponse.kt index 742e7be5..3e32a49b 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponse.kt @@ -1,40 +1,18 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequest.kt index 3699369f..6dd78923 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequest.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import org.opensearch.action.ValidateActions -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_LIST_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.stringList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponse.kt index e701daa2..2337355f 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponse.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.DELETE_RESPONSE_LIST_TAG import org.opensearch.commons.utils.STRING_READER import org.opensearch.commons.utils.STRING_WRITER import org.opensearch.commons.utils.enumReader import org.opensearch.commons.utils.enumWriter import org.opensearch.commons.utils.logger -import org.opensearch.rest.RestStatus +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.rest.RestStatus +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequest.kt new file mode 100644 index 00000000..c62f38a7 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequest.kt @@ -0,0 +1,104 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.notifications.NotificationConstants.COMPACT_TAG +import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** + * This request is plugin-only call. i.e. REST interface is not exposed. + */ +class GetChannelListRequest : ActionRequest, ToXContentObject { + val compact: Boolean // Dummy request parameter for transport request + + companion object { + private val log by logger(GetChannelListRequest::class.java) + + /** + * reader to create instance of class from writable. + */ + val reader = Writeable.Reader { GetChannelListRequest(it) } + + /** + * Creator used in REST communication. + * @param parser XContentParser to deserialize data from. 
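For illustration (not part of the patch): the request body carries only the compact flag, so an XContent round trip is a one-field object. XContentFactory and ToXContent.EMPTY_PARAMS are the usual OpenSearch helpers and are assumed here.

// Illustrative only: GetChannelListRequest serializes to {"compact":true} and parse() reads it back.
val original = GetChannelListRequest(compact = true)
val builder = XContentFactory.jsonBuilder()
original.toXContent(builder, ToXContent.EMPTY_PARAMS)
// BytesReference.bytes(builder).utf8ToString() == """{"compact":true}"""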
+         */
+        @JvmStatic
+        @Throws(IOException::class)
+        fun parse(parser: XContentParser): GetChannelListRequest {
+            var compact = false
+
+            XContentParserUtils.ensureExpectedToken(
+                XContentParser.Token.START_OBJECT,
+                parser.currentToken(),
+                parser
+            )
+            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+                val fieldName = parser.currentName()
+                parser.nextToken()
+                when (fieldName) {
+                    COMPACT_TAG -> compact = parser.booleanValue()
+                    else -> {
+                        parser.skipChildren()
+                        log.info("Unexpected field: $fieldName, while parsing GetChannelListRequest")
+                    }
+                }
+            }
+            return GetChannelListRequest(compact)
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder {
+        return builder!!.startObject()
+            .field(COMPACT_TAG, compact)
+            .endObject()
+    }
+
+    /**
+     * constructor for creating the class
+     * @param compact Dummy request parameter for transport request
+     */
+    constructor(compact: Boolean = false) {
+        this.compact = compact
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Throws(IOException::class)
+    constructor(input: StreamInput) : super(input) {
+        compact = input.readBoolean()
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Throws(IOException::class)
+    override fun writeTo(output: StreamOutput) {
+        super.writeTo(output)
+        output.writeBoolean(compact)
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    override fun validate(): ActionRequestValidationException? {
+        return null
+    }
+}
diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponse.kt
new file mode 100644
index 00000000..57a94a4e
--- /dev/null
+++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponse.kt
@@ -0,0 +1,70 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+package org.opensearch.commons.notifications.action
+
+import org.opensearch.commons.notifications.model.ChannelList
+import org.opensearch.core.common.io.stream.StreamInput
+import org.opensearch.core.common.io.stream.StreamOutput
+import org.opensearch.core.common.io.stream.Writeable
+import org.opensearch.core.xcontent.ToXContent
+import org.opensearch.core.xcontent.XContentBuilder
+import org.opensearch.core.xcontent.XContentParser
+import java.io.IOException
+
+/**
+ * Action Response for getting the list of notification channels.
+ */
+class GetChannelListResponse : BaseResponse {
+    val searchResult: ChannelList
+
+    companion object {
+
+        /**
+         * reader to create instance of class from writable.
+         */
+        val reader = Writeable.Reader { GetChannelListResponse(it) }
+
+        /**
+         * Creator used in REST communication.
+         * @param parser XContentParser to deserialize data from.
+ */ + @JvmStatic + @Throws(IOException::class) + fun parse(parser: XContentParser): GetChannelListResponse { + return GetChannelListResponse(ChannelList(parser)) + } + } + + /** + * constructor for creating the class + * @param searchResult the notification configuration list + */ + constructor(searchResult: ChannelList) { + this.searchResult = searchResult + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + constructor(input: StreamInput) : super(input) { + searchResult = ChannelList(input) + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + override fun writeTo(output: StreamOutput) { + searchResult.writeTo(output) + } + + /** + * {@inheritDoc} + */ + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + return searchResult.toXContent(builder, params) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequest.kt deleted file mode 100644 index 17e4f6fb..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequest.kt +++ /dev/null @@ -1,132 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.FEATURE_TAG -import org.opensearch.commons.notifications.model.Feature -import org.opensearch.commons.utils.logger -import java.io.IOException - -/** - * This request is plugin-only call. i.e. REST interface is not exposed. - * Also the library will remove the user context while making this call - * so that user making this call need not have to set permission to this API. - * Hence the request also contains tenant info for space isolation. - */ -class GetFeatureChannelListRequest : ActionRequest, ToXContentObject { - val feature: Feature - - companion object { - private val log by logger(GetFeatureChannelListRequest::class.java) - - /** - * reader to create instance of class from writable. 
- */ - val reader = Writeable.Reader { GetFeatureChannelListRequest(it) } - - /** - * Creator used in REST communication. - * @param parser XContentParser to deserialize data from. - */ - @JvmStatic - @Throws(IOException::class) - fun parse(parser: XContentParser): GetFeatureChannelListRequest { - var feature: Feature? = null - - XContentParserUtils.ensureExpectedToken( - XContentParser.Token.START_OBJECT, - parser.currentToken(), - parser - ) - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = parser.currentName() - parser.nextToken() - when (fieldName) { - FEATURE_TAG -> feature = Feature.fromTagOrDefault(parser.text()) - else -> { - parser.skipChildren() - log.info("Unexpected field: $fieldName, while parsing GetFeatureChannelListRequest") - } - } - } - feature ?: throw IllegalArgumentException("$FEATURE_TAG field absent") - return GetFeatureChannelListRequest(feature) - } - } - - /** - * constructor for creating the class - * @param feature the caller plugin feature - */ - constructor(feature: Feature) { - this.feature = feature - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - constructor(input: StreamInput) : super(input) { - feature = input.readEnum(Feature::class.java) - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - override fun writeTo(output: StreamOutput) { - super.writeTo(output) - output.writeEnum(feature) - } - - /** - * {@inheritDoc} - */ - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - builder!! - return builder.startObject() - .field(FEATURE_TAG, feature) - .endObject() - } - - /** - * {@inheritDoc} - */ - override fun validate(): ActionRequestValidationException? { - return null - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponse.kt deleted file mode 100644 index 455109fc..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponse.kt +++ /dev/null @@ -1,92 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * - */ -package org.opensearch.commons.notifications.action - -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.commons.notifications.model.FeatureChannelList -import java.io.IOException - -/** - * Action Response for creating new configuration. - */ -class GetFeatureChannelListResponse : BaseResponse { - val searchResult: FeatureChannelList - - companion object { - - /** - * reader to create instance of class from writable. - */ - val reader = Writeable.Reader { GetFeatureChannelListResponse(it) } - - /** - * Creator used in REST communication. - * @param parser XContentParser to deserialize data from. - */ - @JvmStatic - @Throws(IOException::class) - fun parse(parser: XContentParser): GetFeatureChannelListResponse { - return GetFeatureChannelListResponse(FeatureChannelList(parser)) - } - } - - /** - * constructor for creating the class - * @param searchResult the notification configuration list - */ - constructor(searchResult: FeatureChannelList) { - this.searchResult = searchResult - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - constructor(input: StreamInput) : super(input) { - searchResult = FeatureChannelList(input) - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - override fun writeTo(output: StreamOutput) { - searchResult.writeTo(output) - } - - /** - * {@inheritDoc} - */ - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - return searchResult.toXContent(builder, params) - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequest.kt index a410fe7e..34485a3c 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequest.kt @@ -1,42 +1,12 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import org.opensearch.action.ValidateActions -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_LIST_TAG import org.opensearch.commons.notifications.NotificationConstants.DEFAULT_MAX_ITEMS import org.opensearch.commons.notifications.NotificationConstants.FILTER_PARAM_LIST_TAG @@ -50,6 +20,14 @@ import org.opensearch.commons.utils.enumReader import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.stringList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import org.opensearch.search.sort.SortOrder import java.io.IOException diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponse.kt index a810759b..6723c015 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponse.kt @@ -1,38 +1,16 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser import org.opensearch.commons.notifications.model.NotificationConfigSearchResult +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequest.kt deleted file mode 100644 index 27e20554..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequest.kt +++ /dev/null @@ -1,193 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * - */ -package org.opensearch.commons.notifications.action - -import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionRequestValidationException -import org.opensearch.action.ValidateActions -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.DEFAULT_MAX_ITEMS -import org.opensearch.commons.notifications.NotificationConstants.EVENT_ID_LIST_TAG -import org.opensearch.commons.notifications.NotificationConstants.FILTER_PARAM_LIST_TAG -import org.opensearch.commons.notifications.NotificationConstants.FROM_INDEX_TAG -import org.opensearch.commons.notifications.NotificationConstants.MAX_ITEMS_TAG -import org.opensearch.commons.notifications.NotificationConstants.SORT_FIELD_TAG -import org.opensearch.commons.notifications.NotificationConstants.SORT_ORDER_TAG -import org.opensearch.commons.utils.STRING_READER -import org.opensearch.commons.utils.STRING_WRITER -import org.opensearch.commons.utils.enumReader -import org.opensearch.commons.utils.fieldIfNotNull -import org.opensearch.commons.utils.logger -import org.opensearch.commons.utils.stringList -import org.opensearch.search.sort.SortOrder -import java.io.IOException - -/** - * Action Request for getting notification event. - */ -class GetNotificationEventRequest : ActionRequest, ToXContentObject { - val eventIds: Set - val fromIndex: Int - val maxItems: Int - val sortField: String? - val sortOrder: SortOrder? - val filterParams: Map - - companion object { - private val log by logger(GetNotificationEventRequest::class.java) - - /** - * reader to create instance of class from writable. - */ - val reader = Writeable.Reader { GetNotificationEventRequest(it) } - - /** - * Creator used in REST communication. - * @param parser XContentParser to deserialize data from. - */ - @JvmStatic - @Throws(IOException::class) - fun parse(parser: XContentParser): GetNotificationEventRequest { - var eventIds: Set = setOf() - var fromIndex = 0 - var maxItems = DEFAULT_MAX_ITEMS - var sortField: String? = null - var sortOrder: SortOrder? 
= null - var filterParams: Map = mapOf() - - XContentParserUtils.ensureExpectedToken( - XContentParser.Token.START_OBJECT, - parser.currentToken(), - parser - ) - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = parser.currentName() - parser.nextToken() - when (fieldName) { - EVENT_ID_LIST_TAG -> eventIds = parser.stringList().toSet() - FROM_INDEX_TAG -> fromIndex = parser.intValue() - MAX_ITEMS_TAG -> maxItems = parser.intValue() - SORT_FIELD_TAG -> sortField = parser.textOrNull() - SORT_ORDER_TAG -> sortOrder = SortOrder.fromString(parser.text()) - FILTER_PARAM_LIST_TAG -> filterParams = parser.mapStrings() - else -> { - parser.skipChildren() - log.info("Unexpected field: $fieldName, while parsing GetNotificationEventRequest") - } - } - } - return GetNotificationEventRequest(eventIds, fromIndex, maxItems, sortField, sortOrder, filterParams) - } - } - - /** - * {@inheritDoc} - */ - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - return builder!!.startObject() - .field(EVENT_ID_LIST_TAG, eventIds) - .field(FROM_INDEX_TAG, fromIndex) - .field(MAX_ITEMS_TAG, maxItems) - .fieldIfNotNull(SORT_FIELD_TAG, sortField) - .fieldIfNotNull(SORT_ORDER_TAG, sortOrder) - .field(FILTER_PARAM_LIST_TAG, filterParams) - .endObject() - } - - /** - * constructor for creating the class - * @param eventIds the ids of the notification events (other parameters are not relevant if ids are present) - * @param fromIndex the starting index for paginated response - * @param maxItems the maximum number of items to return for paginated response - * @param sortField the sort field if response has many items - * @param sortOrder the sort order if response has many items - * @param filterParams the filter parameters - */ - constructor( - eventIds: Set = setOf(), - fromIndex: Int = 0, - maxItems: Int = DEFAULT_MAX_ITEMS, - sortField: String? = null, - sortOrder: SortOrder? = null, - filterParams: Map = mapOf() - ) { - this.eventIds = eventIds - this.fromIndex = fromIndex - this.maxItems = maxItems - this.sortField = sortField - this.sortOrder = sortOrder - this.filterParams = filterParams - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - constructor(input: StreamInput) : super(input) { - eventIds = input.readStringList().toSet() - fromIndex = input.readInt() - maxItems = input.readInt() - sortField = input.readOptionalString() - sortOrder = input.readOptionalWriteable(enumReader(SortOrder::class.java)) - filterParams = input.readMap(STRING_READER, STRING_READER) - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - override fun writeTo(output: StreamOutput) { - super.writeTo(output) - output.writeStringCollection(eventIds) - output.writeInt(fromIndex) - output.writeInt(maxItems) - output.writeOptionalString(sortField) - output.writeOptionalWriteable(sortOrder) - output.writeMap(filterParams, STRING_WRITER, STRING_WRITER) - } - - /** - * {@inheritDoc} - */ - override fun validate(): ActionRequestValidationException? { - var validationException: ActionRequestValidationException? 
= null - if (fromIndex < 0) { - validationException = ValidateActions.addValidationError("fromIndex is -ve", validationException) - } - if (maxItems <= 0) { - validationException = ValidateActions.addValidationError("maxItems is not +ve", validationException) - } - return validationException - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponse.kt deleted file mode 100644 index 176d95ea..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponse.kt +++ /dev/null @@ -1,92 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.action - -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.commons.notifications.model.NotificationEventSearchResult -import java.io.IOException - -/** - * Action Response for getting notification event. - */ -class GetNotificationEventResponse : BaseResponse { - val searchResult: NotificationEventSearchResult - - companion object { - - /** - * reader to create instance of class from writable. - */ - val reader = Writeable.Reader { GetNotificationEventResponse(it) } - - /** - * Creator used in REST communication. - * @param parser XContentParser to deserialize data from. 
- */ - @JvmStatic - @Throws(IOException::class) - fun parse(parser: XContentParser): GetNotificationEventResponse { - return GetNotificationEventResponse(NotificationEventSearchResult(parser)) - } - } - - /** - * constructor for creating the class - * @param searchResult the notification event list - */ - constructor(searchResult: NotificationEventSearchResult) { - this.searchResult = searchResult - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - constructor(input: StreamInput) : super(input) { - searchResult = NotificationEventSearchResult(input) - } - - /** - * {@inheritDoc} - */ - @Throws(IOException::class) - override fun writeTo(output: StreamOutput) { - searchResult.writeTo(output) - } - - /** - * {@inheritDoc} - */ - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - return searchResult.toXContent(builder, params) - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequest.kt index 8b9b81b4..0eb0d14d 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequest.kt @@ -1,43 +1,21 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.COMPACT_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponse.kt index 681b943e..41a80875 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponse.kt @@ -1,51 +1,29 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TYPE_LIST_TAG +import org.opensearch.commons.notifications.NotificationConstants.ALLOWED_CONFIG_TYPE_LIST_TAG import org.opensearch.commons.notifications.NotificationConstants.PLUGIN_FEATURES_TAG import org.opensearch.commons.utils.STRING_READER import org.opensearch.commons.utils.STRING_WRITER import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.stringList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** * Action Response for getting notification plugin features. */ class GetPluginFeaturesResponse : BaseResponse { - val configTypeList: List<String> + val allowedConfigTypeList: List<String> val pluginFeatures: Map<String, String> companion object { @@ -63,7 +41,7 @@ class GetPluginFeaturesResponse : BaseResponse { @JvmStatic @Throws(IOException::class) fun parse(parser: XContentParser): GetPluginFeaturesResponse { - var configTypeList: List<String>? = null + var allowedConfigTypeList: List<String>? = null var pluginFeatures: Map<String, String>?
= null XContentParserUtils.ensureExpectedToken( @@ -75,7 +53,7 @@ class GetPluginFeaturesResponse : BaseResponse { val fieldName = parser.currentName() parser.nextToken() when (fieldName) { - CONFIG_TYPE_LIST_TAG -> configTypeList = parser.stringList() + ALLOWED_CONFIG_TYPE_LIST_TAG -> allowedConfigTypeList = parser.stringList() PLUGIN_FEATURES_TAG -> pluginFeatures = parser.mapStrings() else -> { parser.skipChildren() @@ -83,9 +61,9 @@ } } } - configTypeList ?: throw IllegalArgumentException("$CONFIG_TYPE_LIST_TAG field absent") + allowedConfigTypeList ?: throw IllegalArgumentException("$ALLOWED_CONFIG_TYPE_LIST_TAG field absent") pluginFeatures ?: throw IllegalArgumentException("$PLUGIN_FEATURES_TAG field absent") - return GetPluginFeaturesResponse(configTypeList, pluginFeatures) + return GetPluginFeaturesResponse(allowedConfigTypeList, pluginFeatures) } } @@ -94,18 +72,21 @@ class GetPluginFeaturesResponse : BaseResponse { */ override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { return builder!!.startObject() - .field(CONFIG_TYPE_LIST_TAG, configTypeList) + .field(ALLOWED_CONFIG_TYPE_LIST_TAG, allowedConfigTypeList) .field(PLUGIN_FEATURES_TAG, pluginFeatures) .endObject() } /** * constructor for creating the class - * @param configTypeList the list of config types supported by plugin + * @param allowedConfigTypeList the list of config types supported by plugin * @param pluginFeatures the map of plugin features supported to its value */ - constructor(configTypeList: List<String>, pluginFeatures: Map<String, String>) { - this.configTypeList = configTypeList + constructor( + allowedConfigTypeList: List<String>, + pluginFeatures: Map<String, String> + ) { + this.allowedConfigTypeList = allowedConfigTypeList this.pluginFeatures = pluginFeatures } @@ -114,7 +95,7 @@ class GetPluginFeaturesResponse : BaseResponse { */ @Throws(IOException::class) constructor(input: StreamInput) : super(input) { - configTypeList = input.readStringList() + allowedConfigTypeList = input.readStringList() pluginFeatures = input.readMap(STRING_READER, STRING_READER) } @@ -123,7 +104,7 @@ class GetPluginFeaturesResponse : BaseResponse { */ @Throws(IOException::class) override fun writeTo(output: StreamOutput) { - output.writeStringCollection(configTypeList) + output.writeStringCollection(allowedConfigTypeList) output.writeMap(pluginFeatures, STRING_WRITER, STRING_WRITER) } } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequest.kt new file mode 100644 index 00000000..7d8cf391 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequest.kt @@ -0,0 +1,74 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.action + +import org.opensearch.action.ActionRequest +import org.opensearch.action.ActionRequestValidationException +import org.opensearch.commons.destination.message.LegacyBaseMessage +import org.opensearch.commons.destination.message.LegacyChimeMessage +import org.opensearch.commons.destination.message.LegacyCustomWebhookMessage +import org.opensearch.commons.destination.message.LegacyDestinationType +import org.opensearch.commons.destination.message.LegacyEmailMessage +import org.opensearch.commons.destination.message.LegacySNSMessage +import
org.opensearch.commons.destination.message.LegacySlackMessage +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import java.io.IOException + +/** + * Action Request to publish notification. This is a legacy implementation. + * This should not be used going forward, instead use [SendNotificationRequest]. + */ +class LegacyPublishNotificationRequest : ActionRequest { + val baseMessage: LegacyBaseMessage + + companion object { + /** + * reader to create instance of class from writable. + */ + val reader = Writeable.Reader { LegacyPublishNotificationRequest(it) } + } + + /** + * constructor for creating the class + * @param baseMessage the base message to send + */ + constructor( + baseMessage: LegacyBaseMessage + ) { + this.baseMessage = baseMessage + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + constructor(input: StreamInput) : super(input) { + baseMessage = when (requireNotNull(input.readEnum(LegacyDestinationType::class.java)) { "Destination type cannot be null" }) { + LegacyDestinationType.LEGACY_CHIME -> LegacyChimeMessage(input) + LegacyDestinationType.LEGACY_CUSTOM_WEBHOOK -> LegacyCustomWebhookMessage(input) + LegacyDestinationType.LEGACY_SLACK -> LegacySlackMessage(input) + LegacyDestinationType.LEGACY_EMAIL -> LegacyEmailMessage(input) + LegacyDestinationType.LEGACY_SNS -> LegacySNSMessage(input) + } + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + override fun writeTo(output: StreamOutput) { + super.writeTo(output) + output.writeEnum(baseMessage.channelType) + baseMessage.writeTo(output) + } + + /** + * {@inheritDoc} + */ + override fun validate(): ActionRequestValidationException? = null +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponse.kt new file mode 100644 index 00000000..c6084358 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponse.kt @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.action + +import org.opensearch.commons.destination.response.LegacyDestinationResponse +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException + +/** + * Action Response for legacy publish notification. + */ +class LegacyPublishNotificationResponse : BaseResponse { + val destinationResponse: LegacyDestinationResponse + + companion object { + /** + * reader to create instance of class from writable. 
+ */ + val reader = Writeable.Reader { LegacyPublishNotificationResponse(it) } + } + + /** + * constructor for creating the class + * @param destinationResponse the response of the published notification + */ + constructor(destinationResponse: LegacyDestinationResponse) { + this.destinationResponse = destinationResponse + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + constructor(input: StreamInput) : super(input) { + destinationResponse = LegacyDestinationResponse(input) + } + + /** + * {@inheritDoc} + */ + @Throws(IOException::class) + override fun writeTo(output: StreamOutput) { + destinationResponse.writeTo(output) + } + + // This class is only used across transport wire and does not need to implement toXContent + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + throw IllegalStateException("Legacy notification response is not intended for REST or persistence and does not support XContent.") + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/NotificationsActions.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/NotificationsActions.kt index fe7457d0..e4a8cfe5 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/NotificationsActions.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/NotificationsActions.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action @@ -52,11 +30,6 @@ object NotificationsActions { */ const val GET_NOTIFICATION_CONFIG_NAME = "cluster:admin/opensearch/notifications/configs/get" - /** - * Get notification events transport action name. - */ - const val GET_NOTIFICATION_EVENT_NAME = "cluster:admin/opensearch/notifications/events/get" - /** * Get notification plugin features transport action name. */ @@ -65,13 +38,19 @@ object NotificationsActions { /** * Get Config List for feature. Internal only - Inter plugin communication. */ - const val GET_FEATURE_CHANNEL_LIST_NAME = "cluster:admin/opensearch/notifications/feature/channels/get" + const val GET_CHANNEL_LIST_NAME = "cluster:admin/opensearch/notifications/channels/get" /** * Send notification message. Internal only - Inter plugin communication. */ const val SEND_NOTIFICATION_NAME = "cluster:admin/opensearch/notifications/feature/send" + /** + * Publish legacy notification message. Internal only - Inter plugin communication. + * Only for the Alerting and Index Management plugins. 
+ */ + const val LEGACY_PUBLISH_NOTIFICATION_NAME = "cluster:admin/opensearch/notifications/feature/publish" + /** * Create notification configuration transport action type. */ @@ -96,12 +75,6 @@ object NotificationsActions { val GET_NOTIFICATION_CONFIG_ACTION_TYPE = ActionType(GET_NOTIFICATION_CONFIG_NAME, ::GetNotificationConfigResponse) - /** - * Get notification events transport action type. - */ - val GET_NOTIFICATION_EVENT_ACTION_TYPE = - ActionType(GET_NOTIFICATION_EVENT_NAME, ::GetNotificationEventResponse) - /** * Get notification plugin features transport action type. */ @@ -109,14 +82,21 @@ object NotificationsActions { ActionType(GET_PLUGIN_FEATURES_NAME, ::GetPluginFeaturesResponse) /** - * Get Config List for feature transport action type. + * Get notification channel List transport action type. */ - val GET_FEATURE_CHANNEL_LIST_ACTION_TYPE = - ActionType(GET_FEATURE_CHANNEL_LIST_NAME, ::GetFeatureChannelListResponse) + val GET_CHANNEL_LIST_ACTION_TYPE = + ActionType(GET_CHANNEL_LIST_NAME, ::GetChannelListResponse) /** * Send notification transport action type. Internal only - Inter plugin communication. */ val SEND_NOTIFICATION_ACTION_TYPE = ActionType(SEND_NOTIFICATION_NAME, ::SendNotificationResponse) + + /** + * Send legacy notification transport action type. Internal only - Inter plugin communication. + * Only for the Alerting and Index Management plugins. + */ + val LEGACY_PUBLISH_NOTIFICATION_ACTION_TYPE = + ActionType(LEGACY_PUBLISH_NOTIFICATION_NAME, ::LegacyPublishNotificationResponse) } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequest.kt index 9943ed57..060ae545 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequest.kt @@ -1,42 +1,12 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import org.opensearch.action.ValidateActions -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CHANNEL_ID_LIST_TAG import org.opensearch.commons.notifications.NotificationConstants.CHANNEL_MESSAGE_TAG import org.opensearch.commons.notifications.NotificationConstants.EVENT_SOURCE_TAG @@ -46,6 +16,14 @@ import org.opensearch.commons.notifications.model.EventSource import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.stringList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponse.kt index 38338fc5..e3642e1b 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponse.kt @@ -1,50 +1,25 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.EVENT_ID_TAG -import org.opensearch.commons.utils.logger +import org.opensearch.commons.notifications.model.NotificationEvent +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser import java.io.IOException /** * Action Response for send notification. */ class SendNotificationResponse : BaseResponse { - val notificationId: String + val notificationEvent: NotificationEvent companion object { - private val log by logger(SendNotificationResponse::class.java) /** * reader to create instance of class from writable. @@ -58,35 +33,16 @@ class SendNotificationResponse : BaseResponse { @JvmStatic @Throws(IOException::class) fun parse(parser: XContentParser): SendNotificationResponse { - var notificationId: String? = null - - XContentParserUtils.ensureExpectedToken( - XContentParser.Token.START_OBJECT, - parser.currentToken(), - parser - ) - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = parser.currentName() - parser.nextToken() - when (fieldName) { - EVENT_ID_TAG -> notificationId = parser.text() - else -> { - parser.skipChildren() - log.info("Unexpected field: $fieldName, while parsing SendNotificationResponse") - } - } - } - notificationId ?: throw IllegalArgumentException("$EVENT_ID_TAG field absent") - return SendNotificationResponse(notificationId) + return SendNotificationResponse(NotificationEvent.parse(parser)) } } /** * constructor for creating the class - * @param configId the id of the created notification configuration + * @param notificationEvent the id of the created notification configuration */ - constructor(configId: String) { - this.notificationId = configId + constructor(notificationEvent: NotificationEvent) { + this.notificationEvent = notificationEvent } /** @@ -94,7 +50,7 @@ class SendNotificationResponse : BaseResponse { */ @Throws(IOException::class) constructor(input: StreamInput) : super(input) { - notificationId = input.readString() + notificationEvent = NotificationEvent(input) } /** @@ -102,16 +58,13 @@ class SendNotificationResponse : BaseResponse { */ @Throws(IOException::class) override fun writeTo(output: StreamOutput) { - output.writeString(notificationId) + notificationEvent.writeTo(output) } /** * {@inheritDoc} */ override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - builder!! 
- return builder.startObject() - .field(EVENT_ID_TAG, notificationId) - .endObject() + return notificationEvent.toXContent(builder, params) } } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequest.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequest.kt index dcba7b71..d28060aa 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequest.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequest.kt @@ -1,47 +1,25 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action import org.opensearch.action.ActionRequest import org.opensearch.action.ActionRequestValidationException import org.opensearch.action.ValidateActions -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TAG import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponse.kt b/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponse.kt index 0e15a69b..42e71a13 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponse.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponse.kt @@ -1,40 +1,18 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The 
OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Attachment.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Attachment.kt index 282781f0..ff650a20 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Attachment.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Attachment.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.FILE_CONTENT_TYPE_TAG import org.opensearch.commons.notifications.NotificationConstants.FILE_DATA_TAG import org.opensearch.commons.notifications.NotificationConstants.FILE_ENCODING_TAG import org.opensearch.commons.notifications.NotificationConstants.FILE_NAME_TAG import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils /** * Data class for storing attachment of channel message. diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/BaseConfigData.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/BaseConfigData.kt index a766d3e0..1b8e59bf 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/BaseConfigData.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/BaseConfigData.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/BaseModel.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/BaseModel.kt index bdc13931..2271fa7f 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/BaseModel.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/BaseModel.kt @@ -1,35 +1,13 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). 
- * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContentObject /** * interface for representing objects. */ -interface BaseModel : Writeable, ToXContent +interface BaseModel : Writeable, ToXContentObject diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannel.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Channel.kt similarity index 71% rename from src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannel.kt rename to src/main/kotlin/org/opensearch/commons/notifications/model/Channel.kt index a108d2ff..68f42b27 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannel.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Channel.kt @@ -1,51 +1,29 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TYPE_TAG import org.opensearch.commons.notifications.NotificationConstants.DESCRIPTION_TAG import org.opensearch.commons.notifications.NotificationConstants.IS_ENABLED_TAG import org.opensearch.commons.notifications.NotificationConstants.NAME_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** - * Data class representing Notification config for exposed for other plugins. + * Data class representing Notification config exposed for other plugins. */ -data class FeatureChannel( +data class Channel( val configId: String, val name: String, val description: String, @@ -59,12 +37,12 @@ data class FeatureChannel( } companion object { - private val log by logger(FeatureChannel::class.java) + private val log by logger(Channel::class.java) /** * reader to create instance of class from writable. */ - val reader = Writeable.Reader { FeatureChannel(it) } + val reader = Writeable.Reader { Channel(it) } /** * Creator used in REST communication. @@ -73,7 +51,7 @@ data class FeatureChannel( @Suppress("ComplexMethod") @JvmStatic @Throws(IOException::class) - fun parse(parser: XContentParser): FeatureChannel { + fun parse(parser: XContentParser): Channel { var configId: String? = null var name: String? 
= null var description = "" @@ -96,14 +74,14 @@ data class FeatureChannel( IS_ENABLED_TAG -> isEnabled = parser.booleanValue() else -> { parser.skipChildren() - log.info("Unexpected field: $fieldName, while parsing FeatureChannel") + log.info("Unexpected field: $fieldName, while parsing Channel") } } } configId ?: throw IllegalArgumentException("$CONFIG_ID_TAG field absent") name ?: throw IllegalArgumentException("$NAME_TAG field absent") configType ?: throw IllegalArgumentException("$CONFIG_TYPE_TAG field absent") - return FeatureChannel( + return Channel( configId, name, description, diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelList.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelList.kt new file mode 100644 index 00000000..b92b8425 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelList.kt @@ -0,0 +1,71 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.model + +import org.apache.lucene.search.TotalHits +import org.opensearch.action.search.SearchResponse +import org.opensearch.commons.notifications.NotificationConstants.CHANNEL_LIST_TAG +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.XContentParser + +/** + * Channel search results + */ +class ChannelList : SearchResults<Channel> { + + /** + * single item result constructor + */ + constructor(objectItem: Channel) : super(CHANNEL_LIST_TAG, objectItem) + + /** + * multiple items result constructor + */ + constructor(objectList: List<Channel>) : this( + 0, + objectList.size.toLong(), + TotalHits.Relation.EQUAL_TO, + objectList + ) + + /** + * all param constructor + */ + constructor( + startIndex: Long, + totalHits: Long, + totalHitRelation: TotalHits.Relation, + objectList: List<Channel> + ) : super(startIndex, totalHits, totalHitRelation, CHANNEL_LIST_TAG, objectList) + + /** + * Constructor used in transport action communication. + * @param input StreamInput stream to deserialize data from. + */ + constructor(input: StreamInput) : super(input, Channel.reader) + + /** + * Construct object from XContentParser + */ + constructor(parser: XContentParser) : super(parser, CHANNEL_LIST_TAG) + + /** + * Construct object from SearchResponse + */ + constructor(from: Long, response: SearchResponse, searchHitParser: SearchHitParser<Channel>) : super( + from, + response, + searchHitParser, + CHANNEL_LIST_TAG + ) + + /** + * {@inheritDoc} + */ + override fun parseItem(parser: XContentParser): Channel { + return Channel.parse(parser) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelMessage.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelMessage.kt index 5f70efa7..d733a8bc 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelMessage.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/ChannelMessage.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.ATTACHMENT_TAG import org.opensearch.commons.notifications.NotificationConstants.HTML_DESCRIPTION_TAG import org.opensearch.commons.notifications.NotificationConstants.TEXT_DESCRIPTION_TAG import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -70,7 +48,6 @@ data class ChannelMessage( @JvmStatic @Throws(IOException::class) fun parse(parser: XContentParser): ChannelMessage { - var textDescription: String? = null var htmlDescription: String? = null var attachment: Attachment? = null diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Chime.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Chime.kt index 2ffb0fd0..ee1c5aa8 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Chime.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Chime.kt @@ -1,42 +1,20 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.URL_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateUrl +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/ConfigType.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/ConfigType.kt index 0dbe70a6..50f1f3bc 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/ConfigType.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/ConfigType.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -57,6 +35,16 @@ enum class ConfigType(val tag: String) { return tag } }, + SNS("sns") { + override fun toString(): String { + return tag + } + }, + SES_ACCOUNT("ses_account") { + override fun toString(): String { + return tag + } + }, SMTP_ACCOUNT("smtp_account") { override fun toString(): String { return tag @@ -66,6 +54,11 @@ enum class ConfigType(val tag: String) { override fun toString(): String { return tag } + }, + MICROSOFT_TEAMS("microsoft_teams") { + override fun toString(): String { + return tag + } }; companion object { diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/DeliveryStatus.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/DeliveryStatus.kt index e824242b..2e25cc70 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/DeliveryStatus.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/DeliveryStatus.kt @@ -1,42 +1,20 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.STATUS_CODE_TAG import org.opensearch.commons.notifications.NotificationConstants.STATUS_TEXT_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Email.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Email.kt index c8da0f85..a67619b0 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Email.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Email.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.EMAIL_ACCOUNT_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.EMAIL_GROUP_ID_LIST_TAG import org.opensearch.commons.notifications.NotificationConstants.RECIPIENT_LIST_TAG import org.opensearch.commons.utils.logger +import org.opensearch.commons.utils.objectList import org.opensearch.commons.utils.stringList -import org.opensearch.commons.utils.validateEmail +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -47,15 +25,12 @@ import java.io.IOException */ data class Email( val emailAccountID: String, - val recipients: List<String>, + val recipients: List<EmailRecipient>, val emailGroupIds: List<String> ) : BaseConfigData { init { require(!Strings.isNullOrEmpty(emailAccountID)) { "emailAccountID is null or empty" } - recipients.forEach { - validateEmail(it) - } } companion object { @@ -79,7 +54,7 @@ data class Email( @Throws(IOException::class) fun parse(parser: XContentParser): Email { var emailAccountID: String? = null - var recipients: List<String> = listOf() + var recipients: List<EmailRecipient> = listOf() var emailGroupIds: List<String> = listOf() XContentParserUtils.ensureExpectedToken( @@ -92,7 +67,7 @@ data class Email( parser.nextToken() when (fieldName) { EMAIL_ACCOUNT_ID_TAG -> emailAccountID = parser.text() - RECIPIENT_LIST_TAG -> recipients = parser.stringList() + RECIPIENT_LIST_TAG -> recipients = parser.objectList { EmailRecipient.parse(it) } EMAIL_GROUP_ID_LIST_TAG -> emailGroupIds = parser.stringList() else -> { parser.skipChildren() @@ -111,7 +86,7 @@ data class Email( */ constructor(input: StreamInput) : this( emailAccountID = input.readString(), - recipients = input.readStringList(), + recipients = input.readList(EmailRecipient.reader), emailGroupIds = input.readStringList() ) @@ -120,7 +95,7 @@ data class Email( */ override fun writeTo(output: StreamOutput) { output.writeString(emailAccountID) - output.writeStringCollection(recipients) + output.writeList(recipients) output.writeStringCollection(emailGroupIds) } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/EmailGroup.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailGroup.kt index f3beecd8..c5377530 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/EmailGroup.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailGroup.kt @@ -1,57 +1,28 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors.
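With recipients now modeled as a list of EmailRecipient objects rather than plain strings, per-address validation moves out of Email and into the recipient type itself. A minimal construction sketch with illustrative ids and addresses:

import org.opensearch.commons.notifications.model.Email
import org.opensearch.commons.notifications.model.EmailRecipient

val email = Email(
    emailAccountID = "smtp-account-config-id",      // illustrative config id
    recipients = listOf(
        EmailRecipient("alerts@example.com"),        // each address is validated inside EmailRecipient
        EmailRecipient("oncall@example.com")
    ),
    emailGroupIds = listOf("email-group-config-id")  // illustrative group id
)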
See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.RECIPIENT_LIST_TAG import org.opensearch.commons.utils.logger -import org.opensearch.commons.utils.stringList -import org.opensearch.commons.utils.validateEmail +import org.opensearch.commons.utils.objectList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** * Data class representing Email group. */ data class EmailGroup( - val recipients: List + val recipients: List ) : BaseConfigData { - init { - recipients.forEach { - validateEmail(it) - } - } - companion object { private val log by logger(EmailGroup::class.java) @@ -72,7 +43,7 @@ data class EmailGroup( @JvmStatic @Throws(IOException::class) fun parse(parser: XContentParser): EmailGroup { - var recipients: List? = null + var recipients: List? = null XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_OBJECT, @@ -83,7 +54,7 @@ data class EmailGroup( val fieldName = parser.currentName() parser.nextToken() when (fieldName) { - RECIPIENT_LIST_TAG -> recipients = parser.stringList() + RECIPIENT_LIST_TAG -> recipients = parser.objectList { EmailRecipient.parse(it) } else -> { parser.skipChildren() log.info("Unexpected field: $fieldName, while parsing EmailGroup") @@ -100,14 +71,14 @@ data class EmailGroup( * @param input StreamInput stream to deserialize data from. 
*/ constructor(input: StreamInput) : this( - recipients = input.readStringList() + recipients = input.readList(EmailRecipient.reader) ) /** * {@inheritDoc} */ override fun writeTo(output: StreamOutput) { - output.writeStringCollection(recipients) + output.writeList(recipients) } /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipient.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipient.kt new file mode 100644 index 00000000..c601e840 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipient.kt @@ -0,0 +1,97 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.model + +import org.opensearch.commons.notifications.NotificationConstants.RECIPIENT_TAG +import org.opensearch.commons.utils.logger +import org.opensearch.commons.utils.validateEmail +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** + * Data class representing Email recipient. + */ +data class EmailRecipient( + val recipient: String +) : BaseConfigData { + + init { + validateEmail(recipient) + } + + companion object { + private val log by logger(EmailRecipient::class.java) + + /** + * reader to create instance of class from writable. + */ + val reader = Writeable.Reader { EmailRecipient(it) } + + /** + * Parser to parse xContent + */ + val xParser = XParser { parse(it) } + + /** + * Creator used in REST communication. + * @param parser XContentParser to deserialize data from. + */ + @JvmStatic + @Throws(IOException::class) + fun parse(parser: XContentParser): EmailRecipient { + var recipient: String? = null + + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_OBJECT, + parser.currentToken(), + parser + ) + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = parser.currentName() + parser.nextToken() + when (fieldName) { + RECIPIENT_TAG -> recipient = parser.text() + else -> { + parser.skipChildren() + log.info("Unexpected field: $fieldName, while parsing EmailRecipient") + } + } + } + recipient ?: throw IllegalArgumentException("$RECIPIENT_TAG field absent") + return EmailRecipient(recipient) + } + } + + /** + * Constructor used in transport action communication. + * @param input StreamInput stream to deserialize data from. + */ + constructor(input: StreamInput) : this( + recipient = input.readString() + ) + + /** + * {@inheritDoc} + */ + override fun writeTo(output: StreamOutput) { + output.writeString(recipient) + } + + /** + * {@inheritDoc} + */ + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + builder!! 
+ return builder.startObject() + .field(RECIPIENT_TAG, recipient) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatus.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatus.kt index a65224de..d1d78165 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatus.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatus.kt @@ -1,43 +1,21 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.DELIVERY_STATUS_TAG import org.opensearch.commons.notifications.NotificationConstants.RECIPIENT_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateEmail +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/EventSource.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/EventSource.kt index caf86dcd..91deb445 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/EventSource.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/EventSource.kt @@ -1,46 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
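EmailRecipient is a small standalone model: construct it (validateEmail runs in init), serialize it with writeTo, or parse it back from XContent. A short sketch; the failure behaviour assumes validateEmail rejects malformed addresses with an exception, in the style of the other require-based checks in these models:

import org.opensearch.commons.notifications.model.EmailRecipient

val recipient = EmailRecipient(recipient = "oncall@example.com")
val malformed = runCatching { EmailRecipient("not-an-email") }
println(malformed.isFailure) // expected to be true under the assumption above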
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.FEATURE_TAG import org.opensearch.commons.notifications.NotificationConstants.REFERENCE_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.SEVERITY_TAG import org.opensearch.commons.notifications.NotificationConstants.TAGS_TAG import org.opensearch.commons.notifications.NotificationConstants.TITLE_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.stringList +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -49,7 +26,6 @@ import java.io.IOException data class EventSource( val title: String, val referenceId: String, - val feature: Feature, val severity: SeverityType = SeverityType.INFO, val tags: List = listOf() ) : BaseModel { @@ -75,7 +51,6 @@ data class EventSource( fun parse(parser: XContentParser): EventSource { var title: String? = null var referenceId: String? = null - var feature: Feature? 
= null var severity: SeverityType = SeverityType.INFO var tags: List = emptyList() @@ -90,7 +65,6 @@ data class EventSource( when (fieldName) { TITLE_TAG -> title = parser.text() REFERENCE_ID_TAG -> referenceId = parser.text() - FEATURE_TAG -> feature = Feature.fromTagOrDefault(parser.text()) SEVERITY_TAG -> severity = SeverityType.fromTagOrDefault(parser.text()) TAGS_TAG -> tags = parser.stringList() else -> { @@ -101,12 +75,10 @@ data class EventSource( } title ?: throw IllegalArgumentException("$TITLE_TAG field absent") referenceId ?: throw IllegalArgumentException("$REFERENCE_ID_TAG field absent") - feature ?: throw IllegalArgumentException("$FEATURE_TAG field absent") return EventSource( title, referenceId, - feature, severity, tags ) @@ -121,7 +93,6 @@ data class EventSource( return builder.startObject() .field(TITLE_TAG, title) .field(REFERENCE_ID_TAG, referenceId) - .field(FEATURE_TAG, feature.tag) .field(SEVERITY_TAG, severity.tag) .field(TAGS_TAG, tags) .endObject() @@ -134,7 +105,6 @@ data class EventSource( constructor(input: StreamInput) : this( title = input.readString(), referenceId = input.readString(), - feature = input.readEnum(Feature::class.java), severity = input.readEnum(SeverityType::class.java), tags = input.readStringList() ) @@ -145,7 +115,6 @@ data class EventSource( override fun writeTo(output: StreamOutput) { output.writeString(title) output.writeString(referenceId) - output.writeEnum(feature) output.writeEnum(severity) output.writeStringCollection(tags) } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/EventStatus.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/EventStatus.kt index 1233a997..8d1b5a7c 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/EventStatus.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/EventStatus.kt @@ -1,40 +1,10 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
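Callers that previously tagged an EventSource with a Feature now build it from the document metadata alone; severity and tags keep their defaults when omitted. A sketch with illustrative values:

import org.opensearch.commons.notifications.model.EventSource
import org.opensearch.commons.notifications.model.SeverityType

val source = EventSource(
    title = "Disk usage above 80%",  // illustrative values
    referenceId = "monitor-123",
    severity = SeverityType.INFO,
    tags = listOf("alerting")
)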
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_NAME_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TYPE_TAG @@ -43,6 +13,14 @@ import org.opensearch.commons.notifications.NotificationConstants.EMAIL_RECIPIEN import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.objectList +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -64,6 +42,8 @@ data class EventStatus( ConfigType.WEBHOOK -> requireNotNull(deliveryStatus) ConfigType.SLACK -> requireNotNull(deliveryStatus) ConfigType.EMAIL -> require(emailRecipientStatus.isNotEmpty()) + ConfigType.SNS -> requireNotNull(deliveryStatus) + ConfigType.MICROSOFT_TEAMS -> requireNotNull(deliveryStatus) ConfigType.NONE -> log.info("Some config field not recognized") else -> { log.info("non-allowed config type for Status") diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Feature.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Feature.kt deleted file mode 100644 index 84b37139..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Feature.kt +++ /dev/null @@ -1,71 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
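The new SNS and MICROSOFT_TEAMS branches take the same requireNotNull(deliveryStatus) path as WEBHOOK and SLACK, so an EventStatus for those channels must carry a delivery result. A hedged sketch of that piece only; the DeliveryStatus parameter names are assumed from the STATUS_CODE_TAG and STATUS_TEXT_TAG constants, since its full signature is not shown here:

import org.opensearch.commons.notifications.model.DeliveryStatus

// assumed constructor shape: DeliveryStatus(statusCode, statusText)
val delivered = DeliveryStatus(statusCode = "200", statusText = "Success")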
- * - */ - -package org.opensearch.commons.notifications.model - -import org.opensearch.commons.utils.EnumParser - -/** - * Features using notification plugin - */ -enum class Feature(val tag: String) { - NONE("none") { - override fun toString(): String { - return tag - } - }, - ALERTING("alerting") { - override fun toString(): String { - return tag - } - }, - INDEX_MANAGEMENT("index_management") { - override fun toString(): String { - return tag - } - }, - REPORTS("reports") { - override fun toString(): String { - return tag - } - }; - - companion object { - private val tagMap = values().associateBy { it.tag } - - val enumParser = EnumParser { fromTagOrDefault(it) } - - /** - * Get Feature from tag or NONE if not found - * @param tag the tag - * @return Feature corresponding to tag. NONE if invalid tag. - */ - fun fromTagOrDefault(tag: String): Feature { - return tagMap[tag] ?: NONE - } - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannelList.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannelList.kt deleted file mode 100644 index 6fc57b22..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/FeatureChannelList.kt +++ /dev/null @@ -1,93 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ - -package org.opensearch.commons.notifications.model - -import org.apache.lucene.search.TotalHits -import org.opensearch.action.search.SearchResponse -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.commons.notifications.NotificationConstants.FEATURE_CONFIG_LIST_TAG - -/** - * FeatureChannel search results - */ -class FeatureChannelList : SearchResults { - - /** - * single item result constructor - */ - constructor(objectItem: FeatureChannel) : super(FEATURE_CONFIG_LIST_TAG, objectItem) - - /** - * multiple items result constructor - */ - constructor(objectList: List) : this( - 0, - objectList.size.toLong(), - TotalHits.Relation.EQUAL_TO, - objectList - ) - - /** - * all param constructor - */ - constructor( - startIndex: Long, - totalHits: Long, - totalHitRelation: TotalHits.Relation, - objectList: List - ) : super(startIndex, totalHits, totalHitRelation, FEATURE_CONFIG_LIST_TAG, objectList) - - /** - * Constructor used in transport action communication. - * @param input StreamInput stream to deserialize data from. 
- */ - constructor(input: StreamInput) : super(input, FeatureChannel.reader) - - /** - * Construct object from XContentParser - */ - constructor(parser: XContentParser) : super(parser, FEATURE_CONFIG_LIST_TAG) - - /** - * Construct object from SearchResponse - */ - constructor(from: Long, response: SearchResponse, searchHitParser: SearchHitParser) : super( - from, - response, - searchHitParser, - FEATURE_CONFIG_LIST_TAG - ) - - /** - * {@inheritDoc} - */ - override fun parseItem(parser: XContentParser): FeatureChannel { - return FeatureChannel.parse(parser) - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/HttpMethodType.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/HttpMethodType.kt new file mode 100644 index 00000000..8485e4fd --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/HttpMethodType.kt @@ -0,0 +1,36 @@ +package org.opensearch.commons.notifications.model + +import org.opensearch.commons.utils.EnumParser + +enum class HttpMethodType(val tag: String) { + POST("POST") { + override fun toString(): String { + return tag + } + }, + PUT("PUT") { + override fun toString(): String { + return tag + } + }, + PATCH("PATCH") { + override fun toString(): String { + return tag + } + }; + + companion object { + private val tagMap = values().associateBy { it.tag } + + val enumParser = EnumParser { fromTagOrDefault(it) } + + /** + * Get HttpMethodType from tag or POST if not found + * @param tag the tag + * @return MethodType corresponding to tag. POST if invalid tag. + */ + fun fromTagOrDefault(tag: String): HttpMethodType { + return tagMap[tag] ?: POST + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeams.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeams.kt new file mode 100644 index 00000000..48e32f15 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeams.kt @@ -0,0 +1,99 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.model + +import org.opensearch.commons.notifications.NotificationConstants.URL_TAG +import org.opensearch.commons.utils.logger +import org.opensearch.commons.utils.validateUrl +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** + * Data class representing MicrosoftTeams channel. + */ +data class MicrosoftTeams( + val url: String +) : BaseConfigData { + + init { + require(!Strings.isNullOrEmpty(url)) { "URL is null or empty" } + validateUrl(url) + } + + companion object { + private val log by logger(MicrosoftTeams::class.java) + + /** + * reader to create instance of class from writable. + */ + val reader = Writeable.Reader { MicrosoftTeams(it) } + + /** + * Parser to parse xContent + */ + val xParser = XParser { parse(it) } + + /** + * Creator used in REST communication. + * @param parser XContentParser to deserialize data from. + */ + @JvmStatic + @Throws(IOException::class) + fun parse(parser: XContentParser): MicrosoftTeams { + var url: String? 
= null + + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_OBJECT, + parser.currentToken(), + parser + ) + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = parser.currentName() + parser.nextToken() + when (fieldName) { + URL_TAG -> url = parser.text() + else -> { + parser.skipChildren() + log.info("Unexpected field: $fieldName, while parsing MicrosoftTeams destination") + } + } + } + url ?: throw IllegalArgumentException("$URL_TAG field absent") + return MicrosoftTeams(url) + } + } + + /** + * Constructor used in transport action communication. + * @param input StreamInput stream to deserialize data from. + */ + constructor(input: StreamInput) : this( + url = input.readString() + ) + + /** + * {@inheritDoc} + */ + override fun writeTo(output: StreamOutput) { + output.writeString(url) + } + + /** + * {@inheritDoc} + */ + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + builder!! + return builder.startObject() + .field(URL_TAG, url) + .endObject() + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfig.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfig.kt index 30d72147..5b0b8a90 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfig.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfig.kt @@ -1,52 +1,27 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
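Both new files are self-contained: MicrosoftTeams validates its webhook URL at construction time, and HttpMethodType falls back to POST for unknown tags. A short usage sketch with an illustrative webhook URL:

import org.opensearch.commons.notifications.model.HttpMethodType
import org.opensearch.commons.notifications.model.MicrosoftTeams

val teams = MicrosoftTeams(url = "https://example.webhook.office.com/webhookb2/abc") // illustrative URL
val patch = HttpMethodType.fromTagOrDefault("PATCH")  // PATCH
val fallback = HttpMethodType.fromTagOrDefault("HEAD") // POST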
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TYPE_TAG import org.opensearch.commons.notifications.NotificationConstants.DESCRIPTION_TAG -import org.opensearch.commons.notifications.NotificationConstants.FEATURE_LIST_TAG import org.opensearch.commons.notifications.NotificationConstants.IS_ENABLED_TAG import org.opensearch.commons.notifications.NotificationConstants.NAME_TAG import org.opensearch.commons.notifications.model.config.ConfigDataProperties.createConfigData import org.opensearch.commons.notifications.model.config.ConfigDataProperties.getReaderForConfigType import org.opensearch.commons.notifications.model.config.ConfigDataProperties.validateConfigData -import org.opensearch.commons.utils.enumSet import org.opensearch.commons.utils.fieldIfNotNull import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException -import java.util.EnumSet /** * Data class representing Notification config. @@ -55,7 +30,6 @@ data class NotificationConfig( val name: String, val description: String, val configType: ConfigType, - val features: EnumSet, val configData: BaseConfigData?, val isEnabled: Boolean = true ) : BaseModel { @@ -89,7 +63,6 @@ data class NotificationConfig( var name: String? = null var description = "" var configType: ConfigType? = null - var features: EnumSet? = null var isEnabled = true var configData: BaseConfigData? 
= null XContentParserUtils.ensureExpectedToken( @@ -104,7 +77,6 @@ data class NotificationConfig( NAME_TAG -> name = parser.text() DESCRIPTION_TAG -> description = parser.text() CONFIG_TYPE_TAG -> configType = ConfigType.fromTagOrDefault(parser.text()) - FEATURE_LIST_TAG -> features = parser.enumSet(Feature.enumParser) IS_ENABLED_TAG -> isEnabled = parser.booleanValue() else -> { val configTypeForTag = ConfigType.fromTagOrDefault(fieldName) @@ -119,12 +91,10 @@ data class NotificationConfig( } name ?: throw IllegalArgumentException("$NAME_TAG field absent") configType ?: throw IllegalArgumentException("$CONFIG_TYPE_TAG field absent") - features ?: throw IllegalArgumentException("$FEATURE_LIST_TAG field absent") return NotificationConfig( name, description, configType, - features, configData, isEnabled ) @@ -140,7 +110,6 @@ data class NotificationConfig( .field(NAME_TAG, name) .field(DESCRIPTION_TAG, description) .field(CONFIG_TYPE_TAG, configType.tag) - .field(FEATURE_LIST_TAG, features) .field(IS_ENABLED_TAG, isEnabled) .fieldIfNotNull(configType.tag, configData) .endObject() @@ -154,7 +123,6 @@ data class NotificationConfig( name = input.readString(), description = input.readString(), configType = input.readEnum(ConfigType::class.java), - features = input.readEnumSet(Feature::class.java), isEnabled = input.readBoolean(), configData = input.readOptionalWriteable(getReaderForConfigType(input.readEnum(ConfigType::class.java))) ) @@ -166,7 +134,6 @@ data class NotificationConfig( output.writeString(name) output.writeString(description) output.writeEnum(configType) - output.writeEnumSet(features) output.writeBoolean(isEnabled) // Reading config types multiple times in constructor output.writeEnum(configType) diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfo.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfo.kt index 7a949bbb..34b6285b 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfo.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfo.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. */ -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
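With the features EnumSet gone, a NotificationConfig is now just name, description, type, typed config data, and the enabled flag. A construction sketch reusing the illustrative Microsoft Teams channel from above:

import org.opensearch.commons.notifications.model.ConfigType
import org.opensearch.commons.notifications.model.MicrosoftTeams
import org.opensearch.commons.notifications.model.NotificationConfig

val config = NotificationConfig(
    name = "build-failures",                       // illustrative values
    description = "Channel for CI build failures",
    configType = ConfigType.MICROSOFT_TEAMS,
    configData = MicrosoftTeams("https://example.webhook.office.com/webhookb2/abc"),
    isEnabled = true
)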
- * - */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.CONFIG_ID_TAG import org.opensearch.commons.notifications.NotificationConstants.CONFIG_TAG import org.opensearch.commons.notifications.NotificationConstants.CREATED_TIME_TAG -import org.opensearch.commons.notifications.NotificationConstants.TENANT_TAG import org.opensearch.commons.notifications.NotificationConstants.UPDATED_TIME_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException import java.time.Instant @@ -50,7 +28,6 @@ data class NotificationConfigInfo( val configId: String, val lastUpdatedTime: Instant, val createdTime: Instant, - val tenant: String, val notificationConfig: NotificationConfig ) : BaseModel { @@ -76,7 +53,6 @@ data class NotificationConfigInfo( var configId: String? = null var lastUpdatedTime: Instant? = null var createdTime: Instant? = null - var tenant: String? = null var notificationConfig: NotificationConfig? 
= null XContentParserUtils.ensureExpectedToken( @@ -91,7 +67,6 @@ data class NotificationConfigInfo( CONFIG_ID_TAG -> configId = parser.text() UPDATED_TIME_TAG -> lastUpdatedTime = Instant.ofEpochMilli(parser.longValue()) CREATED_TIME_TAG -> createdTime = Instant.ofEpochMilli(parser.longValue()) - TENANT_TAG -> tenant = parser.text() CONFIG_TAG -> notificationConfig = NotificationConfig.parse(parser) else -> { parser.skipChildren() @@ -102,13 +77,11 @@ data class NotificationConfigInfo( configId ?: throw IllegalArgumentException("$CONFIG_ID_TAG field absent") lastUpdatedTime ?: throw IllegalArgumentException("$UPDATED_TIME_TAG field absent") createdTime ?: throw IllegalArgumentException("$CREATED_TIME_TAG field absent") - tenant = tenant ?: "" notificationConfig ?: throw IllegalArgumentException("$CONFIG_TAG field absent") return NotificationConfigInfo( configId, lastUpdatedTime, createdTime, - tenant, notificationConfig ) } @@ -122,7 +95,6 @@ data class NotificationConfigInfo( configId = input.readString(), lastUpdatedTime = input.readInstant(), createdTime = input.readInstant(), - tenant = input.readString(), notificationConfig = NotificationConfig.reader.read(input) ) @@ -133,7 +105,6 @@ data class NotificationConfigInfo( output.writeString(configId) output.writeInstant(lastUpdatedTime) output.writeInstant(createdTime) - output.writeString(tenant) notificationConfig.writeTo(output) } @@ -146,7 +117,6 @@ data class NotificationConfigInfo( .field(CONFIG_ID_TAG, configId) .field(UPDATED_TIME_TAG, lastUpdatedTime.toEpochMilli()) .field(CREATED_TIME_TAG, createdTime.toEpochMilli()) - .field(TENANT_TAG, tenant) .field(CONFIG_TAG, notificationConfig) .endObject() } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResult.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResult.kt index 48952335..7a44836e 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResult.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResult.kt @@ -1,37 +1,15 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
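NotificationConfigInfo likewise drops tenant from both its transport and XContent forms, leaving the id, the two timestamps, and the wrapped config. A sketch of the remaining shape:

import org.opensearch.commons.notifications.model.NotificationConfig
import org.opensearch.commons.notifications.model.NotificationConfigInfo
import java.time.Instant

fun wrap(config: NotificationConfig): NotificationConfigInfo =
    NotificationConfigInfo(
        configId = "config-id-1",        // illustrative id
        lastUpdatedTime = Instant.now(),
        createdTime = Instant.now(),
        notificationConfig = config
    )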
- * */ package org.opensearch.commons.notifications.model import org.apache.lucene.search.TotalHits import org.opensearch.action.search.SearchResponse -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.xcontent.XContentParser import org.opensearch.commons.notifications.NotificationConstants.CONFIG_LIST_TAG +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.XContentParser /** * NotificationConfig search results diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEvent.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEvent.kt index c6bf6928..2b3c8f40 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEvent.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEvent.kt @@ -1,42 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.common.xcontent.XContentType import org.opensearch.commons.notifications.NotificationConstants.EVENT_SOURCE_TAG import org.opensearch.commons.notifications.NotificationConstants.STATUS_LIST_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.objectList +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentHelper +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -124,4 +105,14 @@ data class NotificationEvent( .field(STATUS_LIST_TAG, statusList) .endObject() } + + // Overriding toString so consuming plugins can log/output this from the sendNotification response if needed + override fun toString(): String { + return try { + XContentHelper.toXContent(this, XContentType.JSON, EMPTY_PARAMS, true).utf8ToString() + } catch (e: IOException) { + log.debug("Failed to convert NotificationEvent to string", e) + super.toString() + " threw " + e.toString() + } + } } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfo.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfo.kt deleted file mode 100644 index 27f9d0ad..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfo.kt +++ /dev/null @@ -1,153 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
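The new toString() override renders the event as JSON through XContentHelper, so a consuming plugin can log the sendNotification result directly instead of unpacking it first. A sketch assuming the plugin already holds a NotificationEvent and uses log4j, as the rest of the OpenSearch codebase does:

import org.apache.logging.log4j.LogManager
import org.opensearch.commons.notifications.model.NotificationEvent

private val log = LogManager.getLogger("notification-demo")

fun logEvent(event: NotificationEvent) {
    // toString() now returns the JSON form, or falls back to the default toString on serialization errors
    log.info("sendNotification returned: {}", event)
}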
- * - */ -package org.opensearch.commons.notifications.model - -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils -import org.opensearch.commons.notifications.NotificationConstants.CREATED_TIME_TAG -import org.opensearch.commons.notifications.NotificationConstants.EVENT_ID_TAG -import org.opensearch.commons.notifications.NotificationConstants.EVENT_TAG -import org.opensearch.commons.notifications.NotificationConstants.TENANT_TAG -import org.opensearch.commons.notifications.NotificationConstants.UPDATED_TIME_TAG -import org.opensearch.commons.utils.logger -import java.io.IOException -import java.time.Instant - -/** - * Data class representing Notification event with information. - */ -data class NotificationEventInfo( - val eventId: String, - val lastUpdatedTime: Instant, - val createdTime: Instant, - val tenant: String, - val notificationEvent: NotificationEvent -) : BaseModel { - - init { - require(!Strings.isNullOrEmpty(eventId)) { "event id is null or empty" } - } - - companion object { - private val log by logger(NotificationEventInfo::class.java) - - /** - * reader to create instance of class from writable. - */ - val reader = Writeable.Reader { NotificationEventInfo(it) } - - /** - * Creator used in REST communication. - * @param parser XContentParser to deserialize data from. - */ - @JvmStatic - @Throws(IOException::class) - fun parse(parser: XContentParser): NotificationEventInfo { - var eventId: String? = null - var lastUpdatedTime: Instant? = null - var createdTime: Instant? = null - var tenant: String? = null - var notificationEvent: NotificationEvent? = null - - XContentParserUtils.ensureExpectedToken( - XContentParser.Token.START_OBJECT, - parser.currentToken(), - parser - ) - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - val fieldName = parser.currentName() - parser.nextToken() - when (fieldName) { - EVENT_ID_TAG -> eventId = parser.text() - UPDATED_TIME_TAG -> lastUpdatedTime = Instant.ofEpochMilli(parser.longValue()) - CREATED_TIME_TAG -> createdTime = Instant.ofEpochMilli(parser.longValue()) - TENANT_TAG -> tenant = parser.text() - EVENT_TAG -> notificationEvent = NotificationEvent.parse(parser) - else -> { - parser.skipChildren() - log.info("Unexpected field: $fieldName, while parsing event info") - } - } - } - eventId ?: throw IllegalArgumentException("$EVENT_ID_TAG field absent") - lastUpdatedTime ?: throw IllegalArgumentException("$UPDATED_TIME_TAG field absent") - createdTime ?: throw IllegalArgumentException("$CREATED_TIME_TAG field absent") - tenant = tenant ?: "" - notificationEvent ?: throw IllegalArgumentException("$EVENT_TAG field absent") - return NotificationEventInfo( - eventId, - lastUpdatedTime, - createdTime, - tenant, - notificationEvent - ) - } - } - - /** - * {@inheritDoc} - */ - override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { - builder!! 
- return builder.startObject() - .field(EVENT_ID_TAG, eventId) - .field(UPDATED_TIME_TAG, lastUpdatedTime.toEpochMilli()) - .field(CREATED_TIME_TAG, createdTime.toEpochMilli()) - .field(TENANT_TAG, tenant) - .field(EVENT_TAG, notificationEvent) - .endObject() - } - - /** - * Constructor used in transport action communication. - * @param input StreamInput stream to deserialize data from. - */ - constructor(input: StreamInput) : this( - eventId = input.readString(), - lastUpdatedTime = input.readInstant(), - createdTime = input.readInstant(), - tenant = input.readString(), - notificationEvent = NotificationEvent.reader.read(input) - ) - - /** - * {@inheritDoc} - */ - override fun writeTo(output: StreamOutput) { - output.writeString(eventId) - output.writeInstant(lastUpdatedTime) - output.writeInstant(createdTime) - output.writeString(tenant) - notificationEvent.writeTo(output) - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResult.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResult.kt deleted file mode 100644 index 71ed1ba5..00000000 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResult.kt +++ /dev/null @@ -1,93 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ - -package org.opensearch.commons.notifications.model - -import org.apache.lucene.search.TotalHits -import org.opensearch.action.search.SearchResponse -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.commons.notifications.NotificationConstants.EVENT_LIST_TAG - -/** - * Notification Event search results - */ -class NotificationEventSearchResult : SearchResults { - - /** - * single item result constructor - */ - constructor(objectItem: NotificationEventInfo) : super(EVENT_LIST_TAG, objectItem) - - /** - * multiple items result constructor - */ - constructor(objectList: List) : this( - 0, - objectList.size.toLong(), - TotalHits.Relation.EQUAL_TO, - objectList - ) - - /** - * all param constructor - */ - constructor( - startIndex: Long, - totalHits: Long, - totalHitRelation: TotalHits.Relation, - objectList: List - ) : super(startIndex, totalHits, totalHitRelation, EVENT_LIST_TAG, objectList) - - /** - * Constructor used in transport action communication. - * @param input StreamInput stream to deserialize data from. 
- */ - constructor(input: StreamInput) : super(input, NotificationEventInfo.reader) - - /** - * Construct object from XContentParser - */ - constructor(parser: XContentParser) : super(parser, EVENT_LIST_TAG) - - /** - * Construct object from SearchResponse - */ - constructor(from: Long, response: SearchResponse, searchHitParser: SearchHitParser) : super( - from, - response, - searchHitParser, - EVENT_LIST_TAG - ) - - /** - * {@inheritDoc} - */ - override fun parseItem(parser: XContentParser): NotificationEventInfo { - return NotificationEventInfo.parse(parser) - } -} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/SearchResults.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/SearchResults.kt index 868cfebe..f6b3a295 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/SearchResults.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/SearchResults.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -31,17 +9,17 @@ import org.apache.lucene.search.TotalHits.Relation import org.apache.lucene.search.TotalHits.Relation.EQUAL_TO import org.apache.lucene.search.TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO import org.opensearch.action.search.SearchResponse -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent.Params -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.START_INDEX_TAG import org.opensearch.commons.notifications.NotificationConstants.TOTAL_HITS_TAG import org.opensearch.commons.notifications.NotificationConstants.TOTAL_HIT_RELATION_TAG import org.opensearch.commons.utils.logger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent.Params +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import org.opensearch.search.SearchHit abstract class SearchResults : BaseModel { diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/SesAccount.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/SesAccount.kt new file mode 100644 index 00000000..13370ed9 --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/SesAccount.kt @@ -0,0 +1,119 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.model + +import org.opensearch.commons.notifications.NotificationConstants.FROM_ADDRESS_TAG +import org.opensearch.commons.notifications.NotificationConstants.REGION_TAG +import org.opensearch.commons.notifications.NotificationConstants.ROLE_ARN_TAG +import org.opensearch.commons.utils.fieldIfNotNull +import org.opensearch.commons.utils.logger +import org.opensearch.commons.utils.validateEmail +import org.opensearch.commons.utils.validateIamRoleArn +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException + +/** + * Data class representing SES account channel. + */ +data class SesAccount( + val awsRegion: String, + val roleArn: String?, + val fromAddress: String +) : BaseConfigData { + + init { + require(!Strings.isNullOrEmpty(awsRegion)) { "awsRegion is null or empty" } + validateEmail(fromAddress) + if (roleArn != null) { + validateIamRoleArn(roleArn) + } + } + + companion object { + private val log by logger(SesAccount::class.java) + + /** + * reader to create instance of class from writable. 
+ */ + val reader = Writeable.Reader { SesAccount(it) } + + /** + * Parser to parse xContent + */ + val xParser = XParser { parse(it) } + + @JvmStatic + @Throws(IOException::class) + fun parse(parser: XContentParser): SesAccount { + var awsRegion: String? = null + var roleArn: String? = null + var fromAddress: String? = null + + XContentParserUtils.ensureExpectedToken( + XContentParser.Token.START_OBJECT, + parser.currentToken(), + parser + ) + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = parser.currentName() + parser.nextToken() + when (fieldName) { + REGION_TAG -> awsRegion = parser.text() + ROLE_ARN_TAG -> roleArn = parser.textOrNull() + FROM_ADDRESS_TAG -> fromAddress = parser.text() + else -> { + parser.skipChildren() + log.info("Unexpected field: $fieldName, while parsing SesAccount") + } + } + } + awsRegion ?: throw IllegalArgumentException("$REGION_TAG field absent") + fromAddress ?: throw IllegalArgumentException("$FROM_ADDRESS_TAG field absent") + return SesAccount( + awsRegion, + roleArn, + fromAddress + ) + } + } + + /** + * {@inheritDoc} + */ + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + return builder!!.startObject() + .field(REGION_TAG, awsRegion) + .fieldIfNotNull(ROLE_ARN_TAG, roleArn) + .field(FROM_ADDRESS_TAG, fromAddress) + .endObject() + } + + /** + * Constructor used in transport action communication. + * @param input StreamInput stream to deserialize data from. + */ + constructor(input: StreamInput) : this( + awsRegion = input.readString(), + roleArn = input.readOptionalString(), + fromAddress = input.readString() + ) + + /** + * {@inheritDoc} + */ + override fun writeTo(out: StreamOutput) { + out.writeString(awsRegion) + out.writeOptionalString(roleArn) + out.writeString(fromAddress) + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/SeverityType.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/SeverityType.kt index 6f84f07b..0fa8a427 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/SeverityType.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/SeverityType.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
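As a quick illustration of the new SesAccount model above, here is a minimal sketch that constructs an account and round-trips it over the transport layer the same way the Writeable tests elsewhere in this change do; the region, role ARN, and from-address values are placeholders rather than values taken from this change.

```kotlin
import org.opensearch.common.io.stream.BytesStreamOutput
import org.opensearch.commons.notifications.model.SesAccount
import org.opensearch.core.common.io.stream.StreamInput

fun main() {
    // awsRegion and fromAddress are mandatory; roleArn is optional and, when set,
    // must satisfy validateIamRoleArn(). All values here are illustrative.
    val account = SesAccount(
        awsRegion = "us-east-1",
        roleArn = "arn:aws:iam::123456789012:role/ses-sender",
        fromAddress = "alerts@example.com"
    )

    // Serialize with writeTo() and read back through the companion reader.
    val out = BytesStreamOutput()
    account.writeTo(out)
    val copy = SesAccount.reader.read(StreamInput.wrap(out.bytes().toBytesRef().bytes))
    check(copy == account) // data class equality holds after the round trip
}
```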
- * */ package org.opensearch.commons.notifications.model diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Slack.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Slack.kt index e5d3c3f6..b4433b95 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Slack.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Slack.kt @@ -1,42 +1,20 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.URL_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateUrl +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/SmtpAccount.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/SmtpAccount.kt index 21e2155e..ca3a762a 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/SmtpAccount.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/SmtpAccount.kt @@ -1,45 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. 
This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.FROM_ADDRESS_TAG import org.opensearch.commons.notifications.NotificationConstants.HOST_TAG import org.opensearch.commons.notifications.NotificationConstants.METHOD_TAG import org.opensearch.commons.notifications.NotificationConstants.PORT_TAG import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateEmail +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Sns.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Sns.kt new file mode 100644 index 00000000..d0809f9f --- /dev/null +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Sns.kt @@ -0,0 +1,96 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.model + +import org.opensearch.commons.notifications.NotificationConstants.ROLE_ARN_TAG +import org.opensearch.commons.notifications.NotificationConstants.TOPIC_ARN_TAG +import org.opensearch.commons.utils.fieldIfNotNull +import org.opensearch.commons.utils.logger +import org.opensearch.commons.utils.validateIamRoleArn +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils +import java.io.IOException +import java.util.regex.Pattern + +/** + * SNS notification data model + */ +data class Sns(val topicArn: String, val roleArn: String?) : BaseConfigData { + + init { + require(SNS_ARN_REGEX.matcher(topicArn).find()) { "Invalid AWS SNS topic ARN: $topicArn" } + if (roleArn != null) { + validateIamRoleArn(roleArn) + } + } + + override fun toXContent(builder: XContentBuilder, params: ToXContent.Params): XContentBuilder { + return builder.startObject() + .field(TOPIC_ARN_TAG, topicArn) + .fieldIfNotNull(ROLE_ARN_TAG, roleArn) + .endObject() + } + + /** + * Constructor used in transport action communication. + * @param input StreamInput stream to deserialize data from. 
+ */ + constructor(input: StreamInput) : this( + topicArn = input.readString(), + roleArn = input.readOptionalString() + ) + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + out.writeString(topicArn) + out.writeOptionalString(roleArn) + } + + companion object { + private val log by logger(Sns::class.java) + + private val SNS_ARN_REGEX = + Pattern.compile("^arn:aws(-[^:]+)?:sns:([a-zA-Z0-9-]+):([0-9]{12}):([a-zA-Z_0-9+=,.@\\-_/]+)$") + + /** + * reader to create instance of class from writable. + */ + val reader = Writeable.Reader { Sns(it) } + + /** + * Parser to parse xContent + */ + val xParser = XParser { parse(it) } + + @JvmStatic + @Throws(IOException::class) + fun parse(xcp: XContentParser): Sns { + var topicArn: String? = null + var roleArn: String? = null + + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, xcp.currentToken(), xcp) + while (xcp.nextToken() != XContentParser.Token.END_OBJECT) { + val fieldName = xcp.currentName() + xcp.nextToken() + when (fieldName) { + TOPIC_ARN_TAG -> topicArn = xcp.textOrNull() + ROLE_ARN_TAG -> roleArn = xcp.textOrNull() + else -> { + xcp.skipChildren() + log.info("Unexpected field: $fieldName, while parsing SNS destination") + } + } + } + topicArn ?: throw IllegalArgumentException("$TOPIC_ARN_TAG field absent") + return Sns(topicArn, roleArn) + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/Webhook.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/Webhook.kt index 55a4cb3e..e48f29f4 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/Webhook.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/Webhook.kt @@ -1,45 +1,24 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
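For the new Sns model above, a minimal sketch of the init-block validation is shown below; the topic ARN is a made-up example, and the failure case relies only on the require() check defined in this change.

```kotlin
import org.opensearch.commons.notifications.model.Sns

fun main() {
    // A well-formed SNS topic ARN passes the SNS_ARN_REGEX check; roleArn stays optional.
    val sns = Sns(
        topicArn = "arn:aws:sns:us-west-2:123456789012:alerting-notifications",
        roleArn = null
    )
    println(sns.topicArn)

    // A malformed ARN is rejected up front by the init block.
    val rejected = runCatching { Sns(topicArn = "not-a-topic-arn", roleArn = null) }
    check(rejected.exceptionOrNull() is IllegalArgumentException)
}
```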
- * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.Strings -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.commons.notifications.NotificationConstants.HEADER_PARAMS_TAG +import org.opensearch.commons.notifications.NotificationConstants.METHOD_TAG import org.opensearch.commons.notifications.NotificationConstants.URL_TAG import org.opensearch.commons.utils.STRING_READER import org.opensearch.commons.utils.STRING_WRITER import org.opensearch.commons.utils.logger import org.opensearch.commons.utils.validateUrl +import org.opensearch.core.common.Strings +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.io.IOException /** @@ -47,7 +26,8 @@ import java.io.IOException */ data class Webhook( val url: String, - val headerParams: Map = mapOf() + val headerParams: Map = mapOf(), + val method: HttpMethodType = HttpMethodType.POST ) : BaseConfigData { init { @@ -77,6 +57,7 @@ data class Webhook( fun parse(parser: XContentParser): Webhook { var url: String? = null var headerParams: Map = mapOf() + var method = HttpMethodType.POST XContentParserUtils.ensureExpectedToken( XContentParser.Token.START_OBJECT, @@ -89,6 +70,7 @@ data class Webhook( when (fieldName) { URL_TAG -> url = parser.text() HEADER_PARAMS_TAG -> headerParams = parser.mapStrings() + METHOD_TAG -> method = HttpMethodType.fromTagOrDefault(parser.text()) else -> { parser.skipChildren() log.info("Unexpected field: $fieldName, while parsing Webhook destination") @@ -96,7 +78,7 @@ data class Webhook( } } url ?: throw IllegalArgumentException("$URL_TAG field absent") - return Webhook(url, headerParams) + return Webhook(url, headerParams, method) } } @@ -108,6 +90,7 @@ data class Webhook( return builder.startObject() .field(URL_TAG, url) .field(HEADER_PARAMS_TAG, headerParams) + .field(METHOD_TAG, method.tag) .endObject() } @@ -117,7 +100,8 @@ data class Webhook( */ constructor(input: StreamInput) : this( url = input.readString(), - headerParams = input.readMap(STRING_READER, STRING_READER) + headerParams = input.readMap(STRING_READER, STRING_READER), + method = input.readEnum(HttpMethodType::class.java) ) /** @@ -126,5 +110,6 @@ data class Webhook( override fun writeTo(output: StreamOutput) { output.writeString(url) output.writeMap(headerParams, STRING_WRITER, STRING_WRITER) + output.writeEnum(method) } } diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/XParser.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/XParser.kt index 894b21a9..6e3316a4 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/XParser.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/XParser.kt @@ -1,32 +1,10 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the 
Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model -import org.opensearch.common.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParser /** * Functional interface to create config data object using XContentParser diff --git a/src/main/kotlin/org/opensearch/commons/notifications/model/config/ConfigDataProperties.kt b/src/main/kotlin/org/opensearch/commons/notifications/model/config/ConfigDataProperties.kt index 1500879c..28063e30 100644 --- a/src/main/kotlin/org/opensearch/commons/notifications/model/config/ConfigDataProperties.kt +++ b/src/main/kotlin/org/opensearch/commons/notifications/model/config/ConfigDataProperties.kt @@ -1,42 +1,23 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
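Looking back at the Webhook model updated earlier in this change, which now carries an HttpMethodType, the sketch below shows how the default method behaves and how it survives serialization; the URL and header values are placeholders.

```kotlin
import org.opensearch.common.io.stream.BytesStreamOutput
import org.opensearch.commons.notifications.model.HttpMethodType
import org.opensearch.commons.notifications.model.Webhook
import org.opensearch.core.common.io.stream.StreamInput

fun main() {
    // method defaults to HttpMethodType.POST, so existing documents without the
    // METHOD_TAG field keep their previous behavior.
    val webhook = Webhook(
        url = "https://hooks.example.com/notify",
        headerParams = mapOf("Content-Type" to "application/json")
    )

    // The method is written with writeEnum() and restored with readEnum(), so it
    // survives a transport round trip alongside the url and header parameters.
    val out = BytesStreamOutput()
    webhook.writeTo(out)
    val copy = Webhook(StreamInput.wrap(out.bytes().toBytesRef().bytes))
    check(copy.method == HttpMethodType.POST)
}
```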
- * */ package org.opensearch.commons.notifications.model.config -import org.opensearch.common.io.stream.Writeable.Reader -import org.opensearch.common.xcontent.XContentParser import org.opensearch.commons.notifications.model.BaseConfigData import org.opensearch.commons.notifications.model.Chime import org.opensearch.commons.notifications.model.ConfigType import org.opensearch.commons.notifications.model.Email import org.opensearch.commons.notifications.model.EmailGroup +import org.opensearch.commons.notifications.model.MicrosoftTeams +import org.opensearch.commons.notifications.model.SesAccount import org.opensearch.commons.notifications.model.Slack import org.opensearch.commons.notifications.model.SmtpAccount +import org.opensearch.commons.notifications.model.Sns import org.opensearch.commons.notifications.model.Webhook import org.opensearch.commons.notifications.model.XParser +import org.opensearch.core.common.io.stream.Writeable.Reader +import org.opensearch.core.xcontent.XContentParser internal object ConfigDataProperties { /** @@ -53,8 +34,11 @@ internal object ConfigDataProperties { Pair(ConfigType.CHIME, ConfigProperty(Chime.reader, Chime.xParser)), Pair(ConfigType.WEBHOOK, ConfigProperty(Webhook.reader, Webhook.xParser)), Pair(ConfigType.EMAIL, ConfigProperty(Email.reader, Email.xParser)), + Pair(ConfigType.SNS, ConfigProperty(Sns.reader, Sns.xParser)), + Pair(ConfigType.SES_ACCOUNT, ConfigProperty(SesAccount.reader, SesAccount.xParser)), Pair(ConfigType.EMAIL_GROUP, ConfigProperty(EmailGroup.reader, EmailGroup.xParser)), - Pair(ConfigType.SMTP_ACCOUNT, ConfigProperty(SmtpAccount.reader, SmtpAccount.xParser)) + Pair(ConfigType.SMTP_ACCOUNT, ConfigProperty(SmtpAccount.reader, SmtpAccount.xParser)), + Pair(ConfigType.MICROSOFT_TEAMS, ConfigProperty(MicrosoftTeams.reader, MicrosoftTeams.xParser)) ) /** @@ -78,6 +62,9 @@ internal object ConfigDataProperties { ConfigType.EMAIL_GROUP -> configData is EmailGroup ConfigType.SMTP_ACCOUNT -> configData is SmtpAccount ConfigType.CHIME -> configData is Chime + ConfigType.SNS -> configData is Sns + ConfigType.SES_ACCOUNT -> configData is SesAccount + ConfigType.MICROSOFT_TEAMS -> configData is MicrosoftTeams ConfigType.NONE -> true } } diff --git a/src/main/kotlin/org/opensearch/commons/utils/EnumHelpers.kt b/src/main/kotlin/org/opensearch/commons/utils/EnumHelpers.kt index 3a18407e..6ffcdaed 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/EnumHelpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/EnumHelpers.kt @@ -1,35 +1,13 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.utils -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import java.util.EnumSet inline fun > XContentParser.enumSet(enumParser: EnumParser): EnumSet { diff --git a/src/main/kotlin/org/opensearch/commons/utils/EnumParser.kt b/src/main/kotlin/org/opensearch/commons/utils/EnumParser.kt index 2a678f1f..b327d53c 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/EnumParser.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/EnumParser.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.utils diff --git a/src/main/kotlin/org/opensearch/commons/utils/Helpers.kt b/src/main/kotlin/org/opensearch/commons/utils/Helpers.kt index ae08c1d0..c31f0cab 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/Helpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/Helpers.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.utils diff --git a/src/main/kotlin/org/opensearch/commons/utils/OpenForTesting.kt b/src/main/kotlin/org/opensearch/commons/utils/OpenForTesting.kt index ccbd1536..1549e2e5 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/OpenForTesting.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/OpenForTesting.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.utils diff --git a/src/main/kotlin/org/opensearch/commons/utils/SecureClientWrapper.kt b/src/main/kotlin/org/opensearch/commons/utils/SecureClientWrapper.kt index 45fa29bd..ffdb1676 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/SecureClientWrapper.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/SecureClientWrapper.kt @@ -1,36 +1,11 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.utils -import org.opensearch.action.ActionFuture -import org.opensearch.action.ActionListener import org.opensearch.action.ActionRequest -import org.opensearch.action.ActionResponse import org.opensearch.action.ActionType import org.opensearch.action.bulk.BulkRequest import org.opensearch.action.bulk.BulkResponse @@ -60,7 +35,10 @@ import org.opensearch.action.termvectors.TermVectorsResponse import org.opensearch.action.update.UpdateRequest import org.opensearch.action.update.UpdateResponse import org.opensearch.client.Client +import org.opensearch.common.action.ActionFuture import org.opensearch.common.util.concurrent.ThreadContext +import org.opensearch.core.action.ActionListener +import org.opensearch.core.action.ActionResponse /** * Wrapper class on [Client] with security context removed. 
diff --git a/src/main/kotlin/org/opensearch/commons/utils/TransportHelpers.kt b/src/main/kotlin/org/opensearch/commons/utils/TransportHelpers.kt index 4763a48a..07d1ddb4 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/TransportHelpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/TransportHelpers.kt @@ -1,37 +1,17 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.utils -import org.opensearch.common.io.stream.InputStreamStreamInput -import org.opensearch.common.io.stream.OutputStreamStreamOutput -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.io.stream.StreamOutput -import org.opensearch.common.io.stream.Writeable +import org.opensearch.core.common.io.stream.InputStreamStreamInput +import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.common.io.stream.OutputStreamStreamOutput +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable import java.io.ByteArrayInputStream import java.io.ByteArrayOutputStream @@ -58,3 +38,19 @@ inline fun recreateObject(writeable: Writeable, block: (Stream } } } + +/** + * Re create the object from the writeable. Uses NamedWriteableRegistry in order to build the aggregations. + * This method needs to be inline and reified so that when this is called from + * doExecute() of transport action, the object may be created from other JVM. 
+ */ +inline fun recreateObject(writeable: Writeable, namedWriteableRegistry: NamedWriteableRegistry, block: (StreamInput) -> Request): Request { + ByteArrayOutputStream().use { byteArrayOutputStream -> + OutputStreamStreamOutput(byteArrayOutputStream).use { + writeable.writeTo(it) + InputStreamStreamInput(ByteArrayInputStream(byteArrayOutputStream.toByteArray())).use { streamInput -> + return block(NamedWriteableAwareStreamInput(streamInput, namedWriteableRegistry)) + } + } + } +} diff --git a/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt b/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt index 93bcdc77..3bca2f9b 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/ValidationHelpers.kt @@ -1,42 +1,39 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.utils import java.net.URL +import java.util.regex.Pattern + +/** + * This regex asserts that the string: + * Starts with a lowercase letter, or digit + * Contains a sequence of characters followed by an optional colon and another sequence of characters + * The sequences of characters can include lowercase letters, uppercase letters, digits, underscores, or hyphens + * The total length of the string can range from 1 to 255 characters + */ +val CLUSTER_NAME_REGEX = Regex("^(?=.{1,255}$)[a-z0-9]([a-zA-Z0-9_-]*:?[a-zA-Z0-9_-]*)$") + +/** + * This regex asserts that the string: + * Starts with a lowercase letter, digit, or asterisk + * Contains a sequence of characters followed by an optional colon and another sequence of characters + * The sequences of characters can include lowercase letters, uppercase letters, digits, underscores, asterisks, or hyphens + * The total length of the string can range from 1 to 255 characters + */ +val CLUSTER_PATTERN_REGEX = Regex("^(?=.{1,255}$)[a-z0-9*]([a-zA-Z0-9_*-]*:?[a-zA-Z0-9_*-]*)$") // Valid ID characters = (All Base64 chars + "_-") to support UUID format and Base64 encoded IDs private val VALID_ID_CHARS: Set = (('a'..'z') + ('A'..'Z') + ('0'..'9') + '+' + '/' + '_' + '-').toSet() +// Invalid characters in a new name field: [* ? < > | #] +private val INVALID_NAME_CHARS = "^\\*\\?<>|#" + fun validateUrl(urlString: String) { require(isValidUrl(urlString)) { "Invalid URL or unsupported" } - val url = URL(urlString) - require("https" == url.protocol) // Support only HTTPS. 
HTTP and other protocols not supported - // TODO : Add hosts deny list } fun validateEmail(email: String) { @@ -49,8 +46,7 @@ fun validateId(idString: String) { fun isValidUrl(urlString: String): Boolean { val url = URL(urlString) // throws MalformedURLException if URL is invalid - // TODO : Add hosts deny list - return ("https" == url.protocol) // Support only HTTPS. HTTP and other protocols not supported + return ("https" == url.protocol || "http" == url.protocol) // Support only http/https, other protocols not supported } /** @@ -73,3 +69,20 @@ fun isValidEmail(email: String): Boolean { fun isValidId(idString: String): Boolean { return idString.isNotBlank() && idString.all { VALID_ID_CHARS.contains(it) } } + +fun validateIamRoleArn(roleArn: String) { + val roleArnRegex = Pattern.compile("^arn:aws(-[^:]+)?:iam::([0-9]{12}):([a-zA-Z_0-9+=,.@\\-_/]+)$") + require(roleArnRegex.matcher(roleArn).find()) { "Invalid AWS role ARN: $roleArn " } +} + +fun isValidName(name: String): Boolean { + // Regex to restrict string so that it cannot start with [_, -, +], + // contain two consecutive periods or contain invalid chars + val regex = Regex("""^(?![_\-\+])(?!.*\.\.)[^$INVALID_NAME_CHARS]+$""") + + return name.matches(regex) +} + +fun getInvalidNameChars(): String { + return INVALID_NAME_CHARS +} diff --git a/src/main/kotlin/org/opensearch/commons/utils/XContentHelpers.kt b/src/main/kotlin/org/opensearch/commons/utils/XContentHelpers.kt index 92dee033..e2fe0ddf 100644 --- a/src/main/kotlin/org/opensearch/commons/utils/XContentHelpers.kt +++ b/src/main/kotlin/org/opensearch/commons/utils/XContentHelpers.kt @@ -1,41 +1,19 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
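The validation helpers above are small pure functions, so their behavior is easy to sketch; the inputs below are sample values, and the expectations follow directly from the regexes and character set defined in this change.

```kotlin
import org.opensearch.commons.utils.getInvalidNameChars
import org.opensearch.commons.utils.isValidName
import org.opensearch.commons.utils.isValidUrl
import org.opensearch.commons.utils.validateIamRoleArn

fun main() {
    // isValidUrl() now accepts both http and https; other protocols are still rejected.
    check(isValidUrl("https://hooks.example.com/notify"))
    check(isValidUrl("http://internal.example.com/webhook"))
    check(!isValidUrl("ftp://example.com/file"))

    // isValidName() rejects names starting with '_', '-', or '+', names with consecutive
    // periods, and names containing the characters reported by getInvalidNameChars().
    check(isValidName("prod-alerts.channel"))
    check(!isValidName("_hidden"))
    check(!isValidName("bad..name"))
    println("Disallowed name characters: ${getInvalidNameChars()}")

    // validateIamRoleArn() throws IllegalArgumentException for malformed ARNs.
    validateIamRoleArn("arn:aws:iam::123456789012:role/notifications-role")
    val bad = runCatching { validateIamRoleArn("arn:aws:iam::12:role/short-account") }
    check(bad.exceptionOrNull() is IllegalArgumentException)
}
```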
- * */ package org.opensearch.commons.utils -import org.opensearch.common.io.stream.StreamInput -import org.opensearch.common.xcontent.DeprecationHandler -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent -import org.opensearch.common.xcontent.ToXContentObject -import org.opensearch.common.xcontent.XContentBuilder -import org.opensearch.common.xcontent.XContentParser -import org.opensearch.common.xcontent.XContentParserUtils import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.DeprecationHandler +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.ToXContentObject +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.core.xcontent.XContentParserUtils import org.opensearch.rest.RestRequest fun StreamInput.createJsonParser(): XContentParser { diff --git a/src/test/java/org/opensearch/commons/InjectSecurityTest.java b/src/test/java/org/opensearch/commons/InjectSecurityTest.java index 0f6e0c83..b2dea7f3 100644 --- a/src/test/java/org/opensearch/commons/InjectSecurityTest.java +++ b/src/test/java/org/opensearch/commons/InjectSecurityTest.java @@ -1,43 +1,28 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
*/ package org.opensearch.commons; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.opensearch.commons.ConfigConstants.INJECTED_USER; import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_INJECTED_ROLES; +import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT; import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_USE_INJECTED_USER_FOR_PLUGINS; import java.util.Arrays; +import java.util.HashMap; +import java.util.List; import org.junit.jupiter.api.Test; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.commons.authuser.User; public class InjectSecurityTest { @@ -102,4 +87,99 @@ public void testInjectUser() { assertEquals("plugin", threadContext.getTransient("ctx.name")); assertNull(threadContext.getTransient(INJECTED_USER)); } + + @Test + public void testInjectUserInfo() { + Settings settings = Settings.builder().build(); + Settings headerSettings = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(headerSettings); + threadContext.putHeader("name", "opendistro"); + threadContext.putTransient("ctx.name", "plugin"); + + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + + User user = new User( + "Bob", + List.of("backendRole1", "backendRole2"), + List.of("role1", "role2"), + List.of("attr1", "attr2"), + "tenant1" + ); + try (InjectSecurity helper = new InjectSecurity("test-name", null, threadContext)) { + helper.injectUserInfo(user); + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + assertNotNull(threadContext.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)); + assertEquals( + "Bob|backendRole1,backendRole2|role1,role2|tenant1", + threadContext.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT) + ); + } + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + assertNull(threadContext.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT)); + } + + @Test + public void testInjectProperty() { + Settings settings = Settings.builder().put(OPENSEARCH_SECURITY_USE_INJECTED_USER_FOR_PLUGINS, false).build(); + Settings headerSettings = Settings.builder().put("request.headers.default", "1").build(); + ThreadContext threadContext = new ThreadContext(headerSettings); + threadContext.putHeader("name", "opendistro"); + threadContext.putTransient("ctx.name", "plugin"); + + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + + try (InjectSecurity helper = new InjectSecurity("test-name", settings, threadContext)) { + helper.inject("joe", Arrays.asList("ops-role", "logs-role")); + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", 
threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + assertNotNull(threadContext.getTransient(OPENSEARCH_SECURITY_INJECTED_ROLES)); + // cannot inject property that is already set + assertFalse(helper.injectProperty(OPENSEARCH_SECURITY_INJECTED_ROLES, "new value")); + assertEquals("plugin|ops-role,logs-role", threadContext.getTransient(OPENSEARCH_SECURITY_INJECTED_ROLES)); + // cannot inject invalid property/value + assertFalse(helper.injectProperty("", "new value")); + assertFalse(helper.injectProperty(null, "new value")); + assertFalse(helper.injectProperty("property", null)); + // can inject non-set valid properties + assertTrue(helper.injectProperty("property1", true)); + assertTrue(helper.injectProperty("property2", "some value")); + assertTrue(helper.injectProperty("property3", "")); + assertTrue(helper.injectProperty("property4", new HashMap() { + { + put("key", "value"); + } + })); + // verify the set properties are not null and equal to what was set + assertNull(threadContext.getTransient("property")); + assertNotNull(threadContext.getTransient("property1")); + assertEquals(true, threadContext.getTransient("property1")); + assertNotNull(threadContext.getTransient("property2")); + assertEquals("some value", threadContext.getTransient("property2")); + assertNotNull(threadContext.getTransient("property3")); + assertEquals("", threadContext.getTransient("property3")); + assertNotNull(threadContext.getTransient("property4")); + assertEquals(new HashMap() { + { + put("key", "value"); + } + }, threadContext.getTransient("property4")); + } + assertEquals("1", threadContext.getHeader("default")); + assertEquals("opendistro", threadContext.getHeader("name")); + assertEquals("plugin", threadContext.getTransient("ctx.name")); + assertNull(threadContext.getTransient(OPENSEARCH_SECURITY_INJECTED_ROLES)); + assertNull(threadContext.getTransient("property1")); + assertNull(threadContext.getTransient("property2")); + assertNull(threadContext.getTransient("property3")); + assertNull(threadContext.getTransient("property4")); + } } diff --git a/src/test/java/org/opensearch/commons/authuser/UserTest.java b/src/test/java/org/opensearch/commons/authuser/UserTest.java index ca727e6d..df4e6602 100644 --- a/src/test/java/org/opensearch/commons/authuser/UserTest.java +++ b/src/test/java/org/opensearch/commons/authuser/UserTest.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
*/ package org.opensearch.commons.authuser; @@ -34,13 +13,15 @@ import java.io.IOException; import java.util.Arrays; +import java.util.List; import org.junit.jupiter.api.Test; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.commons.ConfigConstants; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; public class UserTest { @@ -223,4 +204,46 @@ public void testParseUserStringMalformed() { User user = User.parse(str); assertNull(user); } + + @Test + public void testUserIsAdminDnTrue() { + Settings settings = Settings + .builder() + .putList(ConfigConstants.OPENSEARCH_SECURITY_AUTHCZ_ADMIN_DN, List.of("CN=kirk,OU=client,O=client,L=test, C=de")) + .build(); + ThreadContext tc = new ThreadContext(Settings.EMPTY); + tc + .putTransient( + OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + "CN=kirk,OU=client,O=client,L=test, C=de|backendrole1,backendrole2|role1,role2" + ); + String str = tc.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + User user = User.parse(str); + assertTrue(user.isAdminDn(settings)); + } + + @Test + public void testUserIsAdminDnFalse() { + Settings settings = Settings + .builder() + .putList(ConfigConstants.OPENSEARCH_SECURITY_AUTHCZ_ADMIN_DN, List.of("CN=spock,OU=client,O=client,L=test, C=de")) + .build(); + ThreadContext tc = new ThreadContext(Settings.EMPTY); + tc + .putTransient( + OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT, + "CN=kirk,OU=client,O=client,L=test, C=de|backendrole1,backendrole2|role1,role2" + ); + String str = tc.getTransient(OPENSEARCH_SECURITY_USER_INFO_THREAD_CONTEXT); + User user = User.parse(str); + assertFalse(user.isAdminDn(settings)); + } + + @Test + public void testUserOrSettingsAreNullOrEmpty() { + Settings settings = Settings.EMPTY; + User user = User.parse("username|backend_role1|role1"); + assertFalse(user.isAdminDn(null)); + assertFalse(user.isAdminDn(settings)); + } } diff --git a/src/test/java/org/opensearch/commons/destination/message/LegacyChimeMessageTest.java b/src/test/java/org/opensearch/commons/destination/message/LegacyChimeMessageTest.java new file mode 100644 index 00000000..b5f57b46 --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/message/LegacyChimeMessageTest.java @@ -0,0 +1,79 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; + +public class LegacyChimeMessageTest { + + @Test + public void testBuildingLegacyChimeMessage() { + LegacyChimeMessage message = new LegacyChimeMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + + assertEquals("custom_webhook", message.destinationName); + assertEquals(LegacyDestinationType.LEGACY_CHIME, message.getChannelType()); + assertEquals("Hello world", message.getMessageContent()); + assertEquals("https://amazon.com", message.url); + } + + @Test + public void testRoundTrippingLegacyChimeMessage() throws IOException { + LegacyChimeMessage 
message = new LegacyChimeMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + message.writeTo(out); + + StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); + LegacyChimeMessage newMessage = new LegacyChimeMessage(in); + + assertEquals(newMessage.destinationName, message.destinationName); + assertEquals(newMessage.getChannelType(), message.getChannelType()); + assertEquals(newMessage.getMessageContent(), message.getMessageContent()); + assertEquals(newMessage.url, message.url); + } + + @Test + public void testContentMissingMessage() { + try { + new LegacyChimeMessage.Builder("custom_webhook").withUrl("https://amazon.com").build(); + fail("Building legacy chime message without message should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Message content is missing", e.getMessage()); + } + } + + @Test + public void testUrlMissingMessage() { + try { + new LegacyChimeMessage.Builder("custom_webhook").withMessage("Hello world").build(); + fail("Building legacy chime message without url should fail"); + } catch (IllegalArgumentException e) { + assertEquals("url is invalid or empty", e.getMessage()); + } + } + + @Test + public void testMissingDestinationName() { + try { + new LegacyChimeMessage.Builder(null).withMessage("Hello world").withUrl("https://amazon.com").build(); + fail("Building legacy chime message with null destination name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Channel name must be defined", e.getMessage()); + } + } +} diff --git a/src/test/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessageTest.java b/src/test/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessageTest.java new file mode 100644 index 00000000..2e68952c --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/message/LegacyCustomWebhookMessageTest.java @@ -0,0 +1,156 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.junit.jupiter.api.Test; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; + +public class LegacyCustomWebhookMessageTest { + + @Test + public void testBuildingLegacyCustomWebhookMessage() { + LegacyCustomWebhookMessage message = new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + + assertEquals("custom_webhook", message.destinationName); + assertEquals(LegacyDestinationType.LEGACY_CUSTOM_WEBHOOK, message.getChannelType()); + assertEquals("Hello world", message.getMessageContent()); + assertEquals("https://amazon.com", message.getUrl()); + } + + @Test + public void testRoundTrippingLegacyCustomWebhookMessageWithUrl() throws IOException { + LegacyCustomWebhookMessage message = new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + message.writeTo(out); + + StreamInput in = 
StreamInput.wrap(out.bytes().toBytesRef().bytes); + LegacyCustomWebhookMessage newMessage = new LegacyCustomWebhookMessage(in); + + assertEquals(newMessage.destinationName, message.destinationName); + assertEquals(newMessage.getChannelType(), message.getChannelType()); + assertEquals(newMessage.getMessageContent(), message.getMessageContent()); + assertEquals(newMessage.getUrl(), message.getUrl()); + } + + @Test + public void testRoundTrippingLegacyCustomWebhookMessageWithHostFails() throws IOException { + Map queryParams = new HashMap(); + queryParams.put("token", "sometoken"); + Map headers = new HashMap(); + headers.put("x-token", "sometoken"); + LegacyCustomWebhookMessage message = new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withHost("hooks.chime.aws") + .withPath("incomingwebhooks/abc") + .withMethod(HttpPost.METHOD_NAME) + .withQueryParams(queryParams) + .withHeaderParams(headers) + .withPort(8000) + .withScheme("https") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + try { + message.writeTo(out); + fail("Writing LegacyCustomWebhookMessage with host instead of url to stream output should fail"); + } catch (IllegalStateException e) { + assertEquals("Cannot use LegacyCustomWebhookMessage across transport wire without defining full url.", e.getMessage()); + } + } + + @Test + public void testContentMissingMessage() { + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook").withUrl("https://amazon.com").build(); + fail("Building legacy custom webhook message without message should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Message content is missing", e.getMessage()); + } + } + + @Test + public void testMissingDestinationName() { + try { + new LegacyCustomWebhookMessage.Builder(null).withMessage("Hello world").withUrl("https://amazon.com").build(); + fail("Building legacy custom webhook message with null destination name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Channel name must be defined", e.getMessage()); + } + } + + @Test + public void testUnsupportedHttpMethods() { + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .withMethod(HttpGet.METHOD_NAME) + .build(); + fail("Building legacy custom webhook message with unsupported http methods should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Invalid method supplied. 
Only POST, PUT and PATCH are allowed", e.getMessage()); + } + } + + @Test + public void testURLandHostNameMissingOrEmpty() { + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook").withMessage("Hello world").withMethod(HttpGet.METHOD_NAME).build(); + fail("Building legacy custom webhook message missing or empty url and host name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Either fully qualified URL or host name should be provided", e.getMessage()); + } + + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("") + .withMethod(HttpGet.METHOD_NAME) + .build(); + fail("Building legacy custom webhook message with missing or empty url and host name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Either fully qualified URL or host name should be provided", e.getMessage()); + } + + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withHost("") + .withMethod(HttpGet.METHOD_NAME) + .build(); + fail("Building legacy custom webhook message with missing or empty url and host name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Either fully qualified URL or host name should be provided", e.getMessage()); + } + + try { + new LegacyCustomWebhookMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("") + .withHost("") + .withMethod(HttpGet.METHOD_NAME) + .build(); + fail("Building legacy custom webhook message with missing or empty url and host name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Either fully qualified URL or host name should be provided", e.getMessage()); + } + } +} diff --git a/src/test/java/org/opensearch/commons/destination/message/LegacyEmailMessageTest.java b/src/test/java/org/opensearch/commons/destination/message/LegacyEmailMessageTest.java new file mode 100644 index 00000000..bc4546fd --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/message/LegacyEmailMessageTest.java @@ -0,0 +1,295 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import org.junit.jupiter.api.Test; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; + +public class LegacyEmailMessageTest { + + @Test + public void testBuildingLegacyEmailMessage() { + LegacyEmailMessage message = new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + + assertEquals("email", message.destinationName); + assertEquals(LegacyDestinationType.LEGACY_EMAIL, message.getChannelType()); + assertEquals("test_email", message.getAccountName()); + assertEquals("smtp.test.com", message.getHost()); + assertEquals(123, message.getPort()); + assertEquals("none", message.getMethod()); + assertEquals("test@email.com", message.getFrom()); + assertEquals(Arrays.asList("test2@email.com", "test3@email.com"), message.getRecipients()); + assertEquals("Test Subject", message.getSubject()); + 
assertEquals("Hello world", message.getMessage()); + } + + @Test + public void testRoundTrippingLegacyEmailMessage() throws IOException { + LegacyEmailMessage message = new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + message.writeTo(out); + + StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); + LegacyEmailMessage newMessage = new LegacyEmailMessage(in); + + assertEquals(newMessage.destinationName, message.destinationName); + assertEquals(newMessage.getChannelType(), message.getChannelType()); + assertEquals(newMessage.getAccountName(), message.getAccountName()); + assertEquals(newMessage.getHost(), message.getHost()); + assertEquals(newMessage.getPort(), message.getPort()); + assertEquals(newMessage.getMethod(), message.getMethod()); + assertEquals(newMessage.getFrom(), message.getFrom()); + assertEquals(newMessage.getRecipients(), message.getRecipients()); + assertEquals(newMessage.getSubject(), message.getSubject()); + assertEquals(newMessage.getMessage(), message.getMessage()); + } + + @Test + public void testContentMissingMessage() { + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .build(); + fail("Building legacy email message without message should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Message content is missing", e.getMessage()); + } + } + + @Test + public void testMissingDestinationName() { + try { + new LegacyEmailMessage.Builder(null) + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with null destination name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Channel name must be defined", e.getMessage()); + } + } + + @Test + public void testUnsupportedMethods() { + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("unsupported") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with unsupported method should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Invalid method supplied. 
Only none, ssl and start_tls are allowed", e.getMessage()); + } + } + + @Test + public void testAccountNameMissingOrEmpty() { + try { + new LegacyEmailMessage.Builder("email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with missing account name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Account name should be provided", e.getMessage()); + } + + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with empty account name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Account name should be provided", e.getMessage()); + } + } + + @Test + public void testHostMissingOrEmpty() { + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with missing host should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Host name should be provided", e.getMessage()); + } + + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with empty host should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Host name should be provided", e.getMessage()); + } + } + + @Test + public void testFromMissingOrEmpty() { + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with missing from should fail"); + } catch (IllegalArgumentException e) { + assertEquals("From address should be provided", e.getMessage()); + } + + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with empty from should fail"); + } catch (IllegalArgumentException e) { + assertEquals("From address should be provided", e.getMessage()); + } + } + + @Test + public void testRecipientsMissingOrEmpty() { + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with missing recipients should 
fail"); + } catch (IllegalArgumentException e) { + assertEquals("List of recipients should be provided", e.getMessage()); + } + + try { + new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(List.of()) + .withSubject("Test Subject") + .withMessage("Hello world") + .build(); + fail("Building legacy email message with empty recipients should fail"); + } catch (IllegalArgumentException e) { + assertEquals("List of recipients should be provided", e.getMessage()); + } + } + + @Test + public void testSubjectDefaultsToDestinationNameWhenMissingOrEmpty() { + LegacyEmailMessage message = new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withMessage("Hello world") + .build(); + + assertEquals("email", message.getSubject()); + + message = new LegacyEmailMessage.Builder("email") + .withAccountName("test_email") + .withHost("smtp.test.com") + .withPort(123) + .withMethod("none") + .withFrom("test@email.com") + .withRecipients(Arrays.asList("test2@email.com", "test3@email.com")) + .withSubject("") + .withMessage("Hello world") + .build(); + + assertEquals("email", message.getSubject()); + } +} diff --git a/src/test/java/org/opensearch/commons/destination/message/LegacySNSMessageTest.java b/src/test/java/org/opensearch/commons/destination/message/LegacySNSMessageTest.java new file mode 100644 index 00000000..a9fd1dd3 --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/message/LegacySNSMessageTest.java @@ -0,0 +1,91 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class LegacySNSMessageTest { + + @Test + public void testCreateRoleArnMissingMessage() { + try { + LegacySNSMessage message = new LegacySNSMessage.Builder("sms").withMessage("dummyMessage").build(); + } catch (Exception ex) { + assertEquals("Role arn is missing/invalid: null", ex.getMessage()); + throw ex; + } + } + + @Test + public void testCreateTopicArnMissingMessage() { + try { + LegacySNSMessage message = new LegacySNSMessage.Builder("sms") + .withMessage("dummyMessage") + .withRole("arn:aws:iam::853806060000:role/domain/abc") + .build(); + } catch (Exception ex) { + assertEquals("Topic arn is missing/invalid: null", ex.getMessage()); + throw ex; + } + } + + @Test + public void testCreateContentMissingMessage() { + try { + LegacySNSMessage message = new LegacySNSMessage.Builder("sms") + .withRole("arn:aws:iam::853806060000:role/domain/abc") + .withTopicArn("arn:aws:sns:us-west-2:475313751589:test-notification") + .build(); + } catch (Exception ex) { + assertEquals("Message content is missing", ex.getMessage()); + throw ex; + } + } + + @Test + public void testInValidRoleMessage() { + try { + LegacySNSMessage message = new LegacySNSMessage.Builder("sms") + .withMessage("dummyMessage") + .withRole("dummyRole") + .withTopicArn("arn:aws:sns:us-west-2:475313751589:test-notification") + .build(); + } catch (Exception ex) { + assertEquals("Role arn is missing/invalid: dummyRole", ex.getMessage()); + throw ex; + } + } + + @Test + public void testValidMessage() { + LegacySNSMessage message = new LegacySNSMessage.Builder("sms") + 
.withMessage("dummyMessage") + .withRole("arn:aws:iam::853806060000:role/domain/abc") + .withTopicArn("arn:aws:sns:us-west-2:475313751589:test-notification") + .build(); + assertEquals(LegacyDestinationType.LEGACY_SNS, message.getChannelType()); + assertEquals("sms", message.getChannelName()); + assertEquals("dummyMessage", message.getMessage()); + assertEquals("arn:aws:iam::853806060000:role/domain/abc", message.getRoleArn()); + assertEquals("arn:aws:sns:us-west-2:475313751589:test-notification", message.getTopicArn()); + } + + @Test + public void testInValidChannelName() { + try { + LegacySNSMessage message = new LegacySNSMessage.Builder("") + .withMessage("dummyMessage") + .withRole("arn:aws:iam::853806060000:role/domain/abc") + .withTopicArn("arn:aws:sns:us-west-2:475313751589:test-notification") + .build(); + } catch (Exception ex) { + assertEquals("Channel name must be defined", ex.getMessage()); + throw ex; + } + } +} diff --git a/src/test/java/org/opensearch/commons/destination/message/LegacySlackMessageTest.java b/src/test/java/org/opensearch/commons/destination/message/LegacySlackMessageTest.java new file mode 100644 index 00000000..2ea3ea59 --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/message/LegacySlackMessageTest.java @@ -0,0 +1,89 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.message; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; + +public class LegacySlackMessageTest { + + @Test + public void testBuildingLegacySlackMessage() { + LegacySlackMessage message = new LegacySlackMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + + assertEquals("custom_webhook", message.destinationName); + assertEquals(LegacyDestinationType.LEGACY_SLACK, message.getChannelType()); + assertEquals("Hello world", message.getMessageContent()); + assertEquals("https://amazon.com", message.url); + } + + @Test + public void testRoundTrippingLegacySlackMessage() throws IOException { + LegacySlackMessage message = new LegacySlackMessage.Builder("custom_webhook") + .withMessage("Hello world") + .withUrl("https://amazon.com") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + message.writeTo(out); + + StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); + LegacySlackMessage newMessage = new LegacySlackMessage(in); + + assertEquals(newMessage.destinationName, message.destinationName); + assertEquals(newMessage.getChannelType(), message.getChannelType()); + assertEquals(newMessage.getMessageContent(), message.getMessageContent()); + assertEquals(newMessage.url, message.url); + } + + @Test + public void testContentMissingMessage() { + try { + new LegacySlackMessage.Builder("custom_webhook").withUrl("https://amazon.com").build(); + fail("Building legacy slack message without message should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Message content is missing", e.getMessage()); + } + } + + @Test + public void testUrlMissingMessage() { + try { + new LegacySlackMessage.Builder("custom_webhook").withMessage("Hello world").build(); + fail("Building legacy slack message without url should fail"); + } catch (IllegalArgumentException e) { + 
assertEquals("url is invalid or empty", e.getMessage()); + } + } + + @Test + public void testMissingDestinationName() { + try { + new LegacySlackMessage.Builder(null).withMessage("Hello world").withUrl("https://amazon.com").build(); + fail("Building legacy slack message with null destination name should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Channel name must be defined", e.getMessage()); + } + } + + @Test + public void testUrlEmptyMessage() { + try { + new LegacySlackMessage.Builder("custom_webhook").withMessage("Hello world").withUrl("").build(); + fail("Building legacy slack message with empty url should fail"); + } catch (IllegalArgumentException e) { + assertEquals("Fully qualified URL is missing/invalid: ", e.getMessage()); + } + } +} diff --git a/src/test/java/org/opensearch/commons/destination/response/LegacyDestinationResponseTest.java b/src/test/java/org/opensearch/commons/destination/response/LegacyDestinationResponseTest.java new file mode 100644 index 00000000..dc7cc079 --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/response/LegacyDestinationResponseTest.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.destination.response; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; + +import org.junit.jupiter.api.Test; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; + +public class LegacyDestinationResponseTest { + + @Test + public void testBuildingLegacyDestinationResponse() { + LegacyDestinationResponse res = new LegacyDestinationResponse.Builder() + .withStatusCode(200) + .withResponseContent("Hello world") + .build(); + + assertEquals(200, res.statusCode); + assertEquals("Hello world", res.getResponseContent()); + } + + @Test + public void testRoundTrippingLegacyDestinationResponse() throws IOException { + LegacyDestinationResponse res = new LegacyDestinationResponse.Builder() + .withStatusCode(200) + .withResponseContent("Hello world") + .build(); + BytesStreamOutput out = new BytesStreamOutput(); + res.writeTo(out); + + StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes); + LegacyDestinationResponse newRes = new LegacyDestinationResponse(in); + + assertEquals(res.statusCode, newRes.statusCode, "Round tripping doesn't work"); + assertEquals(res.getResponseContent(), newRes.getResponseContent(), "Round tripping doesn't work"); + } + + @Test + public void testMissingLegacyDestinationResponse() { + try { + new LegacyDestinationResponse.Builder().withStatusCode(200).build(); + fail("Creating LegacyDestinationResponse without response content should fail"); + } catch (IllegalArgumentException ignored) {} + } + + @Test + public void testMissingLegacyDestinationStatusCode() { + try { + new LegacyDestinationResponse.Builder().withResponseContent("Hello world").build(); + fail("Creating LegacyDestinationResponse without status code should fail"); + } catch (IllegalArgumentException ignored) {} + } +} diff --git a/src/test/java/org/opensearch/commons/destination/util/UtilTest.java b/src/test/java/org/opensearch/commons/destination/util/UtilTest.java new file mode 100644 index 00000000..506beb42 --- /dev/null +++ b/src/test/java/org/opensearch/commons/destination/util/UtilTest.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: 
Apache-2.0 + */ + +package org.opensearch.commons.destination.util; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +public class UtilTest { + + @Test + public void testValidSNSTopicArn() { + String topicArn = "arn:aws:sns:us-west-2:475313751589:test-notification"; + assertTrue("topic arn should be valid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws-cn:sns:us-west-2:475313751589:test-notification"; + assertTrue("topic arn should be valid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws-cn:sns:us-west-2:475313751589:test-notification.fifo"; + assertTrue("topic arn should be valid", Util.isValidSNSArn(topicArn)); + } + + @Test + public void testInvalidSNSTopicArn() { + String topicArn = "arn:aws:sns1:us-west-2:475313751589:test-notification"; + assertFalse("topic arn should be Invalid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws:sns:us-west-2:475313751589:test-notification.fifo.fifo"; + assertFalse("topic arn should be Invalid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws:sns:us-west-2:475313751589:test-notification.fi"; + assertFalse("topic arn should be Invalid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws:sns:us-west-2:475313751589:test-notifica.tion"; + assertFalse("topic arn should be Invalid", Util.isValidSNSArn(topicArn)); + topicArn = "arn:aws:sns:us-west-2:475313751589:test-notification&fifo"; + assertFalse("topic arn should be Invalid", Util.isValidSNSArn(topicArn)); + } + + @Test + public void testIAMRoleArn() { + String roleArn = "arn:aws:iam::853806060000:role/domain/abc"; + assertTrue("IAM role arn should be valid", Util.isValidIAMArn(roleArn)); + roleArn = "arn:aws:iam::853806060000:role/domain/a@+=.,-_bc"; + assertTrue("IAM role arn should be valid", Util.isValidIAMArn(roleArn)); + } + + @Test + public void testInvalidIAMRoleArn() { + String roleArn = "arn:aws:iam::85380606000000000:role/domain/010-asdf"; + assertFalse("IAM role arn should be Invalid", Util.isValidIAMArn(roleArn)); + } + + @Test + public void testGetRegion() { + String topicArn = "arn:aws:sns:us-west-2:475313751589:test-notification"; + assertEquals(Util.getRegion(topicArn), "us-west-2"); + } + + @Test(expected = IllegalArgumentException.class) + public void testInvalidGetRegion() { + String topicArn = "arn:aws:abs:us-west-2:475313751589:test-notification"; + assertEquals(Util.getRegion(topicArn), "us-west-2"); + } +} diff --git a/src/test/java/org/opensearch/commons/rest/IntegrationTests.java b/src/test/java/org/opensearch/commons/rest/IntegrationTests.java index ee465086..4fc80ae9 100644 --- a/src/test/java/org/opensearch/commons/rest/IntegrationTests.java +++ b/src/test/java/org/opensearch/commons/rest/IntegrationTests.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. 
This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons.rest; @@ -59,7 +38,7 @@ private Request createSampleRequest() { @Test public void testCreateRestClientWithUser() throws Exception { - RestClient client = new SecureRestClientBuilder("localhost", 9200, true, "admin", "admin").build(); + RestClient client = new SecureRestClientBuilder("localhost", 9200, true, "admin", "myStrongPassword123").build(); Response response = client.performRequest(createSampleRequest()); String responseBody = EntityUtils.toString(response.getEntity()); assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/src/test/java/org/opensearch/commons/rest/SecureRestClientBuilderTest.java b/src/test/java/org/opensearch/commons/rest/SecureRestClientBuilderTest.java index 1fe5a51e..4a034f7e 100644 --- a/src/test/java/org/opensearch/commons/rest/SecureRestClientBuilderTest.java +++ b/src/test/java/org/opensearch/commons/rest/SecureRestClientBuilderTest.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. */ package org.opensearch.commons.rest; diff --git a/src/test/java/org/opensearch/commons/rest/TrustStoreTest.java b/src/test/java/org/opensearch/commons/rest/TrustStoreTest.java index bea767d0..271b1892 100644 --- a/src/test/java/org/opensearch/commons/rest/TrustStoreTest.java +++ b/src/test/java/org/opensearch/commons/rest/TrustStoreTest.java @@ -1,27 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
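The IntegrationTests change above replaces the default admin password with a stronger demo credential. For reference, a minimal sketch of building and using that secured client; the endpoint is a placeholder and the host, port and credentials assume a locally running, security-enabled cluster:

// Illustrative only; endpoint and credentials are placeholders for a local test cluster.
val client: RestClient = SecureRestClientBuilder("localhost", 9200, true, "admin", "myStrongPassword123").build()
try {
    val response = client.performRequest(Request("GET", "/_cluster/health"))
    println(EntityUtils.toString(response.entity))
} finally {
    client.close()
}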
*/ package org.opensearch.commons.rest; @@ -41,7 +20,7 @@ public class TrustStoreTest { public void testCreate() throws Exception { String resourceName = "sample.pem"; String absolutePath = new File(getClass().getClassLoader().getResource(resourceName).getFile()).getAbsolutePath(); - assertTrue(absolutePath.endsWith("/sample.pem")); + assertTrue(absolutePath.endsWith(File.separator + "sample.pem")); KeyStore store = new TrustStore(absolutePath).create(); assertNotNull(store); diff --git a/src/test/kotlin/org/opensearch/commons/alerting/AlertTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/AlertTests.kt new file mode 100644 index 00000000..4a5f2346 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/AlertTests.kt @@ -0,0 +1,87 @@ +package org.opensearch.commons.alerting + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Alert +import java.time.Instant +import java.time.temporal.ChronoUnit + +class AlertTests { + @Test + fun `test alert as template args`() { + val alert = randomAlert().copy(acknowledgedTime = null, lastNotificationTime = null) + + val templateArgs = alert.asTemplateArg() + + assertEquals(templateArgs[Alert.ALERT_ID_FIELD], alert.id, "Template args id does not match") + assertEquals(templateArgs[Alert.ALERT_VERSION_FIELD], alert.version, "Template args version does not match") + assertEquals(templateArgs[Alert.STATE_FIELD], alert.state.toString(), "Template args state does not match") + assertEquals(templateArgs[Alert.ERROR_MESSAGE_FIELD], alert.errorMessage, "Template args error message does not match") + assertEquals(templateArgs[Alert.ACKNOWLEDGED_TIME_FIELD], null, "Template args acknowledged time does not match") + assertEquals(templateArgs[Alert.END_TIME_FIELD], alert.endTime?.toEpochMilli(), "Template args end time does not") + assertEquals(templateArgs[Alert.START_TIME_FIELD], alert.startTime.toEpochMilli(), "Template args start time does not") + assertEquals(templateArgs[Alert.LAST_NOTIFICATION_TIME_FIELD], null, "Template args last notification time does not match") + assertEquals(templateArgs[Alert.SEVERITY_FIELD], alert.severity, "Template args severity does not match") + assertEquals(templateArgs[Alert.CLUSTERS_FIELD], alert.clusters?.joinToString(","), "Template args clusters does not match") + } + + @Test + fun `test agg alert as template args`() { + val alert = randomAlertWithAggregationResultBucket().copy(acknowledgedTime = null, lastNotificationTime = null) + + val templateArgs = alert.asTemplateArg() + + assertEquals(templateArgs[Alert.ALERT_ID_FIELD], alert.id, "Template args id does not match") + assertEquals(templateArgs[Alert.ALERT_VERSION_FIELD], alert.version, "Template args version does not match") + assertEquals(templateArgs[Alert.STATE_FIELD], alert.state.toString(), "Template args state does not match") + assertEquals(templateArgs[Alert.ERROR_MESSAGE_FIELD], alert.errorMessage, "Template args error message does not match") + assertEquals(templateArgs[Alert.ACKNOWLEDGED_TIME_FIELD], null, "Template args acknowledged time does not match") + assertEquals(templateArgs[Alert.END_TIME_FIELD], alert.endTime?.toEpochMilli(), "Template args end time does not") + assertEquals(templateArgs[Alert.START_TIME_FIELD], alert.startTime.toEpochMilli(), "Template args start time does not") + assertEquals(templateArgs[Alert.LAST_NOTIFICATION_TIME_FIELD], null, "Template args last notification time does not match") + 
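The assertions above (and those that continue below) document the shape of Alert.asTemplateArg(): a map keyed by the Alert.*_FIELD constants, with times flattened to epoch millis and clusters joined into a comma-separated string. A short sketch of consuming that map, assuming the map-typed return these assertions imply:

// Hypothetical consumer of the template-args map; randomAlert() comes from the TestHelpers added later in this diff.
val alert = randomAlert()
val args = alert.asTemplateArg()
val summary = "Alert ${args[Alert.ALERT_ID_FIELD]} (severity ${args[Alert.SEVERITY_FIELD]}) " +
    "entered state ${args[Alert.STATE_FIELD]} at ${args[Alert.START_TIME_FIELD]} ms"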
assertEquals(templateArgs[Alert.SEVERITY_FIELD], alert.severity, "Template args severity does not match") + assertEquals(templateArgs[Alert.CLUSTERS_FIELD], alert.clusters?.joinToString(","), "Template args clusters does not match") + assertEquals( + templateArgs[Alert.BUCKET_KEYS], + alert.aggregationResultBucket?.bucketKeys?.joinToString(","), + "Template args bucketKeys do not match" + ) + assertEquals( + templateArgs[Alert.PARENTS_BUCKET_PATH], + alert.aggregationResultBucket?.parentBucketPath, + "Template args parentBucketPath does not match" + ) + } + + @Test + fun `test alert acknowledged`() { + val ackAlert = randomAlert().copy(state = Alert.State.ACKNOWLEDGED) + Assertions.assertTrue(ackAlert.isAcknowledged(), "Alert is not acknowledged") + + val activeAlert = randomAlert().copy(state = Alert.State.ACTIVE) + Assertions.assertFalse(activeAlert.isAcknowledged(), "Alert is acknowledged") + } + + @Test + fun `test alert in audit state`() { + val auditAlert = Alert( + randomQueryLevelMonitor(), + randomQueryLevelTrigger(), + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + actionExecutionResults = listOf(randomActionExecutionResult()) + ) + Assertions.assertFalse(auditAlert.isAcknowledged(), "Alert should not be in acknowledged state") + } + + @Test + fun `test chained alert`() { + val workflow = randomWorkflow() + val trigger = randomChainedAlertTrigger() + val alert = randomChainedAlert(workflow = workflow, trigger = trigger) + assertEquals(alert.monitorId, "") + assertEquals(alert.id, "") + assertEquals(workflow.id, alert.workflowId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/AlertingPluginInterfaceTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/AlertingPluginInterfaceTests.kt new file mode 100644 index 00000000..9b209032 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/AlertingPluginInterfaceTests.kt @@ -0,0 +1,285 @@ +package org.opensearch.commons.alerting + +import com.nhaarman.mockitokotlin2.whenever +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.Answers +import org.mockito.ArgumentMatchers +import org.mockito.Mock +import org.mockito.Mockito +import org.mockito.Mockito.mock +import org.mockito.junit.jupiter.MockitoExtension +import org.opensearch.action.ActionType +import org.opensearch.action.search.SearchResponse +import org.opensearch.client.node.NodeClient +import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.action.AcknowledgeAlertRequest +import org.opensearch.commons.alerting.action.AcknowledgeAlertResponse +import org.opensearch.commons.alerting.action.AcknowledgeChainedAlertRequest +import org.opensearch.commons.alerting.action.DeleteMonitorRequest +import org.opensearch.commons.alerting.action.DeleteMonitorResponse +import org.opensearch.commons.alerting.action.DeleteWorkflowRequest +import org.opensearch.commons.alerting.action.DeleteWorkflowResponse +import org.opensearch.commons.alerting.action.GetAlertsRequest +import org.opensearch.commons.alerting.action.GetAlertsResponse +import org.opensearch.commons.alerting.action.GetFindingsRequest +import org.opensearch.commons.alerting.action.GetFindingsResponse +import org.opensearch.commons.alerting.action.GetMonitorRequest +import org.opensearch.commons.alerting.action.GetMonitorResponse +import org.opensearch.commons.alerting.action.GetWorkflowAlertsRequest +import org.opensearch.commons.alerting.action.GetWorkflowAlertsResponse +import 
org.opensearch.commons.alerting.action.IndexMonitorRequest +import org.opensearch.commons.alerting.action.IndexMonitorResponse +import org.opensearch.commons.alerting.action.IndexWorkflowRequest +import org.opensearch.commons.alerting.action.IndexWorkflowResponse +import org.opensearch.commons.alerting.action.PublishFindingsRequest +import org.opensearch.commons.alerting.action.SearchMonitorRequest +import org.opensearch.commons.alerting.action.SubscribeFindingsResponse +import org.opensearch.commons.alerting.model.FindingDocument +import org.opensearch.commons.alerting.model.FindingWithDocs +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.core.action.ActionListener +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.rest.RestStatus +import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.search.SearchModule + +@Suppress("UNCHECKED_CAST") +@ExtendWith(MockitoExtension::class) +internal class AlertingPluginInterfaceTests { + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private lateinit var client: NodeClient + + @Test + fun indexMonitor() { + val monitor = randomQueryLevelMonitor() + + val request = mock(IndexMonitorRequest::class.java) + val response = IndexMonitorResponse( + Monitor.NO_ID, + Monitor.NO_VERSION, + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + monitor + ) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + val namedWriteableRegistry = NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables) + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + + AlertingPluginInterface.indexMonitor(client, request, namedWriteableRegistry, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun indexWorkflow() { + val workflow = randomWorkflow() + + val request = mock(IndexWorkflowRequest::class.java) + val response = IndexWorkflowResponse( + Workflow.NO_ID, + Workflow.NO_VERSION, + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + workflow + ) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + + AlertingPluginInterface.indexWorkflow(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun indexBucketMonitor() { + val monitor = randomBucketLevelMonitor() + + val request = mock(IndexMonitorRequest::class.java) + val response = IndexMonitorResponse( + Monitor.NO_ID, + Monitor.NO_VERSION, + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + monitor + ) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + val namedWriteableRegistry = NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables) + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.indexMonitor(client, request, namedWriteableRegistry, listener) + 
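These tests stub NodeClient.execute so that the captured ActionListener is completed immediately; outside of tests the same entry point is driven by a real transport call. A minimal caller-side sketch, assuming the client and request are supplied by the consuming plugin:

// Sketch of a consuming plugin invoking the interface directly; client and request are assumed inputs.
fun submitMonitor(client: NodeClient, request: IndexMonitorRequest) {
    val registry = NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables)
    AlertingPluginInterface.indexMonitor(
        client, request, registry,
        object : ActionListener<IndexMonitorResponse> {
            override fun onResponse(response: IndexMonitorResponse) {
                // the response carries the persisted monitor along with its id and version
            }
            override fun onFailure(e: Exception) {
                // propagate or log the transport failure
            }
        }
    )
}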
Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun deleteMonitor() { + val request = mock(DeleteMonitorRequest::class.java) + val response = DeleteMonitorResponse(Monitor.NO_ID, Monitor.NO_VERSION) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + + AlertingPluginInterface.deleteMonitor(client, request, listener) + } + + @Test + fun deleteWorkflow() { + val request = mock(DeleteWorkflowRequest::class.java) + val response = DeleteWorkflowResponse(Workflow.NO_ID, Workflow.NO_VERSION) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + + AlertingPluginInterface.deleteWorkflow(client, request, listener) + } + + @Test + fun getAlerts() { + val monitor = randomQueryLevelMonitor() + val alert = randomAlert(monitor) + val request = mock(GetAlertsRequest::class.java) + val response = GetAlertsResponse(listOf(alert), 1) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.getAlerts(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun getWorkflowAlerts() { + val request = mock(GetWorkflowAlertsRequest::class.java) + val response = GetWorkflowAlertsResponse(listOf(randomChainedAlert()), emptyList(), 1) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.getWorkflowAlerts(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun getFindings() { + val finding = randomFinding() + val documentIds = finding.relatedDocIds + val relatedDocs = mutableListOf() + val request = mock(GetFindingsRequest::class.java) + val documents: Map = mutableMapOf() + for (docId in documentIds) { + val key = "${finding.index}|$docId" + documents[key]?.let { document -> relatedDocs.add(document) } + } + val findingWithDocs = FindingWithDocs(finding, relatedDocs) + val response = GetFindingsResponse(RestStatus.OK, 1, listOf(findingWithDocs)) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.getFindings(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun publishFindings() { + val request = mock(PublishFindingsRequest::class.java) + val response = SubscribeFindingsResponse(status = RestStatus.OK) + val 
listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.publishFinding(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun acknowledgeAlerts() { + val request = mock(AcknowledgeAlertRequest::class.java) + val response = AcknowledgeAlertResponse(acknowledged = listOf(), failed = listOf(), missing = listOf()) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.acknowledgeAlerts(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun acknowledgeChainedAlerts() { + val request = mock(AcknowledgeChainedAlertRequest::class.java) + val response = AcknowledgeAlertResponse(acknowledged = listOf(), failed = listOf(), missing = listOf()) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.acknowledgeChainedAlerts(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun getMonitor() { + val request = mock(GetMonitorRequest::class.java) + val response = GetMonitorResponse("test-id", 1, 1, 1, null, null) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.getMonitor(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } + + @Test + fun searchMonitors() { + val request = mock(SearchMonitorRequest::class.java) + val response = mock(SearchResponse::class.java) + val listener: ActionListener = + mock(ActionListener::class.java) as ActionListener + Mockito.doAnswer { + (it.getArgument(2) as ActionListener) + .onResponse(response) + }.whenever(client).execute(Mockito.any(ActionType::class.java), Mockito.any(), Mockito.any()) + AlertingPluginInterface.searchMonitors(client, request, listener) + Mockito.verify(listener, Mockito.times(1)).onResponse(ArgumentMatchers.eq(response)) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt new file mode 100644 index 00000000..aa315aeb --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/CorrelationAlertTests.kt @@ -0,0 +1,92 @@ +package org.opensearch.commons.alerting + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.CorrelationAlert +import org.opensearch.commons.utils.recreateObject +import java.time.temporal.ChronoUnit + +class 
CorrelationAlertTests { + + @Test + fun `test correlation alert as template args`() { + // Create sample data for CorrelationAlert + val correlationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + + // Generate template args using asTemplateArg() function + val templateArgs = createCorrelationAlertTemplateArgs(correlationAlert) + + assertEquals( + templateArgs["correlated_finding_ids"], + correlationAlert.correlatedFindingIds, + "Template args correlatedFindingIds does not match" + ) + assertEquals( + templateArgs["correlation_rule_id"], + correlationAlert.correlationRuleId, + "Template args correlationRuleId does not match" + ) + assertEquals( + templateArgs["correlation_rule_name"], + correlationAlert.correlationRuleName, + "Template args correlationRuleName does not match" + ) + + // Verify inherited properties from BaseAlert + assertEquals(templateArgs["id"], correlationAlert.id, "alertId1") + assertEquals(templateArgs["version"], correlationAlert.version, "Template args version does not match") + assertEquals(templateArgs["user"], correlationAlert.user, "Template args user does not match") + assertEquals( + templateArgs["trigger_name"], + correlationAlert.triggerName, + "Template args triggerName does not match" + ) + assertEquals(templateArgs["state"], correlationAlert.state, "Template args state does not match") + assertEquals(templateArgs["start_time"], correlationAlert.startTime, "Template args startTime does not match") + assertEquals(templateArgs["end_time"], correlationAlert.endTime, "Template args endTime does not match") + assertEquals( + templateArgs["acknowledged_time"], + correlationAlert.acknowledgedTime, + "Template args acknowledgedTime does not match" + ) + assertEquals( + templateArgs["error_message"], + correlationAlert.errorMessage, + "Template args errorMessage does not match" + ) + assertEquals(templateArgs["severity"], correlationAlert.severity, "Template args severity does not match") + assertEquals( + templateArgs["action_execution_results"], + correlationAlert.actionExecutionResults, + "Template args actionExecutionResults does not match" + ) + } + + @Test + fun `test alert acknowledged`() { + val ackCorrelationAlert = randomCorrelationAlert("alertId1", Alert.State.ACKNOWLEDGED) + Assertions.assertTrue(ackCorrelationAlert.isAcknowledged(), "Alert is not acknowledged") + + val activeCorrelationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + Assertions.assertFalse(activeCorrelationAlert.isAcknowledged(), "Alert is acknowledged") + } + + @Test + fun `Feature Correlation Alert serialize and deserialize should be equal`() { + val correlationAlert = randomCorrelationAlert("alertId1", Alert.State.ACTIVE) + val recreatedAlert = recreateObject(correlationAlert) { CorrelationAlert(it) } + assertEquals(correlationAlert.correlatedFindingIds, recreatedAlert.correlatedFindingIds) + assertEquals(correlationAlert.correlationRuleId, recreatedAlert.correlationRuleId) + assertEquals(correlationAlert.correlationRuleName, recreatedAlert.correlationRuleName) + assertEquals(correlationAlert.triggerName, recreatedAlert.triggerName) + assertEquals(correlationAlert.state, recreatedAlert.state) + val expectedStartTime = correlationAlert.startTime.truncatedTo(ChronoUnit.MILLIS) + val actualStartTime = recreatedAlert.startTime.truncatedTo(ChronoUnit.MILLIS) + assertEquals(expectedStartTime, actualStartTime) + assertEquals(correlationAlert.severity, recreatedAlert.severity) + assertEquals(correlationAlert.id, recreatedAlert.id) + 
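The serialize/deserialize test here relies on the recreateObject helper, which writes a model to a fresh stream and reads it back through the supplied constructor reference. The same pattern applies to the other stream-writable models in this package; a small sketch, assuming Monitor's StreamInput constructor behaves like CorrelationAlert's:

// Generic stream round-trip via recreateObject; randomQueryLevelMonitor() comes from TestHelpers.
val monitor = randomQueryLevelMonitor()
val recreated = recreateObject(monitor) { Monitor(it) }
assertEquals(monitor.name, recreated.name)
assertEquals(monitor.triggers, recreated.triggers)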
assertEquals(correlationAlert.actionExecutionResults, recreatedAlert.actionExecutionResults) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/MonitorTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/MonitorTests.kt new file mode 100644 index 00000000..bc34bf94 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/MonitorTests.kt @@ -0,0 +1,46 @@ +package org.opensearch.commons.alerting + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.test.OpenSearchTestCase +import java.lang.IllegalArgumentException +import java.time.Instant + +internal class MonitorTests { + @Test + fun `test enabled time`() { + val monitor = randomQueryLevelMonitor() + val enabledMonitor = monitor.copy(enabled = true, enabledTime = Instant.now()) + try { + enabledMonitor.copy(enabled = false) + OpenSearchTestCase.fail("Disabling monitor with enabled time set should fail.") + } catch (e: IllegalArgumentException) { + } + + val disabledMonitor = monitor.copy(enabled = false, enabledTime = null) + + try { + disabledMonitor.copy(enabled = true) + OpenSearchTestCase.fail("Enabling monitor without enabled time should fail") + } catch (e: IllegalArgumentException) { + } + } + + @Test + fun `test max triggers`() { + val monitor = randomQueryLevelMonitor() + + val tooManyTriggers = mutableListOf() + var i = 0 + while (i <= 10) { + tooManyTriggers.add(randomQueryLevelTrigger()) + ++i + } + + try { + monitor.copy(triggers = tooManyTriggers) + OpenSearchTestCase.fail("Monitor with too many triggers should be rejected.") + } catch (e: IllegalArgumentException) { + } + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt b/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt new file mode 100644 index 00000000..ccba0b47 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/TestHelpers.kt @@ -0,0 +1,793 @@ +package org.opensearch.commons.alerting + +import com.carrotsearch.randomizedtesting.generators.RandomNumbers +import com.carrotsearch.randomizedtesting.generators.RandomStrings +import junit.framework.TestCase.assertNull +import org.apache.http.Header +import org.apache.http.HttpEntity +import org.opensearch.client.Request +import org.opensearch.client.RequestOptions +import org.opensearch.client.Response +import org.opensearch.client.RestClient +import org.opensearch.client.WarningsHandler +import org.opensearch.common.UUIDs +import org.opensearch.common.settings.Settings +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtAggregationBuilder +import org.opensearch.commons.alerting.aggregation.bucketselectorext.BucketSelectorExtFilter +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.ActionRunResult +import org.opensearch.commons.alerting.model.AggregationResultBucket +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.model.BaseAlert +import org.opensearch.commons.alerting.model.BucketLevelTrigger +import org.opensearch.commons.alerting.model.BucketLevelTriggerRunResult +import org.opensearch.commons.alerting.model.ChainedAlertTrigger +import org.opensearch.commons.alerting.model.ChainedMonitorFindings +import org.opensearch.commons.alerting.model.ClusterMetricsInput 
+import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.CorrelationAlert +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.DocumentLevelTrigger +import org.opensearch.commons.alerting.model.DocumentLevelTriggerRunResult +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.Input +import org.opensearch.commons.alerting.model.InputRunResults +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorRunResult +import org.opensearch.commons.alerting.model.NoOpTrigger +import org.opensearch.commons.alerting.model.QueryLevelTrigger +import org.opensearch.commons.alerting.model.QueryLevelTriggerRunResult +import org.opensearch.commons.alerting.model.Schedule +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.model.Sequence +import org.opensearch.commons.alerting.model.Trigger +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.WorkflowInput +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.ActionExecutionScope +import org.opensearch.commons.alerting.model.action.AlertCategory +import org.opensearch.commons.alerting.model.action.PerAlertActionScope +import org.opensearch.commons.alerting.model.action.PerExecutionActionScope +import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.alerting.util.getBucketKeysHash +import org.opensearch.commons.alerting.util.string +import org.opensearch.commons.authuser.User +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.index.query.QueryBuilders +import org.opensearch.script.Script +import org.opensearch.script.ScriptType +import org.opensearch.search.SearchModule +import org.opensearch.search.aggregations.bucket.terms.IncludeExclude +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder +import org.opensearch.search.builder.SearchSourceBuilder +import java.time.Instant +import java.time.temporal.ChronoUnit +import java.util.Random +import java.util.UUID + +const val ALL_ACCESS_ROLE = "all_access" + +fun randomQueryLevelMonitor( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User = randomUser(), + inputs: List = listOf(SearchInput(emptyList(), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + triggers: List = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomQueryLevelTrigger() }, + enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + withMetadata: Boolean = false +): Monitor { + return Monitor( + name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, + schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + ) +} + +// Monitor of older versions without security. +fun randomQueryLevelMonitorWithoutUser( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + inputs: List = listOf(SearchInput(emptyList(), SearchSourceBuilder().query(QueryBuilders.matchAllQuery()))), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + triggers: List = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomQueryLevelTrigger() }, + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + withMetadata: Boolean = false +): Monitor { + return Monitor( + name = name, monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, + schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = null, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + ) +} + +fun randomBucketLevelMonitor( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User = randomUser(), + inputs: List = listOf( + SearchInput( + emptyList(), + SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) + .aggregation(TermsAggregationBuilder("test_agg").field("test_field")) + ) + ), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + triggers: List = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomBucketLevelTrigger() }, + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + withMetadata: Boolean = false +): Monitor { + return Monitor( + name = name, monitorType = Monitor.MonitorType.BUCKET_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, + schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + ) +} + +fun randomClusterMetricsMonitor( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User = randomUser(), + inputs: List = listOf(randomClusterMetricsInput()), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + triggers: List = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomQueryLevelTrigger() }, + enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + withMetadata: Boolean = false +): Monitor { + return Monitor( + name = name, monitorType = Monitor.MonitorType.CLUSTER_METRICS_MONITOR.value, enabled = enabled, inputs = inputs, + schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + ) +} + +fun randomDocumentLevelMonitor( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User? = randomUser(), + inputs: List = listOf(DocLevelMonitorInput("description", listOf("index"), emptyList())), + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + triggers: List = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomQueryLevelTrigger() }, + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + withMetadata: Boolean = false +): Monitor { + return Monitor( + name = name, monitorType = Monitor.MonitorType.DOC_LEVEL_MONITOR.value, enabled = enabled, inputs = inputs, + schedule = schedule, triggers = triggers, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + uiMetadata = if (withMetadata) mapOf("foo" to "bar") else mapOf() + ) +} + +fun randomWorkflow( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User? = randomUser(), + monitorIds: List? = null, + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + enabledTime: Instant? = if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + triggers: List = listOf(randomChainedAlertTrigger()), + auditDelegateMonitorAlerts: Boolean? = true +): Workflow { + val delegates = mutableListOf() + if (!monitorIds.isNullOrEmpty()) { + delegates.add(Delegate(1, monitorIds[0])) + for (i in 1 until monitorIds.size) { + // Order of monitors in workflow will be the same like forwarded meaning that the first monitorId will be used as second monitor chained finding + delegates.add(Delegate(i + 1, monitorIds [i], ChainedMonitorFindings(monitorIds[i - 1]))) + } + } + var input = listOf(CompositeInput(Sequence(delegates))) + if (input == null) { + input = listOf( + CompositeInput( + Sequence( + listOf(Delegate(1, "delegate1")) + ) + ) + ) + } + return Workflow( + name = name, workflowType = Workflow.WorkflowType.COMPOSITE, enabled = enabled, inputs = input, + schedule = schedule, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + triggers = triggers, auditDelegateMonitorAlerts = auditDelegateMonitorAlerts + ) +} + +fun randomWorkflowWithDelegates( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + user: User? = randomUser(), + input: List, + schedule: Schedule = IntervalSchedule(interval = 5, unit = ChronoUnit.MINUTES), + enabled: Boolean = Random().nextBoolean(), + enabledTime: Instant? 
= if (enabled) Instant.now().truncatedTo(ChronoUnit.MILLIS) else null, + lastUpdateTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + triggers: List<Trigger> = (1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomChainedAlertTrigger() } +): Workflow { + return Workflow( + name = name, workflowType = Workflow.WorkflowType.COMPOSITE, enabled = enabled, inputs = input, + schedule = schedule, enabledTime = enabledTime, lastUpdateTime = lastUpdateTime, user = user, + triggers = triggers + ) +} + +fun Workflow.toJsonStringWithUser(): String { + val builder = XContentFactory.jsonBuilder() + return this.toXContentWithUser(builder, ToXContent.EMPTY_PARAMS).string() +} + +fun randomSequence( + delegates: List<Delegate> = listOf(randomDelegate()) +): Sequence { + return Sequence(delegates) +} + +fun randomDelegate( + order: Int = 1, + monitorId: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + chainedMonitorFindings: ChainedMonitorFindings? = null +): Delegate { + return Delegate(order, monitorId, chainedMonitorFindings) +} + +fun randomQueryLevelTrigger( + id: String = UUIDs.base64UUID(), + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + severity: String = "1", + condition: Script = randomScript(), + actions: List<Action> = mutableListOf(), + destinationId: String = "" +): QueryLevelTrigger { + return QueryLevelTrigger( + id = id, + name = name, + severity = severity, + condition = condition, + actions = if (actions.isEmpty()) (0..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomAction(destinationId = destinationId) } else actions + ) +} + +fun randomBucketLevelTrigger( + id: String = UUIDs.base64UUID(), + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + severity: String = "1", + bucketSelector: BucketSelectorExtAggregationBuilder = randomBucketSelectorExtAggregationBuilder(name = id), + actions: List<Action> = mutableListOf(), + destinationId: String = "" +): BucketLevelTrigger { + return BucketLevelTrigger( + id = id, + name = name, + severity = severity, + bucketSelector = bucketSelector, + actions = if (actions.isEmpty()) randomActionsForBucketLevelTrigger(destinationId = destinationId) else actions + ) +} + +fun randomActionsForBucketLevelTrigger(min: Int = 0, max: Int = 10, destinationId: String = ""): List<Action> = + (min..RandomNumbers.randomIntBetween(Random(), 0, max)).map { randomActionWithPolicy(destinationId = destinationId) } + +fun randomDocumentLevelTrigger( + id: String = UUIDs.base64UUID(), + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + severity: String = "1", + condition: Script = randomScript(), + actions: List<Action> = mutableListOf(), + destinationId: String = "" +): DocumentLevelTrigger { + return DocumentLevelTrigger( + id = id, + name = name, + severity = severity, + condition = condition, + actions = if (actions.isEmpty() && destinationId.isNotBlank()) { + (0..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomAction(destinationId = destinationId) } + } else { + actions + } + ) +} + +fun randomChainedAlertTrigger( + id: String = UUIDs.base64UUID(), + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + severity: String = "1", + condition: Script = randomScript(), + actions: List<Action> = mutableListOf(), + destinationId: String = "" +): ChainedAlertTrigger { + return ChainedAlertTrigger( + id = id, + name = name, + severity = severity, + condition = condition, + actions = if (actions.isEmpty() && destinationId.isNotBlank()) { +
(0..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomAction(destinationId = destinationId) } + } else { + actions + } + ) +} + +fun randomBucketSelectorExtAggregationBuilder( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + bucketsPathsMap: MutableMap<String, String> = mutableMapOf("avg" to "10"), + script: Script = randomBucketSelectorScript(params = bucketsPathsMap), + parentBucketPath: String = "testPath", + filter: BucketSelectorExtFilter = BucketSelectorExtFilter(IncludeExclude("foo*", "bar*")) +): BucketSelectorExtAggregationBuilder { + return BucketSelectorExtAggregationBuilder(name, bucketsPathsMap, script, parentBucketPath, filter) +} + +fun randomBucketSelectorScript( + idOrCode: String = "params.avg >= 0", + params: Map<String, Any> = mutableMapOf("avg" to "10") +): Script { + return Script(Script.DEFAULT_SCRIPT_TYPE, Script.DEFAULT_SCRIPT_LANG, idOrCode, emptyMap(), params) +} + +fun randomScript(source: String = "return " + Random().nextBoolean().toString()): Script = Script(source) + +fun randomTemplateScript( + source: String, + params: Map<String, Any> = emptyMap() +): Script = Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, source, params) + +fun randomAction( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + template: Script = randomTemplateScript("Hello World"), + destinationId: String = "", + throttleEnabled: Boolean = false, + throttle: Throttle = randomThrottle() +) = Action(name, destinationId, template, template, throttleEnabled, throttle, actionExecutionPolicy = null) + +fun randomActionWithPolicy( + name: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + template: Script = randomTemplateScript("Hello World"), + destinationId: String = "", + throttleEnabled: Boolean = false, + throttle: Throttle = randomThrottle(), + actionExecutionPolicy: ActionExecutionPolicy?
= randomActionExecutionPolicy() +): Action { + return if (actionExecutionPolicy?.actionExecutionScope is PerExecutionActionScope) { + // Return null for throttle when using PerExecutionActionScope since throttling is currently not supported for it + Action(name, destinationId, template, template, throttleEnabled, null, actionExecutionPolicy = actionExecutionPolicy) + } else { + Action(name, destinationId, template, template, throttleEnabled, throttle, actionExecutionPolicy = actionExecutionPolicy) + } +} + +fun randomThrottle( + value: Int = RandomNumbers.randomIntBetween(Random(), 60, 120), + unit: ChronoUnit = ChronoUnit.MINUTES +) = Throttle(value, unit) + +fun randomActionExecutionPolicy( + actionExecutionScope: ActionExecutionScope = randomActionExecutionScope() +) = ActionExecutionPolicy(actionExecutionScope) + +fun randomActionExecutionScope(): ActionExecutionScope { + return if (Random().nextBoolean()) { + val alertCategories = AlertCategory.values() + PerAlertActionScope(actionableAlerts = (1..RandomNumbers.randomIntBetween(Random(), 0, alertCategories.size)).map { alertCategories[it - 1] }.toSet()) + } else { + PerExecutionActionScope() + } +} + +fun randomDocLevelQuery( + id: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + query: String = RandomStrings.randomAsciiLettersOfLength(Random(), 10), + name: String = "${RandomNumbers.randomIntBetween(Random(), 0, 5)}", + tags: List = mutableListOf(0..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { RandomStrings.randomAsciiLettersOfLength(Random(), 10) } +): DocLevelQuery { + return DocLevelQuery(id = id, query = query, name = name, tags = tags, fields = listOf("*")) +} + +fun randomDocLevelMonitorInput( + description: String = RandomStrings.randomAsciiLettersOfLength(Random(), RandomNumbers.randomIntBetween(Random(), 0, 10)), + indices: List = listOf(1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { RandomStrings.randomAsciiLettersOfLength(Random(), 10) }, + queries: List = listOf(1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { randomDocLevelQuery() } +): DocLevelMonitorInput { + return DocLevelMonitorInput(description = description, indices = indices, queries = queries) +} + +fun randomSearchInput( + indices: List = listOf(1..RandomNumbers.randomIntBetween(Random(), 0, 10)).map { RandomStrings.randomAsciiLettersOfLength(Random(), 10) }, + query: SearchSourceBuilder = SearchSourceBuilder().query(QueryBuilders.matchAllQuery()) +): SearchInput { + return SearchInput(indices, query) +} + +fun randomClusterMetricsInput( + path: String = ClusterMetricsInput.ClusterMetricType.values() + .filter { it.defaultPath.isNotBlank() && !it.requiresPathParams } + .random() + .defaultPath, + pathParams: String = "", + url: String = "" +): ClusterMetricsInput { + return ClusterMetricsInput(path, pathParams, url) +} + +fun ChainedMonitorFindings.toJsonString(): String { + val builder = XContentFactory.jsonBuilder() + return this.toXContent(builder, ToXContent.EMPTY_PARAMS).string() +} + +fun Workflow.toJsonString(): String { + val builder = XContentFactory.jsonBuilder() + return this.toXContentWithUser(builder, ToXContent.EMPTY_PARAMS).string() +} + +fun Monitor.toJsonString(): String { + val builder = XContentFactory.jsonBuilder() + return this.toXContent(builder, ToXContent.EMPTY_PARAMS).string() +} + +fun Monitor.toJsonStringWithUser(): String { + val builder = XContentFactory.jsonBuilder() + return this.toXContentWithUser(builder, ToXContent.EMPTY_PARAMS).string() +} + +fun randomUser(): User { + return 
User( + RandomStrings.randomAsciiLettersOfLength(Random(), 10), + listOf( + RandomStrings.randomAsciiLettersOfLength(Random(), 10), + RandomStrings.randomAsciiLettersOfLength(Random(), 10) + ), + listOf(RandomStrings.randomAsciiLettersOfLength(Random(), 10), ALL_ACCESS_ROLE), + listOf("test_attr=test") + ) +} + +fun randomUserEmpty(): User { + return User("", listOf(), listOf(), listOf()) +} + +/** + * Wrapper for [RestClient.performRequest] which was deprecated in ES 6.5 and is used in tests. This provides + * a single place to suppress deprecation warnings. This will probably need further work when the API is removed entirely + * but that's an exercise for another day. + */ +@Suppress("DEPRECATION") +fun RestClient.makeRequest( + method: String, + endpoint: String, + params: Map = emptyMap(), + entity: HttpEntity? = null, + vararg headers: Header +): Response { + val request = Request(method, endpoint) + // TODO: remove PERMISSIVE option after moving system index access to REST API call + val options = RequestOptions.DEFAULT.toBuilder() + options.setWarningsHandler(WarningsHandler.PERMISSIVE) + headers.forEach { options.addHeader(it.name, it.value) } + request.options = options.build() + params.forEach { request.addParameter(it.key, it.value) } + if (entity != null) { + request.entity = entity + } + return performRequest(request) +} + +/** + * Wrapper for [RestClient.performRequest] which was deprecated in ES 6.5 and is used in tests. This provides + * a single place to suppress deprecation warnings. This will probably need further work when the API is removed entirely + * but that's an exercise for another day. + */ +@Suppress("DEPRECATION") +fun RestClient.makeRequest( + method: String, + endpoint: String, + entity: HttpEntity? = null, + vararg headers: Header +): Response { + val request = Request(method, endpoint) + val options = RequestOptions.DEFAULT.toBuilder() + // TODO: remove PERMISSIVE option after moving system index access to REST API call + options.setWarningsHandler(WarningsHandler.PERMISSIVE) + headers.forEach { options.addHeader(it.name, it.value) } + request.options = options.build() + if (entity != null) { + request.entity = entity + } + return performRequest(request) +} + +fun builder(): XContentBuilder { + return XContentBuilder.builder(XContentType.JSON.xContent()) +} + +fun parser(xc: String): XContentParser { + val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) + parser.nextToken() + return parser +} + +fun parser(xc: ByteArray): XContentParser { + val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) + parser.nextToken() + return parser +} + +fun xContentRegistry(): NamedXContentRegistry { + return NamedXContentRegistry( + listOf( + SearchInput.XCONTENT_REGISTRY, + DocLevelMonitorInput.XCONTENT_REGISTRY, + QueryLevelTrigger.XCONTENT_REGISTRY, + BucketLevelTrigger.XCONTENT_REGISTRY, + DocumentLevelTrigger.XCONTENT_REGISTRY, + ChainedAlertTrigger.XCONTENT_REGISTRY, + NoOpTrigger.XCONTENT_REGISTRY, + RemoteMonitorTrigger.XCONTENT_REGISTRY + ) + SearchModule(Settings.EMPTY, emptyList()).namedXContents + ) +} + +fun assertUserNull(map: Map) { + val user = map["user"] + assertNull("User is not null", user) +} + +fun assertUserNull(monitor: Monitor) { + assertNull("User is not null", monitor.user) +} + +fun randomAlert(monitor: Monitor = randomQueryLevelMonitor()): Alert { + val trigger = randomQueryLevelTrigger() + val actionExecutionResults = 
mutableListOf(randomActionExecutionResult(), randomActionExecutionResult()) + val clusterCount = (-1..5).random() + val clusters = if (clusterCount == -1) null else (0..clusterCount).map { "index-$it" } + return Alert( + monitor, + trigger, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + actionExecutionResults = actionExecutionResults, + clusters = clusters + ) +} + +fun randomChainedAlert( + workflow: Workflow = randomWorkflow(), + trigger: ChainedAlertTrigger = randomChainedAlertTrigger() +): Alert { + return Alert( + startTime = Instant.now(), + lastNotificationTime = Instant.now(), + state = Alert.State.ACTIVE, + errorMessage = null, + executionId = UUID.randomUUID().toString(), + chainedAlertTrigger = trigger, + workflow = workflow, + associatedAlertIds = listOf("a1") + ) +} + +fun randomActionExecutionResult( + actionId: String = UUIDs.base64UUID(), + lastExecutionTime: Instant = Instant.now().truncatedTo(ChronoUnit.MILLIS), + throttledCount: Int = 0 +) = ActionExecutionResult(actionId, lastExecutionTime, throttledCount) + +fun randomAlertWithAggregationResultBucket(monitor: Monitor = randomBucketLevelMonitor()): Alert { + val trigger = randomBucketLevelTrigger() + val actionExecutionResults = mutableListOf(randomActionExecutionResult(), randomActionExecutionResult()) + return Alert( + monitor, + trigger, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + actionExecutionResults = actionExecutionResults, + aggregationResultBucket = AggregationResultBucket( + "parent_bucket_path_1", + listOf("bucket_key_1"), + mapOf("k1" to "val1", "k2" to "val2") + ) + ) +} + +fun randomFinding( + id: String = UUIDs.base64UUID(), + relatedDocIds: List = listOf(UUIDs.base64UUID()), + monitorId: String = UUIDs.base64UUID(), + monitorName: String = UUIDs.base64UUID(), + index: String = UUIDs.base64UUID(), + docLevelQueries: List = listOf(randomDocLevelQuery()), + timestamp: Instant = Instant.now() +): Finding { + return Finding( + id = id, + relatedDocIds = relatedDocIds, + monitorId = monitorId, + monitorName = monitorName, + index = index, + docLevelQueries = docLevelQueries, + timestamp = timestamp + ) +} + +fun randomCorrelationAlert( + id: String, + state: Alert.State +): CorrelationAlert { + val correlatedFindingIds = listOf("finding1", "finding2") + val correlationRuleId = "rule1" + val correlationRuleName = "Rule 1" + val id = id + val version = 1L + val schemaVersion = 1 + val user = randomUser() + val triggerName = "Trigger 1" + val state = state + val startTime = Instant.now() + val endTime: Instant? = null + val acknowledgedTime: Instant? = null + val errorMessage: String? 
= null + val severity = "high" + val actionExecutionResults = listOf(randomActionExecutionResult()) + + return CorrelationAlert( + correlatedFindingIds, correlationRuleId, correlationRuleName, + id, version, schemaVersion, user, triggerName, state, + startTime, endTime, acknowledgedTime, errorMessage, severity, + actionExecutionResults + ) +} + +fun createUnifiedAlertTemplateArgs(unifiedAlert: BaseAlert): Map { + return mapOf( + BaseAlert.ALERT_ID_FIELD to unifiedAlert.id, + BaseAlert.ALERT_VERSION_FIELD to unifiedAlert.version, + BaseAlert.SCHEMA_VERSION_FIELD to unifiedAlert.schemaVersion, + BaseAlert.USER_FIELD to unifiedAlert.user, + BaseAlert.TRIGGER_NAME_FIELD to unifiedAlert.triggerName, + BaseAlert.STATE_FIELD to unifiedAlert.state, + BaseAlert.START_TIME_FIELD to unifiedAlert.startTime, + BaseAlert.END_TIME_FIELD to unifiedAlert.endTime, + BaseAlert.ACKNOWLEDGED_TIME_FIELD to unifiedAlert.acknowledgedTime, + BaseAlert.ERROR_MESSAGE_FIELD to unifiedAlert.errorMessage, + BaseAlert.SEVERITY_FIELD to unifiedAlert.severity, + BaseAlert.ACTION_EXECUTION_RESULTS_FIELD to unifiedAlert.actionExecutionResults + ) +} + +fun createCorrelationAlertTemplateArgs(correlationAlert: CorrelationAlert): Map { + val unifiedAlertTemplateArgs = createUnifiedAlertTemplateArgs(correlationAlert) + return unifiedAlertTemplateArgs + mapOf( + CorrelationAlert.CORRELATED_FINDING_IDS to correlationAlert.correlatedFindingIds, + CorrelationAlert.CORRELATION_RULE_ID to correlationAlert.correlationRuleId, + CorrelationAlert.CORRELATION_RULE_NAME to correlationAlert.correlationRuleName + ) +} + +fun randomInputRunResults(): InputRunResults { + return InputRunResults(listOf(), null) +} + +fun randomActionRunResult(): ActionRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", "val1")) + map.plus(Pair("key2", "val2")) + return ActionRunResult( + "1234", + "test-action", + map, + false, + Instant.now(), + null + ) +} + +fun randomDocumentLevelTriggerRunResult(): DocumentLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return DocumentLevelTriggerRunResult( + "trigger-name", + mutableListOf(UUIDs.randomBase64UUID().toString()), + null, + mutableMapOf(Pair("alertId", map)) + ) +} +fun randomDocumentLevelMonitorRunResult(): MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomDocumentLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} + +fun randomBucketLevelTriggerRunResult(): BucketLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + + val aggBucket1 = AggregationResultBucket( + "parent_bucket_path_1", + listOf("bucket_key_1"), + mapOf("k1" to "val1", "k2" to "val2") + ) + val aggBucket2 = AggregationResultBucket( + "parent_bucket_path_2", + listOf("bucket_key_2"), + mapOf("k1" to "val1", "k2" to "val2") + ) + + val actionResultsMap: MutableMap> = mutableMapOf() + actionResultsMap[aggBucket1.getBucketKeysHash()] = map + actionResultsMap[aggBucket2.getBucketKeysHash()] = map + + return BucketLevelTriggerRunResult( + "trigger-name", + null, + mapOf( + aggBucket1.getBucketKeysHash() to aggBucket1, + aggBucket2.getBucketKeysHash() to aggBucket2 + ), + actionResultsMap + ) +} + +fun randomBucketLevelMonitorRunResult(): 
MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomBucketLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} + +fun randomQueryLevelTriggerRunResult(): QueryLevelTriggerRunResult { + val map = mutableMapOf() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return QueryLevelTriggerRunResult("trigger-name", true, null, map) +} + +fun randomQueryLevelMonitorRunResult(): MonitorRunResult { + val triggerResults = mutableMapOf() + val triggerRunResult = randomQueryLevelTriggerRunResult() + triggerResults.plus(Pair("test", triggerRunResult)) + + return MonitorRunResult( + "test-monitor", + Instant.now(), + Instant.now(), + null, + randomInputRunResults(), + triggerResults + ) +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequestTests.kt new file mode 100644 index 00000000..cbd9e98a --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertRequestTests.kt @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.action.support.WriteRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class AcknowledgeAlertRequestTests { + + @Test + fun `test acknowledge alert request`() { + val req = AcknowledgeAlertRequest("1234", mutableListOf("1", "2", "3", "4"), WriteRequest.RefreshPolicy.IMMEDIATE) + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = AcknowledgeAlertRequest(sin) + assertEquals("1234", newReq.monitorId) + assertEquals(4, newReq.alertIds.size) + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, newReq.refreshPolicy) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponseTests.kt new file mode 100644 index 00000000..df9a083b --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeAlertResponseTests.kt @@ -0,0 +1,89 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.alerts.AlertError +import org.opensearch.commons.alerting.model.ActionExecutionResult +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import java.time.Instant + +class AcknowledgeAlertResponseTests { + + @Test + fun `test acknowledge alert response`() { + val acknowledged = mutableListOf( + Alert( + id = "1234", + version = 0L, + schemaVersion = 1, + monitorId = "monitor-1234", + workflowId = "", + workflowName = "", + monitorName = "test-monitor", + monitorVersion = 0L, + monitorUser = 
randomUser(), + triggerId = "trigger-14", + triggerName = "test-trigger", + findingIds = ArrayList(), + relatedDocIds = ArrayList(), + state = Alert.State.ACKNOWLEDGED, + startTime = Instant.now(), + endTime = Instant.now(), + lastNotificationTime = Instant.now(), + acknowledgedTime = Instant.now(), + errorMessage = null, + errorHistory = ArrayList(), + severity = "sev-2", + actionExecutionResults = ArrayList(), + aggregationResultBucket = null, + executionId = null, + associatedAlertIds = emptyList() + ) + ) + val failed = mutableListOf( + Alert( + id = "1234", + version = 0L, + schemaVersion = 1, + monitorId = "monitor-1234", + workflowId = "", + workflowName = "", + monitorName = "test-monitor", + monitorVersion = 0L, + monitorUser = randomUser(), + triggerId = "trigger-14", + triggerName = "test-trigger", + findingIds = ArrayList(), + relatedDocIds = ArrayList(), + state = Alert.State.ERROR, + startTime = Instant.now(), + endTime = Instant.now(), + lastNotificationTime = Instant.now(), + acknowledgedTime = Instant.now(), + errorMessage = null, + errorHistory = mutableListOf(AlertError(Instant.now(), "Error msg")), + severity = "sev-2", + actionExecutionResults = mutableListOf(ActionExecutionResult("7890", null, 0)), + aggregationResultBucket = null, + executionId = null, + associatedAlertIds = emptyList() + ) + ) + val missing = mutableListOf("1", "2", "3", "4") + + val req = AcknowledgeAlertResponse(acknowledged, failed, missing) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = AcknowledgeAlertResponse(sin) + assertEquals(1, newReq.acknowledged.size) + assertEquals(1, newReq.failed.size) + assertEquals(4, newReq.missing.size) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequestTests.kt new file mode 100644 index 00000000..b624edfa --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/AcknowledgeChainedAlertRequestTests.kt @@ -0,0 +1,29 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class AcknowledgeChainedAlertRequestTests { + + @Test + fun `test acknowledge chained alert request`() { + val req = AcknowledgeChainedAlertRequest("1234", mutableListOf("1", "2", "3", "4")) + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = AcknowledgeChainedAlertRequest(sin) + assertEquals("1234", newReq.workflowId) + assertEquals(4, newReq.alertIds.size) + assertNull(newReq.validate()) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt new file mode 100644 index 00000000..70a22953 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentRequestTests.kt @@ -0,0 +1,22 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals 
+import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteCommentRequestTests { + @Test + fun `test delete comment request writing and parsing`() { + val req = DeleteCommentRequest("1234") + assertNotNull(req) + assertEquals("1234", req.commentId) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = DeleteCommentRequest(sin) + assertEquals("1234", newReq.commentId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt new file mode 100644 index 00000000..f10067ac --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteCommentResponseTests.kt @@ -0,0 +1,22 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteCommentResponseTests { + @Test + fun `test delete comment response writing and parsing`() { + val res = DeleteCommentResponse(id = "123") + assertNotNull(res) + assertEquals("123", res.commentId) + + val out = BytesStreamOutput() + res.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRes = DeleteCommentResponse(sin) + assertEquals("123", newRes.commentId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequestTests.kt new file mode 100644 index 00000000..b307ec6c --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteMonitorRequestTests.kt @@ -0,0 +1,26 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.action.support.WriteRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteMonitorRequestTests { + + @Test + fun `test delete monitor request`() { + val req = DeleteMonitorRequest("1234", WriteRequest.RefreshPolicy.IMMEDIATE) + assertNotNull(req) + assertEquals("1234", req.monitorId) + assertEquals("true", req.refreshPolicy.value) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = DeleteMonitorRequest(sin) + assertEquals("1234", newReq.monitorId) + assertEquals("true", newReq.refreshPolicy.value) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequestTests.kt new file mode 100644 index 00000000..80fb24d4 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DeleteWorkflowRequestTests.kt @@ -0,0 +1,22 @@ +package org.opensearch.commons.alerting.action + +import org.junit.Assert +import org.junit.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DeleteWorkflowRequestTests { + + @Test + fun `test 
delete workflow request`() { + val req = DeleteWorkflowRequest("1234", true) + Assert.assertNotNull(req) + Assert.assertEquals("1234", req.workflowId) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = DeleteWorkflowRequest(sin) + Assert.assertEquals("1234", newReq.workflowId) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt new file mode 100644 index 00000000..dda45483 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutRequestTests.kt @@ -0,0 +1,92 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.Assert.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.ActionExecutionTime +import org.opensearch.commons.alerting.model.DocLevelMonitorInput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.IndexExecutionContext +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.model.MonitorMetadata +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.model.WorkflowRunContext +import org.opensearch.commons.alerting.randomDocumentLevelMonitor +import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.index.shard.ShardId +import org.opensearch.index.seqno.SequenceNumbers +import org.opensearch.script.Script +import java.time.Instant +import java.time.temporal.ChronoUnit +import java.util.UUID + +class DocLevelMonitorFanOutRequestTests { + + @Test + fun `test doc level monitor fan out request as stream`() { + val docQuery = DocLevelQuery(query = "test_field:\"us-west-2\"", fields = listOf(), name = "3") + val docLevelInput = DocLevelMonitorInput("description", listOf("test-index"), listOf(docQuery)) + + val trigger = randomDocumentLevelTrigger(condition = Script("return true")) + val monitor = randomDocumentLevelMonitor( + inputs = listOf(docLevelInput), + triggers = listOf(trigger), + enabled = true, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES) + ) + val monitorMetadata = MonitorMetadata( + "test", + SequenceNumbers.UNASSIGNED_SEQ_NO, + SequenceNumbers.UNASSIGNED_PRIMARY_TERM, + Monitor.NO_ID, + listOf(ActionExecutionTime("", Instant.now())), + mutableMapOf("index" to mutableMapOf("1" to "1")), + mutableMapOf("test-index" to ".opensearch-sap-test_windows-queries-000001") + ) + val indexExecutionContext = IndexExecutionContext( + listOf(docQuery), + mutableMapOf("index" to mutableMapOf("1" to "1")), + mutableMapOf("index" to mutableMapOf("1" to "1")), + "test-index", + "test-index", + listOf("test-index"), + listOf("test-index"), + listOf("test-field"), + listOf("1", "2") + ) + val workflowRunContext = WorkflowRunContext( + Workflow.NO_ID, + Workflow.NO_ID, + Monitor.NO_ID, + mutableMapOf("index" to listOf("1")), + true + ) + val docLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest( + monitor, + false, + monitorMetadata, + UUID.randomUUID().toString(), + indexExecutionContext, + listOf(ShardId("test-index", 
UUID.randomUUID().toString(), 0)), + listOf("test-index"), + workflowRunContext + ) + val out = BytesStreamOutput() + docLevelMonitorFanOutRequest.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDocLevelMonitorFanOutRequest = DocLevelMonitorFanOutRequest(sin) + assertEquals(docLevelMonitorFanOutRequest.monitor, newDocLevelMonitorFanOutRequest.monitor) + assertEquals(docLevelMonitorFanOutRequest.executionId, newDocLevelMonitorFanOutRequest.executionId) + assertEquals(docLevelMonitorFanOutRequest.monitorMetadata, newDocLevelMonitorFanOutRequest.monitorMetadata) + assertEquals(docLevelMonitorFanOutRequest.indexExecutionContext, newDocLevelMonitorFanOutRequest.indexExecutionContext) + assertEquals(docLevelMonitorFanOutRequest.shardIds, newDocLevelMonitorFanOutRequest.shardIds) + assertEquals(docLevelMonitorFanOutRequest.workflowRunContext, newDocLevelMonitorFanOutRequest.workflowRunContext) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt new file mode 100644 index 00000000..645b7d5c --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/DocLevelMonitorFanOutResponseTests.kt @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.junit.Assert.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.InputRunResults +import org.opensearch.commons.alerting.randomDocumentLevelTriggerRunResult +import org.opensearch.core.common.io.stream.StreamInput + +class DocLevelMonitorFanOutResponseTests { + + @Test + fun `test doc level monitor fan out response with errors as stream`() { + val docLevelMonitorFanOutResponse = DocLevelMonitorFanOutResponse( + "nodeid", + "eid", + "monitorId", + mutableMapOf("index" to mutableMapOf("1" to "1")), + InputRunResults(error = null), + mapOf("1" to randomDocumentLevelTriggerRunResult(), "2" to randomDocumentLevelTriggerRunResult()) + ) + val out = BytesStreamOutput() + docLevelMonitorFanOutResponse.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDocLevelMonitorFanOutResponse = DocLevelMonitorFanOutResponse(sin) + assertEquals(docLevelMonitorFanOutResponse.nodeId, newDocLevelMonitorFanOutResponse.nodeId) + assertEquals(docLevelMonitorFanOutResponse.executionId, newDocLevelMonitorFanOutResponse.executionId) + assertEquals(docLevelMonitorFanOutResponse.monitorId, newDocLevelMonitorFanOutResponse.monitorId) + assertEquals(docLevelMonitorFanOutResponse.lastRunContexts, newDocLevelMonitorFanOutResponse.lastRunContexts) + assertEquals(docLevelMonitorFanOutResponse.inputResults, newDocLevelMonitorFanOutResponse.inputResults) + assertEquals(docLevelMonitorFanOutResponse.triggerResults, newDocLevelMonitorFanOutResponse.triggerResults) + } + + @Test + fun `test doc level monitor fan out response as stream`() { + val workflow = DocLevelMonitorFanOutResponse( + "nodeid", + "eid", + "monitorId", + mapOf("index" to mapOf("1" to "1")) as MutableMap, + InputRunResults(), + mapOf("1" to randomDocumentLevelTriggerRunResult(), "2" to randomDocumentLevelTriggerRunResult()) + ) + val out = BytesStreamOutput() + workflow.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newWorkflow = 
DocLevelMonitorFanOutResponse(sin) + assertEquals(workflow.nodeId, newWorkflow.nodeId) + assertEquals(workflow.executionId, newWorkflow.executionId) + assertEquals(workflow.monitorId, newWorkflow.monitorId) + assertEquals(workflow.lastRunContexts, newWorkflow.lastRunContexts) + assertEquals(workflow.inputResults, newWorkflow.inputResults) + assertEquals(workflow.triggerResults, newWorkflow.triggerResults) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequestTests.kt new file mode 100644 index 00000000..58f61550 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsRequestTests.kt @@ -0,0 +1,72 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput + +internal class GetAlertsRequestTests { + + @Test + fun `test get alerts request`() { + val table = Table("asc", "sortString", null, 1, 0, "") + + val req = GetAlertsRequest( + table = table, + severityLevel = "1", + alertState = "active", + monitorId = null, + alertIndex = null, + monitorIds = listOf("1", "2"), + alertIds = listOf("alert1", "alert2"), + workflowIds = listOf("w1", "w2") + ) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetAlertsRequest(sin) + + assertEquals("1", newReq.severityLevel) + assertEquals("active", newReq.alertState) + assertNull(newReq.monitorId) + assertEquals(table, newReq.table) + assertTrue(newReq.monitorIds!!.contains("1")) + assertTrue(newReq.monitorIds!!.contains("2")) + assertTrue(newReq.alertIds!!.contains("alert1")) + assertTrue(newReq.alertIds!!.contains("alert2")) + assertTrue(newReq.workflowIds!!.contains("w1")) + assertTrue(newReq.workflowIds!!.contains("w2")) + } + + @Test + fun `test get alerts request with filter`() { + val table = Table("asc", "sortString", null, 1, 0, "") + val req = GetAlertsRequest(table, "1", "active", null, null) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetAlertsRequest(sin) + + assertEquals("1", newReq.severityLevel) + assertEquals("active", newReq.alertState) + assertNull(newReq.monitorId) + assertEquals(table, newReq.table) + } + + @Test + fun `test validate returns null`() { + val table = Table("asc", "sortString", null, 1, 0, "") + + val req = GetAlertsRequest(table, "1", "active", null, null) + assertNotNull(req) + assertNull(req.validate()) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponseTests.kt new file mode 100644 index 00000000..6a1cb877 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetAlertsResponseTests.kt @@ -0,0 +1,108 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import 
org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.builder +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.randomUser +import org.opensearch.commons.alerting.util.string +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.ToXContent +import java.time.Instant +import java.util.Collections + +class GetAlertsResponseTests { + + @Test + fun `test get alerts response with no alerts`() { + val req = GetAlertsResponse(Collections.emptyList(), 0) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetAlertsResponse(sin) + Assertions.assertTrue(newReq.alerts.isEmpty()) + assertEquals(0, newReq.totalAlerts) + } + + @Test + fun `test get alerts response with alerts`() { + val alert = Alert( + monitorId = "id", + monitorName = "name", + monitorVersion = Alert.NO_VERSION, + monitorUser = randomUser(), + triggerId = "triggerId", + triggerName = "triggerNamer", + state = Alert.State.ACKNOWLEDGED, + startTime = Instant.now(), + lastNotificationTime = null, + errorMessage = null, + errorHistory = emptyList(), + severity = "high", + actionExecutionResults = emptyList(), + schemaVersion = 0, + aggregationResultBucket = null, + findingIds = emptyList(), + relatedDocIds = emptyList(), + executionId = "executionId", + workflowId = "workflowId", + workflowName = "", + associatedAlertIds = emptyList() + ) + val req = GetAlertsResponse(listOf(alert), 1) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetAlertsResponse(sin) + assertEquals(1, newReq.alerts.size) + assertEquals(alert, newReq.alerts[0]) + assertEquals(1, newReq.totalAlerts) + assertEquals(newReq.alerts[0].workflowId, "workflowId") + } + + @Test + fun `test toXContent for get alerts response`() { + val now = Instant.now() + val alert = Alert( + monitorId = "id", + monitorName = "name", + monitorVersion = Alert.NO_VERSION, + monitorUser = randomUser(), + triggerId = "triggerId", + triggerName = "triggerNamer", + state = Alert.State.ACKNOWLEDGED, + startTime = now, + lastNotificationTime = null, + errorMessage = null, + errorHistory = emptyList(), + severity = "high", + actionExecutionResults = emptyList(), + schemaVersion = 0, + aggregationResultBucket = null, + findingIds = emptyList(), + relatedDocIds = emptyList(), + executionId = "executionId", + workflowId = "wid", + workflowName = "", + associatedAlertIds = emptyList() + ) + + val req = GetAlertsResponse(listOf(alert), 1) + var actualXContentString = req.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val expectedXContentString = "{\"alerts\":[{\"id\":\"\",\"version\":-1,\"monitor_id\":\"id\",\"workflow_id\":\"wid\"," + + "\"workflow_name\":\"\",\"associated_alert_ids\":[],\"schema_version\":0,\"monitor_version\":-1," + + "\"monitor_name\":\"name\",\"execution_id\":\"executionId\",\"trigger_id\":\"triggerId\"," + + "\"trigger_name\":\"triggerNamer\",\"finding_ids\":[],\"related_doc_ids\":[],\"state\":\"ACKNOWLEDGED\"," + + "\"error_message\":null,\"alert_history\":[],\"severity\":\"high\",\"action_execution_results\":[]," + + "\"start_time\":${now.toEpochMilli()},\"last_notification_time\":null,\"end_time\":null," + + "\"acknowledged_time\":null}],\"totalAlerts\":1}" + assertEquals(expectedXContentString, actualXContentString) + } +} 
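The request and response tests in this change all exercise the same transport round trip: serialize with writeTo into a BytesStreamOutput, wrap the resulting bytes in a StreamInput, and rebuild the object through its stream constructor before asserting field equality. A minimal generic sketch of that pattern, assuming only the OpenSearch core Writeable, BytesStreamOutput, and StreamInput APIs already used above; the roundTrip helper name is illustrative and is not part of this change:

import org.opensearch.common.io.stream.BytesStreamOutput
import org.opensearch.core.common.io.stream.StreamInput
import org.opensearch.core.common.io.stream.Writeable

fun <T : Writeable> roundTrip(original: T, reader: Writeable.Reader<T>): T {
    // Serialize the object exactly as the transport layer would.
    val out = BytesStreamOutput()
    original.writeTo(out)
    // Wrap the raw bytes and rebuild the object from the stream.
    val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes)
    return reader.read(sin)
}

// Usage mirroring the assertions in these tests (hypothetical call site):
// val newReq = roundTrip(DeleteCommentRequest("1234"), ::DeleteCommentRequest)
// assertEquals("1234", newReq.commentId)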
diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequestTests.kt new file mode 100644 index 00000000..238837cc --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsRequestTests.kt @@ -0,0 +1,43 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.index.query.QueryBuilders + +internal class GetFindingsRequestTests { + + @Test + fun `test get findings request`() { + val table = Table("asc", "sortString", null, 1, 0, "") + val boolQueryBuilder = QueryBuilders.boolQuery() + val req = GetFindingsRequest("2121", table, "1", "finding_index_name", listOf("1", "2"), boolQueryBuilder) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetFindingsRequest(sin) + + assertEquals("1", newReq.monitorId) + assertEquals("2121", newReq.findingId) + assertEquals("finding_index_name", newReq.findingIndex) + assertEquals(table, newReq.table) + assertTrue(newReq.monitorIds!!.contains("1")) + assertTrue(newReq.monitorIds!!.contains("2")) + } + + @Test + fun `test validate returns null`() { + val table = Table("asc", "sortString", null, 1, 0, "") + val boolQueryBuilder = QueryBuilders.boolQuery() + val req = GetFindingsRequest("2121", table, "1", "active", listOf("1", "2"), boolQueryBuilder) + assertNotNull(req) + assertNull(req.validate()) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponseTests.kt new file mode 100644 index 00000000..0c30b640 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetFindingsResponseTests.kt @@ -0,0 +1,61 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.DocLevelQuery +import org.opensearch.commons.alerting.model.Finding +import org.opensearch.commons.alerting.model.FindingDocument +import org.opensearch.commons.alerting.model.FindingWithDocs +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus +import java.time.Instant + +internal class GetFindingsResponseTests { + + @Test + fun `test get findings response`() { + // Alerting GetFindingsResponse mock #1 + val finding1 = Finding( + "1", + listOf("doc1", "doc2", "doc3"), + listOf("doc1", "doc2", "doc3"), + "monitor_id1", + "monitor_name1", + "test_index1", + listOf(DocLevelQuery("1", "myQuery", listOf(), "fieldA:valABC", listOf())), + Instant.now() + ) + val findingDocument1 = FindingDocument("test_index1", "doc1", true, "document 1 payload") + val findingDocument2 = FindingDocument("test_index1", "doc2", true, "document 2 payload") + val findingDocument3 = FindingDocument("test_index1", "doc3", true, "document 3 payload") + + val findingWithDocs1 = FindingWithDocs(finding1, 
listOf(findingDocument1, findingDocument2, findingDocument3)) + + // Alerting GetFindingsResponse mock #2 + + // Alerting GetFindingsResponse mock #2 + val finding2 = Finding( + "1", + listOf("doc21", "doc22"), + listOf("doc21", "doc22"), + "monitor_id2", + "monitor_name2", + "test_index2", + listOf(DocLevelQuery("1", "myQuery", listOf(), "fieldA:valABC", listOf())), + Instant.now() + ) + val findingDocument21 = FindingDocument("test_index2", "doc21", true, "document 21 payload") + val findingDocument22 = FindingDocument("test_index2", "doc22", true, "document 22 payload") + + val findingWithDocs2 = FindingWithDocs(finding2, listOf(findingDocument21, findingDocument22)) + + val req = GetFindingsResponse(RestStatus.OK, 2, listOf(findingWithDocs1, findingWithDocs2)) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetFindingsResponse(sin) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequestTests.kt new file mode 100644 index 00000000..7670650e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorRequestTests.kt @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.rest.RestRequest +import org.opensearch.search.fetch.subphase.FetchSourceContext +import org.opensearch.test.OpenSearchTestCase + +class GetMonitorRequestTests : OpenSearchTestCase() { + + fun `test get monitor request`() { + val req = GetMonitorRequest("1234", 1L, RestRequest.Method.GET, FetchSourceContext.FETCH_SOURCE) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetMonitorRequest(sin) + assertEquals("1234", newReq.monitorId) + assertEquals(1L, newReq.version) + assertEquals(RestRequest.Method.GET, newReq.method) + assertEquals(FetchSourceContext.FETCH_SOURCE, newReq.srcContext) + } + + fun `test get monitor request without src context`() { + val req = GetMonitorRequest("1234", 1L, RestRequest.Method.GET, null) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetMonitorRequest(sin) + assertEquals("1234", newReq.monitorId) + assertEquals(1L, newReq.version) + assertEquals(RestRequest.Method.GET, newReq.method) + assertEquals(null, newReq.srcContext) + } + + fun `test head monitor request`() { + val req = GetMonitorRequest("1234", 2L, RestRequest.Method.HEAD, FetchSourceContext.FETCH_SOURCE) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetMonitorRequest(sin) + assertEquals("1234", newReq.monitorId) + assertEquals(2L, newReq.version) + assertEquals(RestRequest.Method.HEAD, newReq.method) + assertEquals(FetchSourceContext.FETCH_SOURCE, newReq.srcContext) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt new file mode 100644 index 00000000..eb3f08e4 --- /dev/null +++ 
b/src/test/kotlin/org/opensearch/commons/alerting/action/GetMonitorResponseTests.kt @@ -0,0 +1,63 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.test.OpenSearchTestCase +import java.time.Instant +import java.time.ZoneId + +class GetMonitorResponseTests : OpenSearchTestCase() { + + fun `test get monitor response`() { + val req = GetMonitorResponse("1234", 1L, 2L, 0L, null, null) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetMonitorResponse(sin) + assertEquals("1234", newReq.id) + assertEquals(1L, newReq.version) + assertEquals(null, newReq.monitor) + } + + fun `test get monitor response with monitor`() { + val cronExpression = "31 * * * *" // Run at minute 31. + val testInstance = Instant.ofEpochSecond(1538164858L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) + val monitor = Monitor( + id = "123", + version = 0L, + name = "test-monitor", + enabled = true, + schedule = cronSchedule, + lastUpdateTime = Instant.now(), + enabledTime = Instant.now(), + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + user = randomUser(), + schemaVersion = 0, + inputs = mutableListOf(), + triggers = mutableListOf(), + uiMetadata = mutableMapOf() + ) + val req = GetMonitorResponse("1234", 1L, 2L, 0L, monitor, null) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetMonitorResponse(sin) + assertEquals("1234", newReq.id) + assertEquals(1L, newReq.version) + assertNotNull(newReq.monitor) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequestTests.kt new file mode 100644 index 00000000..425151cd --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsRequestTests.kt @@ -0,0 +1,99 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.Table +import org.opensearch.core.common.io.stream.StreamInput + +internal class GetWorkflowAlertsRequestTests { + + @Test + fun `test get alerts request`() { + val table = Table("asc", "sortString", null, 1, 0, "") + + val req = GetWorkflowAlertsRequest( + table = table, + severityLevel = "1", + alertState = "active", + getAssociatedAlerts = true, + workflowIds = listOf("w1", "w2"), + alertIds = emptyList(), + alertIndex = null, + associatedAlertsIndex = null, + monitorIds = emptyList() + ) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetWorkflowAlertsRequest(sin) + + assertEquals("1", newReq.severityLevel) + 
assertEquals("active", newReq.alertState) + assertEquals(table, newReq.table) + assertTrue(newReq.workflowIds!!.contains("w1")) + assertTrue(newReq.workflowIds!!.contains("w2")) + assertTrue(newReq.alertIds!!.isEmpty()) + assertTrue(newReq.monitorIds!!.isEmpty()) + assertNull(newReq.alertIndex) + assertNull(newReq.associatedAlertsIndex) + assertTrue(newReq.getAssociatedAlerts) + } + + @Test + fun `test get alerts request with custom alerts and associated alerts indices`() { + val table = Table("asc", "sortString", null, 1, 0, "") + + val req = GetWorkflowAlertsRequest( + table = table, + severityLevel = "1", + alertState = "active", + getAssociatedAlerts = true, + workflowIds = listOf("w1", "w2"), + alertIds = emptyList(), + alertIndex = "alertIndex", + associatedAlertsIndex = "associatedAlertsIndex", + monitorIds = emptyList() + ) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetWorkflowAlertsRequest(sin) + + assertEquals("1", newReq.severityLevel) + assertEquals("active", newReq.alertState) + assertEquals(table, newReq.table) + assertTrue(newReq.workflowIds!!.contains("w1")) + assertTrue(newReq.workflowIds!!.contains("w2")) + assertTrue(newReq.alertIds!!.isEmpty()) + assertTrue(newReq.monitorIds!!.isEmpty()) + assertEquals(newReq.alertIndex, "alertIndex") + assertEquals(newReq.associatedAlertsIndex, "associatedAlertsIndex") + assertTrue(newReq.getAssociatedAlerts) + } + + @Test + fun `test validate returns null`() { + val table = Table("asc", "sortString", null, 1, 0, "") + + val req = GetWorkflowAlertsRequest( + table = table, + severityLevel = "1", + alertState = "active", + getAssociatedAlerts = true, + workflowIds = listOf("w1, w2"), + alertIds = emptyList(), + alertIndex = null, + associatedAlertsIndex = null + ) + assertNotNull(req) + assertNull(req.validate()) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponseTests.kt new file mode 100644 index 00000000..947fead8 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowAlertsResponseTests.kt @@ -0,0 +1,96 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.builder +import org.opensearch.commons.alerting.model.Alert +import org.opensearch.commons.alerting.randomAlert +import org.opensearch.commons.alerting.randomChainedAlert +import org.opensearch.commons.alerting.randomUser +import org.opensearch.commons.alerting.util.string +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.ToXContent +import java.time.Instant +import java.util.Collections + +class GetWorkflowAlertsResponseTests { + + @Test + fun `test get alerts response with no alerts`() { + val req = GetWorkflowAlertsResponse(Collections.emptyList(), emptyList(), 0) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetWorkflowAlertsResponse(sin) + assertTrue(newReq.alerts.isEmpty()) + assertTrue(newReq.associatedAlerts.isEmpty()) + assertEquals(0, newReq.totalAlerts) + } + + 
@Test + fun `test get alerts response with alerts`() { + val chainedAlert1 = randomChainedAlert() + val chainedAlert2 = randomChainedAlert() + val alert1 = randomAlert() + val alert2 = randomAlert() + val req = GetWorkflowAlertsResponse(listOf(chainedAlert1, chainedAlert2), listOf(alert1, alert2), 2) + assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = GetWorkflowAlertsResponse(sin) + assertEquals(2, newReq.alerts.size) + assertEquals(2, newReq.associatedAlerts.size) + assertEquals(2, newReq.totalAlerts) + assertTrue(newReq.alerts.contains(chainedAlert1)) + assertTrue(newReq.alerts.contains(chainedAlert2)) + assertTrue(newReq.associatedAlerts.contains(alert1)) + assertTrue(newReq.associatedAlerts.contains(alert2)) + } + + @Test + fun `test toXContent for get alerts response`() { + val alert = Alert( + monitorId = "id", + monitorName = "name", + monitorVersion = Alert.NO_VERSION, + monitorUser = randomUser(), + triggerId = "triggerId", + triggerName = "triggerNamer", + state = Alert.State.ACKNOWLEDGED, + startTime = Instant.ofEpochMilli(1688591410974), + lastNotificationTime = null, + errorMessage = null, + errorHistory = emptyList(), + severity = "high", + actionExecutionResults = emptyList(), + schemaVersion = 0, + aggregationResultBucket = null, + findingIds = emptyList(), + relatedDocIds = emptyList(), + executionId = "executionId", + workflowId = "wid", + workflowName = "", + associatedAlertIds = emptyList() + ) + + val req = GetWorkflowAlertsResponse(listOf(alert), emptyList(), 1) + var actualXContentString = req.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val expectedXContentString = + "{\"alerts\":[{\"id\":\"\",\"version\":-1,\"monitor_id\":\"id\"," + + "\"workflow_id\":\"wid\",\"workflow_name\":\"\",\"associated_alert_ids\":[]," + + "\"schema_version\":0,\"monitor_version\":-1,\"monitor_name\":\"name\",\"execution_id\":" + + "\"executionId\",\"trigger_id\":\"triggerId\",\"trigger_name\":\"triggerNamer\",\"finding_ids\":[]," + + "\"related_doc_ids\":[],\"state\":\"ACKNOWLEDGED\",\"error_message\":null,\"alert_history\":[]," + + "\"severity\":\"high\",\"action_execution_results\":[],\"start_time\":1688591410974," + + "\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null}]," + + "\"associatedAlerts\":[],\"totalAlerts\":1}" + assertEquals(expectedXContentString, actualXContentString) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponseTests.kt new file mode 100644 index 00000000..e21bb430 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/GetWorkflowResponseTests.kt @@ -0,0 +1,75 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.IntervalSchedule +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.randomDelegate +import org.opensearch.commons.alerting.randomUser +import org.opensearch.commons.alerting.randomWorkflow +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.rest.RestStatus +import java.time.Instant +import java.time.temporal.ChronoUnit + +class GetWorkflowResponseTests { + + @Test + 
fun testGetWorkflowResponse() { + val workflow = randomWorkflow(auditDelegateMonitorAlerts = false) + val response = GetWorkflowResponse( + id = "id", + version = 1, + seqNo = 1, + primaryTerm = 1, + status = RestStatus.OK, + workflow = workflow + ) + val out = BytesStreamOutput() + response.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRes = GetWorkflowResponse(sin) + Assertions.assertEquals("id", newRes.id) + Assertions.assertFalse(newRes.workflow!!.auditDelegateMonitorAlerts!!) + Assertions.assertEquals(workflow.name, newRes.workflow!!.name) + Assertions.assertEquals(workflow.owner, newRes.workflow!!.owner) + } + + @Test + fun testGetWorkflowResponseWhereAuditDelegateMonitorAlertsFlagIsNotSet() { + val workflow = Workflow( + id = "", + version = Workflow.NO_VERSION, + name = "test", + enabled = true, + schemaVersion = 2, + schedule = IntervalSchedule(1, ChronoUnit.MINUTES), + lastUpdateTime = Instant.now(), + enabledTime = Instant.now(), + workflowType = Workflow.WorkflowType.COMPOSITE, + user = randomUser(), + inputs = listOf(CompositeInput(org.opensearch.commons.alerting.model.Sequence(listOf(randomDelegate())))), + owner = "", + triggers = listOf() + ) + val response = GetWorkflowResponse( + id = "id", + version = 1, + seqNo = 1, + primaryTerm = 1, + status = RestStatus.OK, + workflow = workflow + ) + val out = BytesStreamOutput() + response.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRes = GetWorkflowResponse(sin) + Assertions.assertEquals("id", newRes.id) + Assertions.assertTrue(newRes.workflow!!.auditDelegateMonitorAlerts!!) + Assertions.assertEquals(workflow.name, newRes.workflow!!.name) + Assertions.assertEquals(workflow.owner, newRes.workflow!!.owner) + Assertions.assertEquals(workflow.auditDelegateMonitorAlerts, newRes.workflow!!.auditDelegateMonitorAlerts) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt new file mode 100644 index 00000000..c9afb0d6 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentRequestTests.kt @@ -0,0 +1,44 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.rest.RestRequest + +class IndexCommentRequestTests { + @Test + fun `test index comment post request`() { + val req = IndexCommentRequest("123", "alert", "456", 1L, 2L, RestRequest.Method.POST, "comment") + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexCommentRequest(sin) + assertEquals("123", newReq.entityId) + assertEquals("alert", newReq.entityType) + assertEquals("456", newReq.commentId) + assertEquals(1L, newReq.seqNo) + assertEquals(2L, newReq.primaryTerm) + assertEquals(RestRequest.Method.POST, newReq.method) + assertEquals("comment", newReq.content) + } + + @Test + fun `test index comment put request`() { + val req = IndexCommentRequest("123", "alert", "456", 1L, 2L, RestRequest.Method.PUT, "comment") + assertNotNull(req) + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = 
IndexCommentRequest(sin) + assertEquals("123", newReq.entityId) + assertEquals("alert", newReq.entityType) + assertEquals("456", newReq.commentId) + assertEquals(1L, newReq.seqNo) + assertEquals(2L, newReq.primaryTerm) + assertEquals(RestRequest.Method.PUT, newReq.method) + assertEquals("comment", newReq.content) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt new file mode 100644 index 00000000..57e4801b --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexCommentResponseTests.kt @@ -0,0 +1,35 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.Comment +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import java.time.Instant + +class IndexCommentResponseTests { + @Test + fun `test index comment response with comment`() { + val comment = Comment( + "123", + "alert", + "456", + "comment", + Instant.now(), + Instant.now(), + randomUser() + ) + val req = IndexCommentResponse("1234", 1L, 2L, comment) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexCommentResponse(sin) + Assertions.assertEquals("1234", newReq.id) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertNotNull(newReq.comment) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequestTests.kt new file mode 100644 index 00000000..6efd68b6 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorRequestTests.kt @@ -0,0 +1,114 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.action.support.WriteRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.model.SearchInput +import org.opensearch.commons.alerting.randomBucketLevelMonitor +import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.utils.recreateObject +import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.rest.RestRequest +import org.opensearch.search.SearchModule +import org.opensearch.search.builder.SearchSourceBuilder + +class IndexMonitorRequestTests { + + @Test + fun `test index monitor post request`() { + val req = IndexMonitorRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexMonitorRequest(sin) + Assertions.assertEquals("1234", newReq.monitorId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) 
+ Assertions.assertEquals(RestRequest.Method.POST, newReq.method) + Assertions.assertNotNull(newReq.monitor) + } + + @Test + fun `test index bucket monitor post request`() { + val req = IndexMonitorRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomBucketLevelMonitor() + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val namedWriteableRegistry = NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables) + val newReq = IndexMonitorRequest(NamedWriteableAwareStreamInput(sin, namedWriteableRegistry)) + Assertions.assertEquals("1234", newReq.monitorId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertEquals(RestRequest.Method.POST, newReq.method) + Assertions.assertNotNull(newReq.monitor) + } + + @Test + fun `Index bucket monitor serialize and deserialize transport object should be equal`() { + val bucketLevelMonitorRequest = IndexMonitorRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomBucketLevelMonitor() + ) + + Assertions.assertThrows(UnsupportedOperationException::class.java) { + recreateObject(bucketLevelMonitorRequest) { IndexMonitorRequest(it) } + } + + val recreatedObject = recreateObject(bucketLevelMonitorRequest, NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables)) { IndexMonitorRequest(it) } + Assertions.assertEquals(bucketLevelMonitorRequest.monitorId, recreatedObject.monitorId) + Assertions.assertEquals(bucketLevelMonitorRequest.seqNo, recreatedObject.seqNo) + Assertions.assertEquals(bucketLevelMonitorRequest.primaryTerm, recreatedObject.primaryTerm) + Assertions.assertEquals(bucketLevelMonitorRequest.method, recreatedObject.method) + Assertions.assertNotNull(recreatedObject.monitor) + Assertions.assertEquals(bucketLevelMonitorRequest.monitor, recreatedObject.monitor) + } + + @Test + fun `test index monitor put request`() { + val req = IndexMonitorRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexMonitorRequest(sin) + Assertions.assertEquals("1234", newReq.monitorId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertEquals(RestRequest.Method.PUT, newReq.method) + Assertions.assertNotNull(newReq.monitor) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt new file mode 100644 index 00000000..ca3afa3e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexMonitorResponseTests.kt @@ -0,0 +1,47 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.Monitor +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import java.time.Instant +import 
java.time.ZoneId + +class IndexMonitorResponseTests { + + @Test + fun `test index monitor response with monitor`() { + val cronExpression = "31 * * * *" // Run at minute 31. + val testInstance = Instant.ofEpochSecond(1538164858L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) + val monitor = Monitor( + id = "123", + version = 0L, + name = "test-monitor", + enabled = true, + schedule = cronSchedule, + lastUpdateTime = Instant.now(), + enabledTime = Instant.now(), + monitorType = Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + user = randomUser(), + schemaVersion = 0, + inputs = mutableListOf(), + triggers = mutableListOf(), + uiMetadata = mutableMapOf() + ) + val req = IndexMonitorResponse("1234", 1L, 2L, 0L, monitor) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexMonitorResponse(sin) + Assertions.assertEquals("1234", newReq.id) + Assertions.assertEquals(1L, newReq.version) + Assertions.assertNotNull(newReq.monitor) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequestTests.kt new file mode 100644 index 00000000..600ac506 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowRequestTests.kt @@ -0,0 +1,299 @@ +package org.opensearch.commons.alerting.action + +import org.junit.Assert +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.action.support.WriteRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.settings.Settings +import org.opensearch.commons.alerting.model.ChainedMonitorFindings +import org.opensearch.commons.alerting.model.CompositeInput +import org.opensearch.commons.alerting.model.Delegate +import org.opensearch.commons.alerting.model.Sequence +import org.opensearch.commons.alerting.randomWorkflow +import org.opensearch.commons.alerting.randomWorkflowWithDelegates +import org.opensearch.commons.utils.recreateObject +import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput +import org.opensearch.core.common.io.stream.NamedWriteableRegistry +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.rest.RestRequest +import org.opensearch.search.SearchModule +import java.lang.Exception +import java.lang.IllegalArgumentException +import java.util.UUID +import kotlin.test.assertNotNull +import kotlin.test.assertNull +import kotlin.test.assertTrue + +class IndexWorkflowRequestTests { + + @Test + fun `test index workflow post request`() { + val req = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomWorkflow(auditDelegateMonitorAlerts = false) + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexWorkflowRequest(sin) + Assertions.assertEquals("1234", newReq.workflowId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertEquals(RestRequest.Method.POST, newReq.method) + Assertions.assertNotNull(newReq.workflow) + Assertions.assertFalse(newReq.workflow.auditDelegateMonitorAlerts!!) 
+ } + + @Test + fun `test index composite workflow post request`() { + val req = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomWorkflow() + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val namedWriteableRegistry = NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables) + val newReq = IndexWorkflowRequest(NamedWriteableAwareStreamInput(sin, namedWriteableRegistry)) + Assertions.assertEquals("1234", newReq.workflowId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertEquals(RestRequest.Method.POST, newReq.method) + Assertions.assertNotNull(newReq.workflow) + } + + @Test + fun `Index composite workflow serialize and deserialize transport object should be equal`() { + val compositeWorkflowRequest = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.POST, + randomWorkflow() + ) + + val recreatedObject = recreateObject( + compositeWorkflowRequest, + NamedWriteableRegistry(SearchModule(Settings.EMPTY, emptyList()).namedWriteables) + ) { IndexWorkflowRequest(it) } + Assertions.assertEquals(compositeWorkflowRequest.workflowId, recreatedObject.workflowId) + Assertions.assertEquals(compositeWorkflowRequest.seqNo, recreatedObject.seqNo) + Assertions.assertEquals(compositeWorkflowRequest.primaryTerm, recreatedObject.primaryTerm) + Assertions.assertEquals(compositeWorkflowRequest.method, recreatedObject.method) + Assertions.assertNotNull(recreatedObject.workflow) + Assertions.assertEquals(compositeWorkflowRequest.workflow, recreatedObject.workflow) + } + + @Test + fun `test index workflow put request`() { + val req = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflow() + ) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexWorkflowRequest(sin) + Assertions.assertEquals("1234", newReq.workflowId) + Assertions.assertEquals(1L, newReq.seqNo) + Assertions.assertEquals(2L, newReq.primaryTerm) + Assertions.assertEquals(RestRequest.Method.PUT, newReq.method) + Assertions.assertNotNull(newReq.workflow) + } + + @Test + fun `test validate`() { + val req = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflow(monitorIds = emptyList()) + ) + Assertions.assertNotNull(req) + // Empty input list + var validate = req.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Delegates list can not be empty.;")) + // Duplicate delegate + val req1 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflow(monitorIds = listOf("1L", "1L", "2L")) + ) + validate = req1.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Duplicate delegates not allowed")) + // Sequence not correct + var delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(1, "monitor-2"), + Delegate(2, "monitor-3") + ) + val req2 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = 
delegates))) + ) + ) + validate = req2.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Sequence ordering of delegate monitor shouldn't contain duplicate order values")) + // Chained finding sequence not correct + delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(3, "monitor-3", ChainedMonitorFindings("monitor-x")) + ) + val req3 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = delegates))) + ) + ) + validate = req3.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Chained Findings Monitor monitor-x doesn't exist in sequence")) + // Order not correct + delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2", ChainedMonitorFindings("monitor-1")), + Delegate(2, "monitor-3", ChainedMonitorFindings("monitor-2")) + ) + val req4 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = delegates))) + ) + ) + validate = req4.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Chained Findings Monitor monitor-2 should be executed before monitor monitor-3")) + // Max monitor size + val monitorsIds = mutableListOf<String>() + for (i in 0..25) { + monitorsIds.add(UUID.randomUUID().toString()) + } + val req5 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflow( + monitorIds = monitorsIds + ) + ) + validate = req5.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Delegates list can not be larger then 25.")) + // Input list empty + val req6 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = emptyList() + ) + ) + validate = req6.validate() + Assert.assertTrue(validate != null) + Assert.assertTrue(validate!!.message!!.contains("Input list can not be empty.")) + // Input list multiple elements + delegates = listOf( + Delegate(1, "monitor-1") + ) + + // Chained finding list of monitors valid + delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(2, "monitor-2"), + Delegate(3, "monitor-3", ChainedMonitorFindings(null, listOf("monitor-1", "monitor-2"))) + + ) + val req7 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = delegates))) + ) + ) + assertNull(req7.validate()) + try { + IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = delegates)), CompositeInput(Sequence(delegates = delegates))) + ) + ) + } catch (ex: Exception) { + Assert.assertTrue(ex is IllegalArgumentException) + Assert.assertTrue(ex.message!!.contains("Workflows can only have 1 search input.")) + } + + // Chained finding list of monitors invalid order and old field null + delegates = listOf( + Delegate(1, "monitor-1"), + Delegate(3, "monitor-2"), + Delegate(2, "monitor-3", ChainedMonitorFindings(null, listOf("monitor-1", 
"monitor-2"))) + + ) + val req8 = IndexWorkflowRequest( + "1234", + 1L, + 2L, + WriteRequest.RefreshPolicy.IMMEDIATE, + RestRequest.Method.PUT, + randomWorkflowWithDelegates( + input = listOf(CompositeInput(Sequence(delegates = delegates))) + ) + ) + assertNotNull(req8.validate()) + assertTrue(req8.validate()!!.message!!.contains("should be executed before monitor")) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponseTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponseTests.kt new file mode 100644 index 00000000..6594010e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/IndexWorkflowResponseTests.kt @@ -0,0 +1,61 @@ +package org.opensearch.commons.alerting.action + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.ChainedAlertTrigger +import org.opensearch.commons.alerting.model.CronSchedule +import org.opensearch.commons.alerting.model.Workflow +import org.opensearch.commons.alerting.randomChainedAlertTrigger +import org.opensearch.commons.alerting.randomUser +import org.opensearch.core.common.io.stream.StreamInput +import java.time.Instant +import java.time.ZoneId + +class IndexWorkflowResponseTests { + + @Test + fun `test index workflow response with workflow`() { + val cronExpression = "31 * * * *" // Run at minute 31. + val testInstance = Instant.ofEpochSecond(1538164858L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) + val workflow = Workflow( + id = "123", + version = 0L, + name = "test-workflow", + enabled = true, + schedule = cronSchedule, + lastUpdateTime = Instant.now(), + enabledTime = Instant.now(), + workflowType = Workflow.WorkflowType.COMPOSITE, + user = randomUser(), + schemaVersion = 0, + inputs = mutableListOf(), + triggers = listOf(randomChainedAlertTrigger()) + ) + val req = IndexWorkflowResponse("1234", 1L, 2L, 0L, workflow) + Assertions.assertNotNull(req) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = IndexWorkflowResponse(sin) + Assertions.assertEquals("1234", newReq.id) + Assertions.assertEquals(1L, newReq.version) + Assertions.assertNotNull(newReq.workflow) + Assertions.assertEquals(newReq.workflow.triggers.size, 1) + Assertions.assertEquals(newReq.workflow.triggers.get(0).name, req.workflow.triggers.get(0).name) + Assertions.assertEquals(newReq.workflow.triggers.get(0).id, req.workflow.triggers.get(0).id) + Assertions.assertEquals(newReq.workflow.triggers.get(0).severity, req.workflow.triggers.get(0).severity) + Assertions.assertEquals( + (newReq.workflow.triggers.get(0) as ChainedAlertTrigger).condition.idOrCode, + (req.workflow.triggers.get(0) as ChainedAlertTrigger).condition.idOrCode + ) + + Assertions.assertEquals( + (newReq.workflow.triggers.get(0) as ChainedAlertTrigger).condition.lang, + (req.workflow.triggers.get(0) as ChainedAlertTrigger).condition.lang + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequestTests.kt new file mode 100644 index 00000000..bbfc7793 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/PublishFindingsRequestTests.kt @@ -0,0 +1,28 @@ +package org.opensearch.commons.alerting.action + +import 
org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNotNull +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.randomFinding +import org.opensearch.core.common.io.stream.StreamInput + +class PublishFindingsRequestTests { + + @Test + fun `test delete monitor request`() { + val finding = randomFinding() + val monitorId = "mid" + val req = PublishFindingsRequest(monitorId, finding) + assertNotNull(req) + assertEquals(monitorId, req.monitorId) + assertEquals(finding, req.finding) + + val out = BytesStreamOutput() + req.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = PublishFindingsRequest(sin) + assertEquals(monitorId, newReq.monitorId) + assertEquals(finding.id, newReq.finding.id) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt new file mode 100644 index 00000000..596d16c4 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchCommentRequestTests.kt @@ -0,0 +1,27 @@ +package org.opensearch.commons.alerting.action + +import org.opensearch.action.search.SearchRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import org.opensearch.test.rest.OpenSearchRestTestCase +import java.util.concurrent.TimeUnit + +class SearchCommentRequestTests : OpenSearchTestCase() { + fun `test search comments request`() { + val searchSourceBuilder = SearchSourceBuilder().from(0).size(100).timeout(TimeValue(60, TimeUnit.SECONDS)) + val searchRequest = SearchRequest().indices(OpenSearchRestTestCase.randomAlphaOfLength(10)).source(searchSourceBuilder) + val searchCommentRequest = SearchCommentRequest(searchRequest) + assertNotNull(searchCommentRequest) + + val out = BytesStreamOutput() + searchCommentRequest.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = SearchCommentRequest(sin) + + assertNotNull(newReq.searchRequest) + assertEquals(1, newReq.searchRequest.indices().size) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequestTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequestTests.kt new file mode 100644 index 00000000..169814ea --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/action/SearchMonitorRequestTests.kt @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.alerting.action + +import org.opensearch.action.search.SearchRequest +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.unit.TimeValue +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import org.opensearch.test.rest.OpenSearchRestTestCase +import java.util.concurrent.TimeUnit + +class SearchMonitorRequestTests : OpenSearchTestCase() { + + fun `test search monitors request`() { + val searchSourceBuilder = SearchSourceBuilder().from(0).size(100).timeout(TimeValue(60, TimeUnit.SECONDS)) + val searchRequest = 
SearchRequest().indices(OpenSearchRestTestCase.randomAlphaOfLength(10)).source(searchSourceBuilder) + val searchMonitorRequest = SearchMonitorRequest(searchRequest) + assertNotNull(searchMonitorRequest) + + val out = BytesStreamOutput() + searchMonitorRequest.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newReq = SearchMonitorRequest(sin) + + assertNotNull(newReq.searchRequest) + assertEquals(1, newReq.searchRequest.indices().size) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt new file mode 100644 index 00000000..c5c6d439 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/alerts/AlertErrorTests.kt @@ -0,0 +1,25 @@ +package org.opensearch.commons.alerting.alerts + +import org.junit.Assert +import org.junit.jupiter.api.Test +import java.time.Instant + +class AlertErrorTests { + + @Test + fun `test alertError obfuscates IP addresses in message`() { + val message = + "AlertingException[[5f32db4e2a4fa94f6778cb895dae7a24][10.212.77.91:9300][indices:admin/create]]; " + + "nested: Exception[org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895dae7a24][10.212.77.91:9300]" + + "[indices:admin/create]];; java.lang.Exception: org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895" + + "dae7a24][10.212.77.91:9300][indices:admin/create]" + val alertError = AlertError(Instant.now(), message = message) + Assert.assertEquals( + alertError.message, + "AlertingException[[5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300][indices:admin/create]]; " + + "nested: Exception[org.opensearch.transport.RemoteTransportException: [5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300]" + + "[indices:admin/create]];; java.lang.Exception: org.opensearch.transport.RemoteTransportException: " + + "[5f32db4e2a4fa94f6778cb895dae7a24][x.x.x.x:9300][indices:admin/create]" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt new file mode 100644 index 00000000..1a9e3cbe --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/BucketLevelTriggerTests.kt @@ -0,0 +1,54 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.CONDITION_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.PARENT_BUCKET_PATH +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.BucketLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomBucketLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class BucketLevelTriggerTests { + + @Test + fun `test BucketLevelTrigger asTemplateArgs`() { + val trigger = randomBucketLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg 
field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + assertEquals( + trigger.getParentBucketPath(), + templateArgs[PARENT_BUCKET_PATH], + "Template arg field 'parentBucketPath' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.bucketSelector.script.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.bucketSelector.script.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt new file mode 100644 index 00000000..6d1c1055 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/ClusterMetricsInputTests.kt @@ -0,0 +1,594 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import kotlin.test.assertEquals +import kotlin.test.assertFailsWith + +class ClusterMetricsInputTests { + private var path = "/_cluster/health" + private var pathParams = "" + private var url = "" + + private val validClusters = listOf( + "cluster-name", + "cluster:name" + ) + + private val invalidClusters = listOf( + // Character length less than 1 should return FALSE + "", + + // Character length greater than 255 should return FALSE + (0..255).joinToString(separator = "") { "a" }, + + // Invalid characters should return FALSE + "cluster-#name", + "cluster:#name", + + // More than 1 `:` character should return FALSE + "bad:cluster:name" + ) + + @Test + fun `test valid ClusterMetricsInput creation using HTTP URI component fields`() { + // GIVEN + val testUrl = "http://localhost:9200/_cluster/health" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(testUrl, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test valid ClusterMetricsInput creation using HTTP url field`() { + // GIVEN + path = "" + url = "http://localhost:9200/_cluster/health" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test valid ClusterMetricsInput creation using HTTPS url field`() { + // GIVEN + path = "" + url = "https://localhost:9200/_cluster/health" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test invalid path`() { + // GIVEN + path = "///" + + // WHEN + THEN + assertFailsWith("Invalid URL.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test invalid 
url`() { + // GIVEN + url = "///" + + // WHEN + THEN + assertFailsWith("Invalid URL.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test url field and URI component fields create equal URI`() { + // GIVEN + url = "http://localhost:9200/_cluster/health" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(url, clusterMetricsInput.url) + assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test url field and URI component fields with path params create equal URI`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(url, clusterMetricsInput.url) + assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test url field and URI component fields create different URI`() { + // GIVEN + url = "http://localhost:9200/_cluster/stats" + + // WHEN + THEN + assertFailsWith("The provided URL and URI fields form different URLs.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test url field and URI component fields with path params create different URI`() { + // GIVEN + pathParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200/_cluster/stats/index1,index2,index3,index4,index5" + + // WHEN + THEN + assertFailsWith("The provided URL and URI fields form different URLs.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test ClusterMetricsInput creation when all inputs are empty`() { + // GIVEN + path = "" + pathParams = "" + url = "" + + // WHEN + THEN + assertFailsWith("The uri.api_type field, uri.path field, or uri.uri field must be defined.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test ClusterMetricsInput creation when all inputs but path params are empty`() { + // GIVEN + path = "" + pathParams = "index1,index2,index3,index4,index5" + url = "" + + // WHEN + THEN + assertFailsWith("The uri.api_type field, uri.path field, or uri.uri field must be defined.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test invalid scheme in url field`() { + // GIVEN + path = "" + url = "invalidScheme://localhost:9200/_cluster/health" + + // WHEN + THEN + assertFailsWith("Invalid URL.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test invalid host in url field`() { + // GIVEN + path = "" + url = "http://127.0.0.1:9200/_cluster/health" + + // WHEN + THEN + assertFailsWith("Only host '${ClusterMetricsInput.SUPPORTED_HOST}' is supported.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test invalid port in url field`() { + // GIVEN + path = "" + url = "http://localhost:${ClusterMetricsInput.SUPPORTED_PORT + 1}/_cluster/health" + + // WHEN + THEN + assertFailsWith("Only port '${ClusterMetricsInput.SUPPORTED_PORT}' is supported.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test parsePathParams with no path params`() { + // 
GIVEN + val testUrl = "http://localhost:9200/_cluster/health" + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // WHEN + val params = clusterMetricsInput.parsePathParams() + + // THEN + assertEquals(pathParams, params) + assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test parsePathParams with path params as URI field`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + val testUrl = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // WHEN + val params = clusterMetricsInput.parsePathParams() + + // THEN + assertEquals(pathParams, params) + assertEquals(testUrl, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test parsePathParams with path params in url`() { + // GIVEN + path = "" + val testParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200/_cluster/health/index1,index2,index3,index4,index5" + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // WHEN + val params = clusterMetricsInput.parsePathParams() + + // THEN + assertEquals(testParams, params) + assertEquals(url, clusterMetricsInput.constructedUri.toString()) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test parsePathParams with no path params for ApiType that requires path params`() { + // GIVEN + path = "/_cat/snapshots" + + // WHEN + THEN + assertFailsWith("The API requires path parameters.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test parsePathParams with path params for ApiType that doesn't support path params`() { + // GIVEN + path = "/_cluster/settings" + pathParams = "index1,index2,index3,index4,index5" + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // WHEN + THEN + assertFailsWith("The API does not use path parameters.") { + clusterMetricsInput.parsePathParams() + } + } + + @Test + fun `test parsePathParams with path params containing illegal characters`() { + var testCount = 0 // Start off with count of 1 to account for ApiType.BLANK + ILLEGAL_PATH_PARAMETER_CHARACTERS.forEach { character -> + // GIVEN + pathParams = "index1,index2,$character,index4,index5" + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // WHEN + THEN + assertFailsWith( + "The provided path parameters contain invalid characters or spaces. 
Please omit: " + ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString(" ") + ) { + clusterMetricsInput.parsePathParams() + } + testCount++ + } + assertEquals(ILLEGAL_PATH_PARAMETER_CHARACTERS.size, testCount) + } + + @Test + fun `test ClusterMetricsInput correctly determines ApiType when path is provided as URI component`() { + var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK + ClusterMetricsInput.ClusterMetricType.values() + .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } + .forEach { testApiType -> + // GIVEN + path = testApiType.defaultPath + pathParams = if (testApiType.supportsPathParams) "index1,index2,index3,index4,index5" else "" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(testApiType, clusterMetricsInput.clusterMetricType) + testCount++ + } + assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) + } + + @Test + fun `test ClusterMetricsInput correctly determines ApiType when path and path params are provided as URI components`() { + var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK + ClusterMetricsInput.ClusterMetricType.values() + .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } + .forEach { testApiType -> + // GIVEN + path = testApiType.defaultPath + pathParams = "index1,index2,index3,index4,index5" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(testApiType, clusterMetricsInput.clusterMetricType) + testCount++ + } + assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) + } + + @Test + fun `test ClusterMetricsInput correctly determines ApiType when path is provided in URL field`() { + var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK + ClusterMetricsInput.ClusterMetricType.values() + .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } + .forEach { testApiType -> + // GIVEN + path = "" + pathParams = if (testApiType.supportsPathParams) "index1,index2,index3,index4,index5" else "" + url = "http://localhost:9200${testApiType.defaultPath}" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(testApiType, clusterMetricsInput.clusterMetricType) + testCount++ + } + assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) + } + + @Test + fun `test ClusterMetricsInput correctly determines ApiType when path and path params are provided in URL field`() { + var testCount = 1 // Start off with count of 1 to account for ApiType.BLANK + ClusterMetricsInput.ClusterMetricType.values() + .filter { enum -> enum != ClusterMetricsInput.ClusterMetricType.BLANK } + .forEach { testApiType -> + // GIVEN + path = "" + pathParams = if (testApiType.supportsPathParams) "/index1,index2,index3,index4,index5" else "" + url = "http://localhost:9200${testApiType.defaultPath}$pathParams" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(testApiType, clusterMetricsInput.clusterMetricType) + testCount++ + } + assertEquals(ClusterMetricsInput.ClusterMetricType.values().size, testCount) + } + + @Test + fun `test ClusterMetricsInput cannot determine ApiType when invalid path is provided as URI component`() { + // GIVEN + path = "/_cat/paws" + + // WHEN + THEN + assertFailsWith("The API could not be determined from the provided URI.") { + ClusterMetricsInput(path, pathParams, 
url) + } + } + + @Test + fun `test ClusterMetricsInput cannot determine ApiType when invalid path and path params are provided as URI components`() { + // GIVEN + path = "/_cat/paws" + pathParams = "index1,index2,index3,index4,index5" + + // WHEN + THEN + assertFailsWith("The API could not be determined from the provided URI.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test ClusterMetricsInput cannot determine ApiType when invaid path is provided in URL`() { + // GIVEN + path = "" + url = "http://localhost:9200/_cat/paws" + + // WHEN + THEN + assertFailsWith("The API could not be determined from the provided URI.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test ClusterMetricsInput cannot determine ApiType when invaid path and path params are provided in URL`() { + // GIVEN + path = "" + url = "http://localhost:9200/_cat/paws/index1,index2,index3,index4,index5" + + // WHEN + THEN + assertFailsWith("The API could not be determined from the provided URI.") { + ClusterMetricsInput(path, pathParams, url) + } + } + + @Test + fun `test parseEmptyFields populates empty path and path_params when url is provided`() { + // GIVEN + path = "" + pathParams = "" + val testPath = "/_cluster/health" + val testPathParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200$testPath$testPathParams" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(testPath, clusterMetricsInput.path) + assertEquals(testPathParams, clusterMetricsInput.pathParams) + assertEquals(url, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test parseEmptyFields populates empty url field when path and path_params are provided`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + val testUrl = "http://localhost:9200$path/$pathParams" + + // WHEN + val clusterMetricsInput = ClusterMetricsInput(path, pathParams, url) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(testUrl, clusterMetricsInput.url) + assertEquals(emptyList(), clusterMetricsInput.clusters) + } + + @Test + fun `test a single valid cluster`() { + validClusters.forEach { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = listOf(it) + + // WHEN + val clusterMetricsInput = ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(clusters, clusterMetricsInput.clusters) + } + } + + @Test + fun `test multiple valid clusters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = validClusters + + // WHEN + val clusterMetricsInput = ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + + // THEN + assertEquals(path, clusterMetricsInput.path) + assertEquals(pathParams, clusterMetricsInput.pathParams) + assertEquals(clusters, clusterMetricsInput.clusters) + } + + @Test + fun `test a single invalid cluster`() { + invalidClusters.forEach { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = listOf(it) + + // WHEN + THEN + assertFailsWith("The API 
could not be determined from the provided URI.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + } + } + } + + @Test + fun `test multiple invalid clusters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "" + val clusters = invalidClusters + + // WHEN + THEN + assertFailsWith("The API could not be determined from the provided URI.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = clusters + ) + } + } + + @Test + fun `test url field contains invalid characters`() { + // GIVEN + path = "" + pathParams = "" + url = "http://localhost:9200/${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString("")}" + + // WHEN + THEN + assertFailsWith("Invalid URL syntax.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = listOf() + ) + } + } + + @Test + fun `test URI fields provided and url contains invalid characters`() { + // GIVEN + path = "/_cluster/health" + pathParams = "index1,index2,index3,index4,index5" + url = "http://localhost:9200/${ILLEGAL_PATH_PARAMETER_CHARACTERS.joinToString("")}" + + // WHEN + THEN + assertFailsWith("Invalid URL syntax.") { + ClusterMetricsInput( + path = path, + pathParams = pathParams, + url = url, + clusters = listOf() + ) + } + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/CompositeInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/CompositeInputTests.kt new file mode 100644 index 00000000..ad0d2b24 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/CompositeInputTests.kt @@ -0,0 +1,101 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.randomDelegate +import org.opensearch.commons.alerting.randomSequence + +class CompositeInputTests { + @Test + fun `test sequence asTemplateArgs`() { + val sequence = randomSequence() + // WHEN + val templateArgs = sequence.asTemplateArg() + + // THEN + val templateDelegates = templateArgs + Assertions.assertEquals( + templateDelegates[Sequence.DELEGATES_FIELD], + sequence.delegates, + "Template args 'id' field does not match:" + ) + } + + @Test + fun `test delegate asTemplateArgs`() { + val delegate = randomDelegate() + // WHEN + val templateArgs = delegate.asTemplateArg() + + // THEN + val templateDelegates = templateArgs + Assertions.assertEquals( + templateDelegates[Delegate.ORDER_FIELD], + delegate.order, + "Template args 'id' field does not match:" + ) + Assertions.assertEquals( + templateDelegates[Delegate.MONITOR_ID_FIELD], + delegate.monitorId, + "Template args 'id' field does not match:" + ) + } + + @Test + fun `test create Delegate with illegal order value`() { + try { + randomDelegate(-1) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + Assertions.assertEquals( + "Invalid delgate order", + e.message + ) + } + } + + @Test + fun `test create Delegate with illegal monitorId value`() { + try { + randomDelegate(1, "") + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + e.message?.let { + Assertions.assertTrue( + it.contains("Invalid characters in id") + + ) + } + } + } + + @Test + fun `test create Chained Findings with illegal monitorId value and empty monitorIds list`() { + try { + ChainedMonitorFindings("") + Assertions.fail("Expecting an illegal 
argument exception") + } catch (e: IllegalArgumentException) { + e.message?.let { + Assertions.assertTrue( + it.contains("at least one of fields, 'monitorIds' and 'monitorId' should be provided") + + ) + } + } + } + + @Test + fun `test create Chained Findings with null monitorId value and monitorIds list with blank monitorIds`() { + try { + ChainedMonitorFindings("", listOf("", "")) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + e.message?.let { + Assertions.assertTrue( + it.contains("Invalid characters in id") + + ) + } + } + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt new file mode 100644 index 00000000..5f67f2a3 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DataSourcesTests.kt @@ -0,0 +1,36 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.core.common.io.stream.StreamInput + +class DataSourcesTests { + @Test + fun `Test DataSources construction with no comments indices`() { + val dataSources = DataSources( + ScheduledJob.DOC_LEVEL_QUERIES_INDEX, + ".opensearch-alerting-finding-history-write", + "<.opensearch-alerting-finding-history-{now/d}-1>", + ".opendistro-alerting-alerts", + ".opendistro-alerting-alert-history-write", + "<.opendistro-alerting-alert-history-{now/d}-1>", + mapOf(), + false + ) + Assertions.assertNotNull(dataSources) + + val out = BytesStreamOutput() + dataSources.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDataSources = DataSources(sin) + Assertions.assertEquals(ScheduledJob.DOC_LEVEL_QUERIES_INDEX, newDataSources.queryIndex) + Assertions.assertEquals(".opensearch-alerting-finding-history-write", newDataSources.findingsIndex) + Assertions.assertEquals("<.opensearch-alerting-finding-history-{now/d}-1>", newDataSources.findingsIndexPattern) + Assertions.assertEquals(".opendistro-alerting-alerts", newDataSources.alertsIndex) + Assertions.assertEquals(".opendistro-alerting-alert-history-write", newDataSources.alertsHistoryIndex) + Assertions.assertEquals("<.opendistro-alerting-alert-history-{now/d}-1>", newDataSources.alertsHistoryIndexPattern) + Assertions.assertEquals(mapOf<String, Map<String, String>>(), newDataSources.queryIndexMappingsByType) + Assertions.assertEquals(false, newDataSources.findingsEnabled) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt new file mode 100644 index 00000000..e99dc3c8 --- --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DocLevelMonitorInputTests.kt @@ -0,0 +1,128 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.xcontent.XContentType +import org.opensearch.commons.alerting.randomDocLevelMonitorInput +import org.opensearch.commons.alerting.randomDocLevelQuery +import org.opensearch.commons.alerting.util.string +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.lang.IllegalArgumentException + +class DocLevelMonitorInputTests { + @Test + fun `test DocLevelQuery asTemplateArgs`() { + // GIVEN + val query = randomDocLevelQuery() + + // WHEN + 
val templateArgs = query.asTemplateArg() + + // THEN + Assertions.assertEquals( + templateArgs[DocLevelQuery.QUERY_ID_FIELD], + query.id, + "Template args 'id' field does not match:" + ) + Assertions.assertEquals( + templateArgs[DocLevelQuery.QUERY_FIELD], + query.query, + "Template args 'query' field does not match:" + ) + Assertions.assertEquals( + templateArgs[DocLevelQuery.NAME_FIELD], + query.name, + "Template args 'name' field does not match:" + ) + Assertions.assertEquals( + templateArgs[DocLevelQuery.TAGS_FIELD], + query.tags, + "Template args 'tags' field does not match:" + ) + } + + @Test + fun `test create Doc Level Query with invalid name length`() { + val stringBuilder = StringBuilder() + + // test empty string + val emptyString = stringBuilder.toString() + try { + randomDocLevelQuery(name = emptyString) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + Assertions.assertEquals( + "The query name, $emptyString, should be between 1 - 256 characters.", + e.message + ) + } + + // test string with 257 chars + repeat(257) { + stringBuilder.append("a") + } + val badString = stringBuilder.toString() + + try { + randomDocLevelQuery(name = badString) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + Assertions.assertEquals( + "The query name, $badString, should be between 1 - 256 characters.", + e.message + ) + } + } + + @Test + @Throws(IllegalArgumentException::class) + fun `test create Doc Level Query with invalid characters for tags`() { + val badString = "[(){}]" + try { + randomDocLevelQuery(tags = listOf(badString)) + Assertions.fail("Expecting an illegal argument exception") + } catch (e: IllegalArgumentException) { + Assertions.assertEquals( + "The query tag, $badString, contains an invalid character: [' ','[',']','{','}','(',')']", + e.message + ) + } + } + + @Test + fun `test DocLevelMonitorInput asTemplateArgs`() { + // GIVEN + val input = randomDocLevelMonitorInput() + + // test + input.toXContent(XContentBuilder.builder(XContentType.JSON.xContent()), ToXContent.EMPTY_PARAMS).string() + // assertEquals("test", inputString) + // test end + // WHEN + val templateArgs = input.asTemplateArg() + + // THEN + Assertions.assertEquals( + templateArgs[DocLevelMonitorInput.DESCRIPTION_FIELD], + input.description, + "Template args 'description' field does not match:" + ) + Assertions.assertEquals( + templateArgs[DocLevelMonitorInput.INDICES_FIELD], + input.indices, + "Template args 'indices' field does not match:" + ) + Assertions.assertEquals( + input.queries.size, + (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).size, + "Template args 'queries' field does not contain the expected number of queries:" + ) + input.queries.forEach { + Assertions.assertTrue( + (templateArgs[DocLevelMonitorInput.QUERIES_FIELD] as List<*>).contains(it.asTemplateArg()), + "Template args 'queries' field does not match:" + ) + } + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt new file mode 100644 index 00000000..7375223c --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/DocumentLevelTriggerTests.kt @@ -0,0 +1,48 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.CONDITION_FIELD +import 
org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.DocumentLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class DocumentLevelTriggerTests { + + @Test + fun `test DocumentLevelTrigger asTemplateArgs`() { + val trigger = randomDocumentLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.condition.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.condition.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/FindingTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/FindingTests.kt new file mode 100644 index 00000000..10579a41 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/FindingTests.kt @@ -0,0 +1,40 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.randomFinding + +internal class FindingTests { + @Test + fun `test finding asTemplateArgs`() { + // GIVEN + val finding = randomFinding() + + // WHEN + val templateArgs = finding.asTemplateArg() + + // THEN + assertEquals(templateArgs[Finding.FINDING_ID_FIELD], finding.id, "Template args 'id' field does not match:") + assertEquals( + templateArgs[Finding.RELATED_DOC_IDS_FIELD], + finding.relatedDocIds, + "Template args 'relatedDocIds' field does not match:" + ) + assertEquals(templateArgs[Finding.MONITOR_ID_FIELD], finding.monitorId, "Template args 'monitorId' field does not match:") + assertEquals( + templateArgs[Finding.MONITOR_NAME_FIELD], + finding.monitorName, + "Template args 'monitorName' field does not match:" + ) + assertEquals( + templateArgs[Finding.QUERIES_FIELD], + finding.docLevelQueries, + "Template args 'queries' field does not match:" + ) + assertEquals( + templateArgs[Finding.TIMESTAMP_FIELD], + finding.timestamp.toEpochMilli(), + "Template args 'timestamp' field does not match:" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/MockScheduledJob.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/MockScheduledJob.kt new file mode 
100644 index 00000000..59f3771b --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/MockScheduledJob.kt @@ -0,0 +1,31 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentBuilder +import java.io.IOException +import java.time.Instant + +class MockScheduledJob( + override val id: String, + override val version: Long, + override val name: String, + override val type: String, + override val enabled: Boolean, + override val schedule: Schedule, + override var lastUpdateTime: Instant, + override val enabledTime: Instant? +) : ScheduledJob { + override fun fromDocument(id: String, version: Long): ScheduledJob { + TODO("not implemented") + } + + override fun toXContent(builder: XContentBuilder?, params: ToXContent.Params?): XContentBuilder { + TODO("not implemented") + } + + @Throws(IOException::class) + override fun writeTo(out: StreamOutput) { + TODO("not implemented") + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt new file mode 100644 index 00000000..9529e44d --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/MonitorsTests.kt @@ -0,0 +1,49 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.alerting.util.IndexUtils +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class MonitorsTests { + + @Test + fun `test monitor asTemplateArgs`() { + val monitor = randomQueryLevelMonitor(enabled = true) + + val templateArgs = monitor.asTemplateArg() + + assertEquals(monitor.id, templateArgs[IndexUtils._ID], "Template arg field 'id' doesn't match") + assertEquals( + monitor.version, + templateArgs[IndexUtils._VERSION], + "Template arg field 'version' doesn't match" + ) + assertEquals(monitor.name, templateArgs[Monitor.NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals( + monitor.enabled, + templateArgs[Monitor.ENABLED_FIELD], + "Template arg field 'enabled' doesn't match" + ) + assertEquals( + monitor.monitorType.toString(), + templateArgs[Monitor.MONITOR_TYPE_FIELD], + "Template arg field 'monitoryType' doesn't match" + ) + assertEquals( + monitor.enabledTime?.toEpochMilli(), + templateArgs[Monitor.ENABLED_TIME_FIELD], + "Template arg field 'enabledTime' doesn't match" + ) + assertEquals( + monitor.lastUpdateTime.toEpochMilli(), + templateArgs[Monitor.LAST_UPDATE_TIME_FIELD], + "Template arg field 'lastUpdateTime' doesn't match" + ) + assertNotNull(templateArgs[Monitor.SCHEDULE_FIELD], "Template arg field 'schedule' not set") + val inputs = templateArgs[Monitor.INPUTS_FIELD] as? 
List<*> + assertNotNull(inputs, "Template arg field 'inputs' not set") + assertEquals(1, inputs.size, "Template arg field 'inputs' is not populated") + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt new file mode 100644 index 00000000..824e1b1e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/QueryLevelTriggerTests.kt @@ -0,0 +1,48 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.CONDITION_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.LANG_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.SCRIPT_FIELD +import org.opensearch.commons.alerting.model.QueryLevelTrigger.Companion.SOURCE_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ACTIONS_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.Trigger.Companion.SEVERITY_FIELD +import org.opensearch.commons.alerting.randomQueryLevelTrigger +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class QueryLevelTriggerTests { + + @Test + fun `test QueryLevelTrigger asTemplateArgs`() { + val trigger = randomQueryLevelTrigger() + + val templateArgs = trigger.asTemplateArg() + + assertEquals(trigger.id, templateArgs[ID_FIELD], "Template arg field 'id' doesn't match") + assertEquals(trigger.name, templateArgs[NAME_FIELD], "Template arg field 'name' doesn't match") + assertEquals(trigger.severity, templateArgs[SEVERITY_FIELD], "Template arg field 'severity' doesn't match") + val actions = templateArgs[ACTIONS_FIELD] as List<*> + assertEquals( + trigger.actions.size, + actions.size, + "Template arg field 'actions' doesn't match" + ) + val condition = templateArgs[CONDITION_FIELD] as? Map<*, *> + assertNotNull(condition, "Template arg field 'condition' is empty") + val script = condition[SCRIPT_FIELD] as? 
Map<*, *> + assertNotNull(script, "Template arg field 'condition.script' is empty") + assertEquals( + trigger.condition.idOrCode, + script[SOURCE_FIELD], + "Template arg field 'script.source' doesn't match" + ) + assertEquals( + trigger.condition.lang, + script[LANG_FIELD], + "Template arg field 'script.lang' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt new file mode 100644 index 00000000..e0423d56 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/ScheduleTest.kt @@ -0,0 +1,393 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.Schedule.Companion.CRON_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.EXPRESSION_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.INTERVAL_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.PERIOD_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.TIMEZONE_FIELD +import org.opensearch.commons.alerting.model.Schedule.Companion.UNIT_FIELD +import org.opensearch.commons.alerting.util.string +import org.opensearch.core.xcontent.ToXContent +import java.time.Instant +import java.time.ZoneId +import java.time.ZonedDateTime +import java.time.temporal.ChronoUnit +import kotlin.test.assertEquals +import kotlin.test.assertFailsWith +import kotlin.test.assertFalse +import kotlin.test.assertNotNull +import kotlin.test.assertTrue + +class ScheduleTest : XContentTestBase { + @Test + fun `test time zone conversion`() { + val cronExpression = "31 * * * *" // Run at minute 31. + // This is 2018-09-27 20:00:58 GMT which will in conversion lead to 30min 58 seconds IST + val testInstance = Instant.ofEpochSecond(1538164858L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Kolkata"), testInstance) + val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") + } + + @Test + fun `test time zone`() { + val cronExpression = "0 11 * * 3" // Run at 11:00 on Wednesday. 
+ // This is 2018-09-26 01:59:58 GMT which will in conversion lead to Wednesday 10:59:58 JST + val testInstance = Instant.ofEpochSecond(1537927198L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Tokyo"), testInstance) + val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") + } + + @Test + fun `test cron calculates next time to execute after restart`() { + val cronExpression = "* * * * *" + // This is 2018-09-26 01:59:58 GMT + val testInstance = Instant.ofEpochSecond(1537927198L) + // This enabled time represents GMT: Wednesday, September 19, 2018 3:19:51 AM + val enabledTimeInstance = Instant.ofEpochSecond(1537327191) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("America/Los_Angeles"), testInstance) + // The nextTimeToExecute should be the minute after the test instance, not enabledTimeInstance, replicating a cluster restart + val nextTimeToExecute = cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, null) + assertNotNull(nextTimeToExecute, "There should be next execute time") + assertEquals( + testInstance.plusSeconds(2L), + nextTimeToExecute, + "nextTimeToExecute should be 2 seconds after test instance" + ) + } + + @Test + fun `test cron calculates next time to execute using cached previous time`() { + val cronExpression = "* * * * *" + // This is 2018-09-26 01:59:58 GMT + val previousExecutionTimeInstance = Instant.ofEpochSecond(1537927198L) + // This enabled time represents GMT: Wednesday, September 19, 2018 3:19:51 AM + val enabledTimeInstance = Instant.ofEpochSecond(1537327191) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("America/Los_Angeles")) + // The nextTimeToExecute should be the minute after the previous execution time instance, not enabledTimeInstance + val nextTimeToExecute = + cronSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) + assertNotNull(nextTimeToExecute, "There should be next execute time") + assertEquals( + previousExecutionTimeInstance.plusSeconds(2L), + nextTimeToExecute, + "nextTimeToExecute should be 2 seconds after test instance" + ) + } + + @Test + fun `test interval calculates next time to execute using enabled time`() { + // This enabled time represents 2018-09-26 01:58:58 GMT + val enabledTimeInstance = Instant.ofEpochSecond(1537927138L) + // This is 2018-09-26 01:59:59 GMT, which is 61 seconds after enabledTime + val testInstance = Instant.ofEpochSecond(1537927199L) + + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) + + // The nextTimeToExecute should be 120 seconds after the enabled time + val nextTimeToExecute = intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, null) + assertNotNull(nextTimeToExecute, "There should be next execute time") + assertEquals( + enabledTimeInstance.plusSeconds(120L), + nextTimeToExecute, + "nextTimeToExecute should be 120 seconds after enabled time" + ) + } + + @Test + fun `test interval calculates next time to execute using cached previous time`() { + // This is 2018-09-26 01:59:58 GMT + val previousExecutionTimeInstance = Instant.ofEpochSecond(1537927198L) + // This is 2018-09-26 02:00:00 GMT + val testInstance = Instant.ofEpochSecond(1537927200L) + // This enabled time represents 2018-09-26 01:58:58 GMT + val enabledTimeInstance = Instant.ofEpochSecond(1537927138L) + + val intervalSchedule = 
IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) + + // The nextTimeToExecute should be the minute after the previous execution time instance + val nextTimeToExecute = + intervalSchedule.getExpectedNextExecutionTime(enabledTimeInstance, previousExecutionTimeInstance) + assertNotNull(nextTimeToExecute, "There should be next execute time") + assertEquals( + previousExecutionTimeInstance.plusSeconds(60L), + nextTimeToExecute, + "nextTimeToExecute should be 60 seconds after previous execution time" + ) + } + + @Test + fun `test cron schedule round trip`() { + val cronExpression = "0 * * * *" + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("Asia/Tokyo")) + + val scheduleString = cronSchedule.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedSchedule = Schedule.parse(parser(scheduleString)) + + assertTrue(parsedSchedule is CronSchedule, "Parsed scheduled is not Cron Scheduled Type.") + assertEquals(cronSchedule, parsedSchedule, "Round tripping Cron Schedule doesn't work") + } + + @Test + fun `test interval schedule round trip`() { + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) + + val scheduleString = intervalSchedule.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedSchedule = Schedule.parse(parser(scheduleString)) + assertTrue(parsedSchedule is IntervalSchedule, "Parsed scheduled is not Interval Scheduled Type.") + assertEquals(intervalSchedule, parsedSchedule, "Round tripping Interval Schedule doesn't work") + } + + @Test + fun `test cron invalid missing timezone`() { + val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse(parser(scheduleString)) + } + } + + @Test + fun `test cron invalid timezone rule`() { + val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Going/Nowhere\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse(parser(scheduleString)) + } + } + + @Test + fun `test cron invalid timezone offset`() { + val scheduleString = "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"+++9\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse(parser(scheduleString)) + } + } + + @Test + fun `test invalid type`() { + val scheduleString = "{\"foobarzzz\":{\"expression\":\"0 * * * *\",\"timezone\":\"+++9\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse( + parser( + scheduleString + ) + ) + } + } + + @Test + fun `test two types`() { + val scheduleString = + "{\"cron\":{\"expression\":\"0 * * * *\",\"timezone\":\"Asia/Tokyo\"}, \"period\":{\"interval\":\"1\",\"unit\":\"Minutes\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse(parser(scheduleString)) + } + } + + @Test + fun `test invalid cron expression`() { + val scheduleString = "{\"cron\":{\"expression\":\"5 * 1 * * *\",\"timezone\":\"Asia/Tokyo\"}}" + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + Schedule.parse(parser(scheduleString)) + } + } + + @Test + fun `test interval period starting at`() { + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) + + val (periodStartTime, periodEndTime) = intervalSchedule.getPeriodStartingAt(null) + + assertEquals(periodStartTime, periodEndTime.minus(1, ChronoUnit.MINUTES), "Period didn't match 
interval") + + val startTime = Instant.now() + // Kotlin has destructuring declarations but no destructuring assignments? Gee, thanks... + val (periodStartTime2, _) = intervalSchedule.getPeriodStartingAt(startTime) + assertEquals(startTime, periodStartTime2, "Periods doesn't start at provided start time") + } + + @Test + fun `test interval period ending at`() { + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES) + + val (periodStartTime, periodEndTime) = intervalSchedule.getPeriodEndingAt(null) + + assertEquals(periodStartTime, periodEndTime.minus(1, ChronoUnit.MINUTES), "Period didn't match interval") + + val endTime = Instant.now() + // destructuring declarations but no destructuring assignments? Gee, thanks... https://youtrack.jetbrains.com/issue/KT-11362 + val (_, periodEndTime2) = intervalSchedule.getPeriodEndingAt(endTime) + assertEquals(endTime, periodEndTime2, "Periods doesn't end at provided end time") + } + + @Test + fun `test cron period starting at`() { + val cronSchedule = CronSchedule("0 * * * *", ZoneId.of("Asia/Tokyo")) + + val (startTime1, endTime) = cronSchedule.getPeriodStartingAt(null) + assertTrue(startTime1 <= Instant.now(), "startTime is in future; should be the last execution time") + assertTrue(cronSchedule.executionTime.isMatch(ZonedDateTime.ofInstant(endTime, ZoneId.of("Asia/Tokyo")))) + + val (startTime, _) = cronSchedule.getPeriodStartingAt(endTime) + assertEquals(startTime, endTime, "Subsequent period doesn't start at provided end time") + } + + @Test + fun `test cron period ending at`() { + val cronSchedule = CronSchedule("0 * * * *", ZoneId.of("Asia/Tokyo")) + + val (startTime, endTime1) = cronSchedule.getPeriodEndingAt(null) + assertTrue(endTime1 >= Instant.now(), "endTime is in past; should be the next execution time") + assertTrue(cronSchedule.executionTime.isMatch(ZonedDateTime.ofInstant(startTime, ZoneId.of("Asia/Tokyo")))) + + val (_, endTime2) = cronSchedule.getPeriodEndingAt(startTime) + assertEquals(endTime2, startTime, "Previous period doesn't end at provided start time") + } + + @Test + fun `cron job not running on time`() { + val cronSchedule = createTestCronSchedule() + + val lastExecutionTime = 1539715560L + assertFalse(cronSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) + } + + @Test + fun `cron job running on time`() { + val cronSchedule = createTestCronSchedule() + + val lastExecutionTime = 1539715620L + assertTrue(cronSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) + } + + @Test + fun `period job running exactly at interval`() { + val testInstance = Instant.ofEpochSecond(1539715678L) + val enabledTime = Instant.ofEpochSecond(1539615178L) + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) + + val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(60L, nextTimeToExecute.seconds, "Excepted 60 seconds but was ${nextTimeToExecute.seconds}") + } + + @Test + fun `period job 3 minutes`() { + val testInstance = Instant.ofEpochSecond(1539615226L) + val enabledTime = Instant.ofEpochSecond(1539615144L) + val intervalSchedule = IntervalSchedule(3, ChronoUnit.MINUTES, testInstance) + + val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(98L, nextTimeToExecute.seconds, "Excepted 98 seconds but was ${nextTimeToExecute.seconds}") + } + + @Test + fun `period job running on 
time`() { + val intervalSchedule = createTestIntervalSchedule() + + val lastExecutionTime = 1539715620L + assertTrue(intervalSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) + } + + @Test + fun `period job not running on time`() { + val intervalSchedule = createTestIntervalSchedule() + + val lastExecutionTime = 1539715560L + assertFalse(intervalSchedule.runningOnTime(Instant.ofEpochSecond(lastExecutionTime))) + } + + @Test + fun `period job test null last execution time`() { + val intervalSchedule = createTestIntervalSchedule() + + assertTrue(intervalSchedule.runningOnTime(null)) + } + + private fun createTestIntervalSchedule(): IntervalSchedule { + val testInstance = Instant.ofEpochSecond(1539715678L) + val enabledTime = Instant.ofEpochSecond(1539615146L) + val intervalSchedule = IntervalSchedule(1, ChronoUnit.MINUTES, testInstance) + + val nextTimeToExecute = intervalSchedule.nextTimeToExecute(enabledTime) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(28L, nextTimeToExecute.seconds, "Excepted 28 seconds but was ${nextTimeToExecute.seconds}") + + return intervalSchedule + } + + private fun createTestCronSchedule(): CronSchedule { + val cronExpression = "* * * * *" + val testInstance = Instant.ofEpochSecond(1539715678L) + + val cronSchedule = CronSchedule(cronExpression, ZoneId.of("UTC"), testInstance) + val nextTimeToExecute = cronSchedule.nextTimeToExecute(Instant.now()) + assertNotNull(nextTimeToExecute, "There should be next execute time.") + assertEquals(2L, nextTimeToExecute.seconds, "Execute time should be 2 seconds") + + return cronSchedule + } + + @Test + fun `test invalid interval units`() { + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + IntervalSchedule(1, ChronoUnit.SECONDS) + } + + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + IntervalSchedule(1, ChronoUnit.MONTHS) + } + + assertFailsWith(IllegalArgumentException::class, "Expected IllegalArgumentException") { + IntervalSchedule(-1, ChronoUnit.MINUTES) + } + } + + @Test + fun `test IntervalSchedule as asTemplateArgs`() { + val schedule = createTestIntervalSchedule() + + val templateArgs = schedule.asTemplateArg() + + val period = templateArgs[PERIOD_FIELD] as? Map<*, *> + assertNotNull(period, "Template arg field 'period' is empty") + assertEquals( + schedule.interval, + period[INTERVAL_FIELD], + "Template arg field 'interval' doesn't match" + ) + assertEquals( + schedule.unit.toString(), + period[UNIT_FIELD], + "Template arg field 'unit' doesn't match" + ) + } + + @Test + fun `test CronSchedule as asTemplateArgs`() { + val schedule = createTestCronSchedule() + + val templateArgs = schedule.asTemplateArg() + + val cron = templateArgs[CRON_FIELD] as? 
Map<*, *> + assertNotNull(cron, "Template arg field 'cron' is empty") + assertEquals( + schedule.expression, + cron[EXPRESSION_FIELD], + "Template arg field 'expression' doesn't match" + ) + assertEquals( + schedule.timezone.toString(), + cron[TIMEZONE_FIELD], + "Template arg field 'timezone' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt new file mode 100644 index 00000000..0fc0f656 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/SearchInputTests.kt @@ -0,0 +1,32 @@ +package org.opensearch.commons.alerting.model + +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.SearchInput.Companion.INDICES_FIELD +import org.opensearch.commons.alerting.model.SearchInput.Companion.QUERY_FIELD +import org.opensearch.commons.alerting.model.SearchInput.Companion.SEARCH_FIELD +import org.opensearch.commons.alerting.randomSearchInput +import kotlin.test.assertEquals +import kotlin.test.assertNotNull + +class SearchInputTests { + + @Test + fun `test SearchInput asTemplateArgs`() { + val searchInput = randomSearchInput() + + val templateArgs = searchInput.asTemplateArg() + + val search = templateArgs[SEARCH_FIELD] as? Map<*, *> + assertNotNull(search, "Template arg field 'search' is empty") + assertEquals( + searchInput.indices, + search[INDICES_FIELD], + "Template arg field 'indices' doesn't match" + ) + assertEquals( + searchInput.query.toString(), + search[QUERY_FIELD], + "Template arg field 'query' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt new file mode 100644 index 00000000..170317b2 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/WriteableTests.kt @@ -0,0 +1,488 @@ +package org.opensearch.commons.alerting.model + +import org.junit.Assert +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.UUIDs +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.alerting.randomAction +import org.opensearch.commons.alerting.randomActionExecutionPolicy +import org.opensearch.commons.alerting.randomBucketLevelMonitorRunResult +import org.opensearch.commons.alerting.randomBucketLevelTrigger +import org.opensearch.commons.alerting.randomBucketLevelTriggerRunResult +import org.opensearch.commons.alerting.randomChainedAlertTrigger +import org.opensearch.commons.alerting.randomDocLevelQuery +import org.opensearch.commons.alerting.randomDocumentLevelMonitorRunResult +import org.opensearch.commons.alerting.randomDocumentLevelTrigger +import org.opensearch.commons.alerting.randomInputRunResults +import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.alerting.randomQueryLevelMonitorRunResult +import org.opensearch.commons.alerting.randomQueryLevelTrigger +import 
org.opensearch.commons.alerting.randomQueryLevelTriggerRunResult +import org.opensearch.commons.alerting.randomThrottle +import org.opensearch.commons.alerting.randomUser +import org.opensearch.commons.alerting.randomUserEmpty +import org.opensearch.commons.alerting.util.IndexUtils +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.common.io.stream.StreamOutput +import org.opensearch.core.common.io.stream.Writeable +import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import java.io.IOException +import java.time.Instant +import java.time.temporal.ChronoUnit +import kotlin.test.assertTrue + +class WriteableTests { + + @Test + fun `test throttle as stream`() { + val throttle = randomThrottle() + val out = BytesStreamOutput() + throttle.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newThrottle = Throttle(sin) + Assertions.assertEquals(throttle, newThrottle, "Round tripping Throttle doesn't work") + } + + @Test + fun `test action as stream`() { + val action = randomAction() + val out = BytesStreamOutput() + action.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newAction = Action(sin) + Assertions.assertEquals(action, newAction, "Round tripping Action doesn't work") + } + + @Test + fun `test action as stream with null subject template`() { + val action = randomAction().copy(subjectTemplate = null) + val out = BytesStreamOutput() + action.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newAction = Action(sin) + Assertions.assertEquals(action, newAction, "Round tripping Action doesn't work") + } + + @Test + fun `test action as stream with null throttle`() { + val action = randomAction().copy(throttle = null) + val out = BytesStreamOutput() + action.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newAction = Action(sin) + Assertions.assertEquals(action, newAction, "Round tripping Action doesn't work") + } + + @Test + fun `test action as stream with throttled enabled and null throttle`() { + val action = randomAction().copy(throttle = null).copy(throttleEnabled = true) + val out = BytesStreamOutput() + action.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newAction = Action(sin) + Assertions.assertEquals(action, newAction, "Round tripping Action doesn't work") + } + + @Test + fun `test query-level monitor as stream`() { + val monitor = randomQueryLevelMonitor().copy(inputs = listOf(SearchInput(emptyList(), SearchSourceBuilder()))) + val out = BytesStreamOutput() + monitor.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newMonitor = Monitor(sin) + Assertions.assertEquals(monitor, newMonitor, "Round tripping QueryLevelMonitor doesn't work") + } + + @Test + fun `test query-level trigger as stream`() { + val trigger = randomQueryLevelTrigger() + val out = BytesStreamOutput() + trigger.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newTrigger = QueryLevelTrigger.readFrom(sin) + Assertions.assertEquals(trigger, newTrigger, "Round tripping QueryLevelTrigger doesn't work") + } + + @Test + fun `test bucket-level trigger as stream`() { + val trigger = randomBucketLevelTrigger() + val out = BytesStreamOutput() + trigger.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newTrigger = BucketLevelTrigger.readFrom(sin) + 
Assertions.assertEquals(trigger, newTrigger, "Round tripping BucketLevelTrigger doesn't work") + } + + @Test + fun `test doc-level trigger as stream`() { + val trigger = randomDocumentLevelTrigger() + val out = BytesStreamOutput() + trigger.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newTrigger = DocumentLevelTrigger.readFrom(sin) + Assertions.assertEquals(trigger, newTrigger, "Round tripping DocumentLevelTrigger doesn't work") + } + + @Test + fun `test doc-level query as stream`() { + val dlq = randomDocLevelQuery() + val out = BytesStreamOutput() + dlq.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDlq = DocLevelQuery.readFrom(sin) + Assertions.assertEquals(dlq, newDlq, "Round tripping DocLevelQuery doesn't work") + assertTrue(newDlq.queryFieldNames.isEmpty()) + } + + @Test + fun `test doc-level query with query Field Names as stream`() { + val dlq = randomDocLevelQuery().copy(queryFieldNames = listOf("f1", "f2")) + val out = BytesStreamOutput() + dlq.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newDlq = DocLevelQuery.readFrom(sin) + assertTrue(newDlq.queryFieldNames.contains(dlq.queryFieldNames[0])) + assertTrue(newDlq.queryFieldNames.contains(dlq.queryFieldNames[1])) + Assertions.assertEquals(dlq, newDlq, "Round tripping DocLevelQuery doesn't work") + } + + @Test + fun `test chained alert trigger as stream`() { + val trigger = randomChainedAlertTrigger() + val out = BytesStreamOutput() + trigger.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newTrigger = ChainedAlertTrigger.readFrom(sin) + Assertions.assertEquals(trigger, newTrigger, "Round tripping DocumentLevelTrigger doesn't work") + } + + @Test + fun `test searchinput as stream`() { + val input = SearchInput(emptyList(), SearchSourceBuilder()) + val out = BytesStreamOutput() + input.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newInput = SearchInput(sin) + Assertions.assertEquals(input, newInput, "Round tripping MonitorRunResult doesn't work") + } + + @Test + fun `test user as stream`() { + val user = randomUser() + val out = BytesStreamOutput() + user.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newUser = User(sin) + Assertions.assertEquals(user, newUser, "Round tripping User doesn't work") + } + + @Test + fun `test empty user as stream`() { + val user = randomUserEmpty() + val out = BytesStreamOutput() + user.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newUser = User(sin) + Assertions.assertEquals(user, newUser, "Round tripping User doesn't work") + } + + @Test + fun `test action execution policy as stream`() { + val actionExecutionPolicy = randomActionExecutionPolicy() + val out = BytesStreamOutput() + actionExecutionPolicy.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newActionExecutionPolicy = ActionExecutionPolicy.readFrom(sin) + Assertions.assertEquals( + actionExecutionPolicy, + newActionExecutionPolicy, + "Round tripping ActionExecutionPolicy doesn't work" + ) + } + + @Test + fun `test Comment object`() { + val user = randomUser() + val createdTime = Instant.now() + val comment = Comment( + "123", + "456", + "alert", + "content", + createdTime, + null, + user + ) + Assertions.assertNotNull(comment) + val out = BytesStreamOutput() + comment.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newComment = Comment(sin) + 
Assertions.assertEquals("123", newComment.id) + Assertions.assertEquals("456", newComment.entityId) + Assertions.assertEquals("alert", newComment.entityType) + Assertions.assertEquals("content", newComment.content) + Assertions.assertEquals(createdTime, newComment.createdTime) + Assertions.assertEquals(user, newComment.user) + } + + @Test + fun `test actionrunresult as stream`() { + val actionRunResult = randomActionRunResult() + val out = BytesStreamOutput() + actionRunResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newActionRunResult = ActionRunResult(sin) + OpenSearchTestCase.assertEquals( + "Round tripping ActionRunResult doesn't work", + actionRunResult, + newActionRunResult + ) + } + + @Test + fun `test query-level triggerrunresult as stream`() { + val runResult = randomQueryLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = QueryLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals(runResult.triggerName, newRunResult.triggerName) + OpenSearchTestCase.assertEquals(runResult.triggered, newRunResult.triggered) + OpenSearchTestCase.assertEquals(runResult.error, newRunResult.error) + OpenSearchTestCase.assertEquals(runResult.actionResults, newRunResult.actionResults) + } + + @Test + fun `test bucket-level triggerrunresult as stream`() { + val runResult = randomBucketLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = BucketLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals("Round tripping ActionRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test doc-level triggerrunresult as stream`() { + val runResult = randomDocumentLevelTriggerRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = DocumentLevelTriggerRunResult(sin) + OpenSearchTestCase.assertEquals("Round tripping ActionRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test inputrunresult as stream`() { + val runResult = randomInputRunResults() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = InputRunResults.readFrom(sin) + OpenSearchTestCase.assertEquals("Round tripping InputRunResults doesn't work", runResult, newRunResult) + } + + @Test + fun `test query-level monitorrunresult as stream`() { + val runResult = randomQueryLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = MonitorRunResult<QueryLevelTriggerRunResult>(sin) + OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test bucket-level monitorrunresult as stream`() { + val runResult = randomBucketLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRunResult = MonitorRunResult<BucketLevelTriggerRunResult>(sin) + OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test doc-level monitorrunresult as stream`() { + val runResult = randomDocumentLevelMonitorRunResult() + val out = BytesStreamOutput() + runResult.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + 
val newRunResult = MonitorRunResult<DocumentLevelTriggerRunResult>(sin) + OpenSearchTestCase.assertEquals("Round tripping MonitorRunResult doesn't work", runResult, newRunResult) + } + + @Test + fun `test DocumentLevelTriggerRunResult as stream`() { + val workflow = randomDocumentLevelTriggerRunResult() + val out = BytesStreamOutput() + workflow.writeTo(out) + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newWorkflow = DocumentLevelTriggerRunResult(sin) + Assert.assertEquals("Round tripping dltrr failed", newWorkflow, workflow) + } + + @Test + fun `test RemoteMonitorInput as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val remoteMonitorInput = RemoteMonitorInput(myObjOut.bytes()) + + val out = BytesStreamOutput() + remoteMonitorInput.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteMonitorInput = RemoteMonitorInput(sin) + val newMyMonitorInput = MyMonitorInput(StreamInput.wrap(newRemoteMonitorInput.input.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorInput failed", newMyMonitorInput, myMonitorInput) + } + + @Test + fun `test RemoteMonitorTrigger as stream`() { + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorTrigger.writeTo(myObjOut) + val remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val out = BytesStreamOutput() + remoteMonitorTrigger.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteMonitorTrigger = RemoteMonitorTrigger(sin) + val newMyMonitorTrigger = MyMonitorTrigger(StreamInput.wrap(newRemoteMonitorTrigger.trigger.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorTrigger failed", newMyMonitorTrigger, myMonitorTrigger) + } + + @Test + fun `test RemoteDocLevelMonitorInput as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val out = BytesStreamOutput() + remoteDocLevelMonitorInput.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newRemoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(sin) + val newMyMonitorInput = MyMonitorInput(StreamInput.wrap(newRemoteDocLevelMonitorInput.input.toBytesRef().bytes)) + Assert.assertEquals("Round tripping RemoteMonitorInput failed", newMyMonitorInput, myMonitorInput) + val newDocLevelMonitorInput = newRemoteDocLevelMonitorInput.docLevelMonitorInput + Assert.assertEquals("Round tripping DocLevelMonitorInput failed", newDocLevelMonitorInput, docLevelMonitorInput) + } + + @Test + fun `test RemoteMonitor as stream`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + var myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + myObjOut = BytesStreamOutput() + 
myMonitorTrigger.writeTo(myObjOut) + val remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val monitor = Monitor( + Monitor.NO_ID, + Monitor.NO_VERSION, + "hello", + true, + IntervalSchedule(1, ChronoUnit.MINUTES), + Instant.now(), + Instant.now(), + "remote_doc_level_monitor", + null, + IndexUtils.NO_SCHEMA_VERSION, + listOf(remoteDocLevelMonitorInput), + listOf(remoteMonitorTrigger), + mapOf() + ) + + val out = BytesStreamOutput() + monitor.writeTo(out) + + val sin = StreamInput.wrap(out.bytes().toBytesRef().bytes) + val newMonitor = Monitor(sin) + Assert.assertEquals("Round tripping RemoteMonitor failed", monitor, newMonitor) + } + + fun randomDocumentLevelTriggerRunResult(): DocumentLevelTriggerRunResult { + val map = mutableMapOf<String, ActionRunResult>() + map.plus(Pair("key1", randomActionRunResult())) + map.plus(Pair("key2", randomActionRunResult())) + return DocumentLevelTriggerRunResult( + "trigger-name", + mutableListOf(UUIDs.randomBase64UUID().toString()), + null, + mutableMapOf(Pair("alertId", map)) + ) + } + + fun randomActionRunResult(): ActionRunResult { + val map = mutableMapOf<String, String>() + map.plus(Pair("key1", "val1")) + map.plus(Pair("key2", "val2")) + return ActionRunResult( + "1234", + "test-action", + map, + false, + Instant.now(), + null + ) + } +} + +data class MyMonitorInput(val a: Int, val b: String, val c: MyMonitorInput?) : Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInt(), + sin.readString(), + sin.readOptionalWriteable { MyMonitorInput(it) } + ) + + override fun writeTo(out: StreamOutput) { + out.writeInt(a) + out.writeString(b) + out.writeOptionalWriteable(c) + } +} + +data class MyMonitorTrigger(val a: Int, val b: String, val c: MyMonitorTrigger?) : Writeable { + + @Throws(IOException::class) + constructor(sin: StreamInput) : this( + sin.readInt(), + sin.readString(), + sin.readOptionalWriteable { MyMonitorTrigger(it) } + ) + + override fun writeTo(out: StreamOutput) { + out.writeInt(a) + out.writeString(b) + out.writeOptionalWriteable(c) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTestBase.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTestBase.kt new file mode 100644 index 00000000..9a42c780 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTestBase.kt @@ -0,0 +1,27 @@ +package org.opensearch.commons.alerting.model + +import org.opensearch.common.settings.Settings +import org.opensearch.common.xcontent.LoggingDeprecationHandler +import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.XContentBuilder +import org.opensearch.core.xcontent.XContentParser +import org.opensearch.search.SearchModule + +interface XContentTestBase { + fun builder(): XContentBuilder { + return XContentBuilder.builder(XContentType.JSON.xContent()) + } + + fun parser(xc: String): XContentParser { + val parser = XContentType.JSON.xContent().createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, xc) + parser.nextToken() + return parser + } + + fun xContentRegistry(): NamedXContentRegistry { + return NamedXContentRegistry( + listOf(SearchInput.XCONTENT_REGISTRY) + SearchModule(Settings.EMPTY, emptyList()).namedXContents + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt new file mode 100644 index 00000000..42e5ab53 --- 
/dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/XContentTests.kt @@ -0,0 +1,614 @@ +package org.opensearch.commons.alerting.model + +import org.junit.Assert.assertEquals +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import org.opensearch.common.io.stream.BytesStreamOutput +import org.opensearch.common.xcontent.XContentFactory +import org.opensearch.common.xcontent.json.JsonXContent +import org.opensearch.commons.alerting.builder +import org.opensearch.commons.alerting.model.action.Action +import org.opensearch.commons.alerting.model.action.ActionExecutionPolicy +import org.opensearch.commons.alerting.model.action.PerExecutionActionScope +import org.opensearch.commons.alerting.model.action.Throttle +import org.opensearch.commons.alerting.model.remote.monitors.RemoteDocLevelMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorInput +import org.opensearch.commons.alerting.model.remote.monitors.RemoteMonitorTrigger +import org.opensearch.commons.alerting.parser +import org.opensearch.commons.alerting.randomAction +import org.opensearch.commons.alerting.randomActionExecutionPolicy +import org.opensearch.commons.alerting.randomActionExecutionResult +import org.opensearch.commons.alerting.randomActionWithPolicy +import org.opensearch.commons.alerting.randomAlert +import org.opensearch.commons.alerting.randomBucketLevelMonitor +import org.opensearch.commons.alerting.randomBucketLevelTrigger +import org.opensearch.commons.alerting.randomDocLevelQuery +import org.opensearch.commons.alerting.randomQueryLevelMonitor +import org.opensearch.commons.alerting.randomQueryLevelMonitorWithoutUser +import org.opensearch.commons.alerting.randomQueryLevelTrigger +import org.opensearch.commons.alerting.randomThrottle +import org.opensearch.commons.alerting.randomUser +import org.opensearch.commons.alerting.randomUserEmpty +import org.opensearch.commons.alerting.randomWorkflow +import org.opensearch.commons.alerting.toJsonString +import org.opensearch.commons.alerting.toJsonStringWithUser +import org.opensearch.commons.alerting.util.string +import org.opensearch.commons.authuser.User +import org.opensearch.core.common.io.stream.StreamInput +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.index.query.QueryBuilders +import org.opensearch.search.builder.SearchSourceBuilder +import org.opensearch.test.OpenSearchTestCase +import java.time.Instant +import java.time.temporal.ChronoUnit +import kotlin.test.assertFailsWith + +class XContentTests { + + @Test + fun `test action parsing`() { + val action = randomAction() + val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedAction = Action.parse(parser(actionString)) + Assertions.assertEquals(action, parsedAction, "Round tripping Action doesn't work") + } + + @Test + fun `test action parsing with null subject template`() { + val action = randomAction().copy(subjectTemplate = null) + val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedAction = Action.parse(parser(actionString)) + Assertions.assertEquals(action, parsedAction, "Round tripping Action doesn't work") + } + + @Test + fun `test action parsing with null throttle`() { + val action = randomAction().copy(throttle = null) + val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedAction = Action.parse(parser(actionString)) + Assertions.assertEquals(action, parsedAction, "Round tripping Action 
doesn't work") + } + + fun `test action parsing with throttled enabled and null throttle`() { + val action = randomAction().copy(throttle = null).copy(throttleEnabled = true) + val actionString = action.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + assertFailsWith<IllegalArgumentException>("Action throttle enabled but not set throttle value") { + Action.parse(parser(actionString)) + } + } + + @Test + fun `test action with per execution scope does not support throttling`() { + try { + randomActionWithPolicy().copy( + throttleEnabled = true, + throttle = Throttle(value = 5, unit = ChronoUnit.MINUTES), + actionExecutionPolicy = ActionExecutionPolicy(PerExecutionActionScope()) + ) + Assertions.fail("Creating an action with per execution scope and throttle enabled did not fail.") + } catch (ignored: IllegalArgumentException) { + } + } + + @Test + fun `test throttle parsing`() { + val throttle = randomThrottle() + val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedThrottle = Throttle.parse(parser(throttleString)) + Assertions.assertEquals(throttle, parsedThrottle, "Round tripping Monitor doesn't work") + } + + @Test + fun `test throttle parsing with wrong unit`() { + val throttle = randomThrottle() + val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val wrongThrottleString = throttleString.replace("MINUTES", "wrongunit") + + assertFailsWith<IllegalArgumentException>("Only support MINUTES throttle unit") { + Throttle.parse( + parser( + wrongThrottleString + ) + ) + } + } + + @Test + fun `test throttle parsing with negative value`() { + val throttle = randomThrottle().copy(value = -1) + val throttleString = throttle.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + + assertFailsWith<IllegalArgumentException>("Can only set positive throttle period") { + Throttle.parse( + parser( + throttleString + ) + ) + } + } + + fun `test query-level monitor parsing`() { + val monitor = randomQueryLevelMonitor() + + val monitorString = monitor.toJsonStringWithUser() + val parsedMonitor = Monitor.parse(parser(monitorString)) + assertEquals("Round tripping QueryLevelMonitor doesn't work", monitor, parsedMonitor) + } + + @Test + fun `test monitor parsing with no name`() { + val monitorStringWithoutName = """ + { + "type": "monitor", + "enabled": false, + "schedule": { + "period": { + "interval": 1, + "unit": "MINUTES" + } + }, + "inputs": [], + "triggers": [] + } + """.trimIndent() + + assertFailsWith<IllegalArgumentException>("Monitor name is null") { + Monitor.parse( + parser( + monitorStringWithoutName + ) + ) + } + } + + @Test + fun `test monitor parsing with no schedule`() { + val monitorStringWithoutSchedule = """ + { + "type": "monitor", + "name": "asdf", + "enabled": false, + "inputs": [], + "triggers": [] + } + """.trimIndent() + + assertFailsWith<IllegalArgumentException>("Monitor schedule is null") { + Monitor.parse(parser(monitorStringWithoutSchedule)) + } + } + + @Test + fun `test bucket-level monitor parsing`() { + val monitor = randomBucketLevelMonitor() + + val monitorString = monitor.toJsonStringWithUser() + val parsedMonitor = Monitor.parse(parser(monitorString)) + Assertions.assertEquals(monitor, parsedMonitor, "Round tripping BucketLevelMonitor doesn't work") + } + + @Test + fun `test composite workflow parsing`() { + val workflow = randomWorkflow() + val monitorString = workflow.toJsonStringWithUser() + val parsedMonitor = Workflow.parse(parser(monitorString)) + Assertions.assertEquals(workflow, parsedMonitor, "Round tripping BucketLevelMonitor doesn't work") + } + + @Test + fun `test composite workflow parsing with 
auditDelegateMonitorAlerts flag disabled`() { + val workflow = randomWorkflow(auditDelegateMonitorAlerts = false) + val monitorString = workflow.toJsonStringWithUser() + val parsedMonitor = Workflow.parse(parser(monitorString)) + Assertions.assertEquals(workflow, parsedMonitor, "Round tripping BucketLevelMonitor doesn't work") + } + + @Test + fun `test query-level trigger parsing`() { + val trigger = randomQueryLevelTrigger() + + val triggerString = trigger.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedTrigger = Trigger.parse(parser(triggerString)) + + Assertions.assertEquals(trigger, parsedTrigger, "Round tripping QueryLevelTrigger doesn't work") + } + + @Test + fun `test bucket-level trigger parsing`() { + val trigger = randomBucketLevelTrigger() + + val triggerString = trigger.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedTrigger = Trigger.parse(parser(triggerString)) + + Assertions.assertEquals(trigger, parsedTrigger, "Round tripping BucketLevelTrigger doesn't work") + } + + @Test + fun `test no-op trigger parsing`() { + val trigger = NoOpTrigger() + + val triggerString = trigger.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedTrigger = Trigger.parse(parser(triggerString)) + + Assertions.assertEquals(trigger, parsedTrigger, "Round tripping NoOpTrigger doesn't work") + } + + @Test + fun `test creating a monitor with duplicate trigger ids fails`() { + try { + val repeatedTrigger = randomQueryLevelTrigger() + randomQueryLevelMonitor().copy(triggers = listOf(repeatedTrigger, repeatedTrigger)) + Assertions.fail("Creating a monitor with duplicate triggers did not fail.") + } catch (ignored: IllegalArgumentException) { + } + } + + @Test + fun `test user parsing`() { + val user = randomUser() + val userString = user.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedUser = User.parse(parser(userString)) + Assertions.assertEquals(user, parsedUser, "Round tripping user doesn't work") + } + + @Test + fun `test empty user parsing`() { + val user = randomUserEmpty() + val userString = user.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + + val parsedUser = User.parse(parser(userString)) + Assertions.assertEquals(user, parsedUser, "Round tripping user doesn't work") + Assertions.assertEquals("", parsedUser.name) + Assertions.assertEquals(0, parsedUser.roles.size) + } + + @Test + fun `test query-level monitor parsing without user`() { + val monitor = randomQueryLevelMonitorWithoutUser() + + val monitorString = monitor.toJsonString() + val parsedMonitor = Monitor.parse(parser(monitorString)) + Assertions.assertEquals(monitor, parsedMonitor, "Round tripping QueryLevelMonitor doesn't work") + Assertions.assertNull(parsedMonitor.user) + } + + @Test + fun `test workflow parsing`() { + val workflow = randomWorkflow(monitorIds = listOf("1", "2", "3")) + val monitorString = workflow.toJsonString() + val parsedWorkflow = Workflow.parse(parser(monitorString)) + Assertions.assertEquals(workflow, parsedWorkflow, "Round tripping workflow failed") + } + + @Test + fun `test chainedMonitorFindings parsing`() { + val cmf1 = ChainedMonitorFindings(monitorId = "m1") + val cmf1String = cmf1.toJsonString() + Assertions.assertEquals( + ChainedMonitorFindings.parse(parser(cmf1String)), + cmf1, + "Round tripping chained monitor findings failed" + ) + val cmf2 = ChainedMonitorFindings(monitorIds = listOf("m1", "m2")) + val cmf2String = cmf2.toJsonString() + Assertions.assertEquals( + ChainedMonitorFindings.parse(parser(cmf2String)), + 
cmf2, + "Round tripping chained monitor findings failed" + ) + } + + @Test + fun `test old monitor format parsing`() { + val monitorString = """ + { + "type": "monitor", + "schema_version": 3, + "name": "asdf", + "user": { + "name": "admin123", + "backend_roles": [], + "roles": [ + "all_access", + "security_manager" + ], + "custom_attribute_names": [], + "user_requested_tenant": null + }, + "enabled": true, + "enabled_time": 1613530078244, + "schedule": { + "period": { + "interval": 1, + "unit": "MINUTES" + } + }, + "inputs": [ + { + "search": { + "indices": [ + "test_index" + ], + "query": { + "size": 0, + "query": { + "bool": { + "filter": [ + { + "range": { + "order_date": { + "from": "{{period_end}}||-1h", + "to": "{{period_end}}", + "include_lower": true, + "include_upper": true, + "format": "epoch_millis", + "boost": 1.0 + } + } + } + ], + "adjust_pure_negative": true, + "boost": 1.0 + } + }, + "aggregations": {} + } + } + } + ], + "triggers": [ + { + "id": "e_sc0XcB98Q42rHjTh4K", + "name": "abc", + "severity": "1", + "condition": { + "script": { + "source": "ctx.results[0].hits.total.value > 100000", + "lang": "painless" + } + }, + "actions": [] + } + ], + "last_update_time": 1614121489719 + } + """.trimIndent() + val parsedMonitor = Monitor.parse(parser(monitorString)) + Assertions.assertEquals( + Monitor.MonitorType.QUERY_LEVEL_MONITOR.value, + parsedMonitor.monitorType, + "Incorrect monitor type" + ) + Assertions.assertEquals(1, parsedMonitor.triggers.size, "Incorrect trigger count") + val trigger = parsedMonitor.triggers.first() + Assertions.assertTrue(trigger is QueryLevelTrigger, "Incorrect trigger type") + Assertions.assertEquals("abc", trigger.name, "Incorrect name for parsed trigger") + } + + @Test + fun `test creating a query-level monitor with invalid trigger type fails`() { + try { + val bucketLevelTrigger = randomBucketLevelTrigger() + randomQueryLevelMonitor().copy(triggers = listOf(bucketLevelTrigger)) + Assertions.fail("Creating a query-level monitor with bucket-level triggers did not fail.") + } catch (ignored: IllegalArgumentException) { + } + } + + @Test + fun `test creating a bucket-level monitor with invalid trigger type fails`() { + try { + val queryLevelTrigger = randomQueryLevelTrigger() + randomBucketLevelMonitor().copy(triggers = listOf(queryLevelTrigger)) + Assertions.fail("Creating a bucket-level monitor with query-level triggers did not fail.") + } catch (ignored: IllegalArgumentException) { + } + } + + @Test + fun `test creating a bucket-level monitor with invalid input fails`() { + try { + val invalidInput = SearchInput(emptyList(), SearchSourceBuilder().query(QueryBuilders.matchAllQuery())) + randomBucketLevelMonitor().copy(inputs = listOf(invalidInput)) + Assertions.fail("Creating a bucket-level monitor with an invalid input did not fail.") + } catch (ignored: IllegalArgumentException) { + } + } + + @Test + fun `test action execution policy`() { + val actionExecutionPolicy = randomActionExecutionPolicy() + val actionExecutionPolicyString = actionExecutionPolicy.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedActionExecutionPolicy = ActionExecutionPolicy.parse(parser(actionExecutionPolicyString)) + Assertions.assertEquals( + actionExecutionPolicy, + parsedActionExecutionPolicy, + "Round tripping ActionExecutionPolicy doesn't work" + ) + } + + @Test + fun `test doc level query toXcontent`() { + val dlq = DocLevelQuery("id", "name", listOf("f1", "f2"), "query", listOf("t1", "t2")) + val dlqString = dlq.toXContent(builder(), 
ToXContent.EMPTY_PARAMS).string() + val parsedDlq = DocLevelQuery.parse(parser(dlqString)) + Assertions.assertEquals( + dlq, + parsedDlq, + "Round tripping Doc level query doesn't work" + ) + } + + @Test + fun `test doc level query toXcontent with query field names`() { + val dlq = DocLevelQuery("id", "name", listOf("f1", "f2"), "query", listOf("t1", "t2"), listOf("f1", "f2")) + val dlqString = dlq.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedDlq = DocLevelQuery.parse(parser(dlqString)) + Assertions.assertEquals( + dlq, + parsedDlq, + "Round tripping Doc level query doesn't work" + ) + } + + @Test + fun `test alert parsing`() { + val alert = randomAlert() + + val alertString = alert.toXContentWithUser(builder()).string() + val parsedAlert = Alert.parse(parser(alertString)) + + assertEquals("Round tripping alert doesn't work", alert, parsedAlert) + } + + @Test + fun `test alert parsing with noop trigger`() { + val monitor = randomQueryLevelMonitor() + val alert = Alert( + id = "", + monitor = monitor, + trigger = NoOpTrigger(), + startTime = Instant.now().truncatedTo(ChronoUnit.MILLIS), + errorMessage = "some error", + lastNotificationTime = Instant.now(), + workflowId = "", + executionId = "", + clusters = listOf() + ) + assertEquals("Round tripping alert doesn't work", alert.triggerName, "NoOp trigger") + } + + @Test + fun `test alert parsing without user`() { + val alertStr = "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1," + + "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + + "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + + ":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + + "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + + "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null," + + "\"clusters\":[\"cluster-1\",\"cluster-2\"]}" + val parsedAlert = Alert.parse(parser(alertStr)) + OpenSearchTestCase.assertNull(parsedAlert.monitorUser) + } + + @Test + fun `test alert parsing with user as null`() { + val alertStr = + "{\"id\":\"\",\"version\":-1,\"monitor_id\":\"\",\"schema_version\":0,\"monitor_version\":1,\"monitor_user\":null," + + "\"monitor_name\":\"ARahqfRaJG\",\"trigger_id\":\"fhe1-XQBySl0wQKDBkOG\",\"trigger_name\":\"ffELMuhlro\"," + + "\"state\":\"ACTIVE\",\"error_message\":null,\"alert_history\":[],\"severity\":\"1\",\"action_execution_results\"" + + ":[{\"action_id\":\"ghe1-XQBySl0wQKDBkOG\",\"last_execution_time\":1601917224583,\"throttled_count\":-1478015168}," + + "{\"action_id\":\"gxe1-XQBySl0wQKDBkOH\",\"last_execution_time\":1601917224583,\"throttled_count\":-768533744}]," + + "\"start_time\":1601917224599,\"last_notification_time\":null,\"end_time\":null,\"acknowledged_time\":null," + + "\"clusters\":[\"cluster-1\",\"cluster-2\"]}" + val parsedAlert = Alert.parse(parser(alertStr)) + OpenSearchTestCase.assertNull(parsedAlert.monitorUser) + } + + @Test + fun `test action execution result parsing`() { + val actionExecutionResult = randomActionExecutionResult() + + val actionExecutionResultString = actionExecutionResult.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedActionExecutionResultString = ActionExecutionResult.parse(parser(actionExecutionResultString)) + + 
assertEquals("Round tripping alert doesn't work", actionExecutionResult, parsedActionExecutionResultString) + } + + @Test + fun `test DataSources parsing`() { + val dataSources = DataSources( + ScheduledJob.DOC_LEVEL_QUERIES_INDEX, + ".opensearch-alerting-finding-history-write", + "<.opensearch-alerting-finding-history-{now/d}-1>", + ".opendistro-alerting-alerts", + ".opendistro-alerting-alert-history-write", + "<.opendistro-alerting-alert-history-{now/d}-1>", + mapOf(), + false + ) + Assertions.assertNotNull(dataSources) + + val dataSourcesString = dataSources.toXContent(builder(), ToXContent.EMPTY_PARAMS).string() + val parsedDataSources = DataSources.parse(parser(dataSourcesString)) + Assertions.assertEquals(dataSources, parsedDataSources, "Round tripping DataSources doesn't work") + } + + @Test + fun `test Comment parsing`() { + val comment = Comment( + "123", + "456", + "alert", + "content", + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + randomUser() + ) + Assertions.assertNotNull(comment) + + val commentString = comment.toXContentWithUser(builder()).string() + val parsedComment = Comment.parse(parser(commentString), "123") + Assertions.assertEquals(comment, parsedComment, "Round tripping Comment doesn't work") + } + + @Test + fun `test MonitorMetadata`() { + val monitorMetadata = MonitorMetadata( + id = "monitorId-metadata", + monitorId = "monitorId", + lastActionExecutionTimes = emptyList(), + lastRunContext = emptyMap(), + sourceToQueryIndexMapping = mutableMapOf() + ) + val monitorMetadataString = monitorMetadata.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedMonitorMetadata = MonitorMetadata.parse(parser(monitorMetadataString)) + assertEquals("Round tripping MonitorMetadata doesn't work", monitorMetadata, parsedMonitorMetadata) + } + + @Test + fun `test RemoteMonitorInput`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val remoteMonitorInput = RemoteMonitorInput(myObjOut.bytes()) + + val xContent = remoteMonitorInput.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteMonitorInput = RemoteMonitorInput.parse(parser(xContent)) + val parsedMyMonitorInput = MyMonitorInput(StreamInput.wrap(parsedRemoteMonitorInput.input.toBytesRef().bytes)) + assertEquals("Round tripping RemoteMonitorInput doesn't work", myMonitorInput, parsedMyMonitorInput) + } + + @Test + fun `test RemoteMonitorTrigger`() { + val myMonitorTrigger = MyMonitorTrigger(1, "hello", MyMonitorTrigger(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorTrigger.writeTo(myObjOut) + val remoteMonitorTrigger = RemoteMonitorTrigger("id", "name", "1", listOf(), myObjOut.bytes()) + + val xContent = remoteMonitorTrigger.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteMonitorTrigger = Trigger.parse(parser(xContent)) as RemoteMonitorTrigger + val parsedMyMonitorTrigger = MyMonitorTrigger(StreamInput.wrap(parsedRemoteMonitorTrigger.trigger.toBytesRef().bytes)) + assertEquals("Round tripping RemoteMonitorTrigger doesn't work", myMonitorTrigger, parsedMyMonitorTrigger) + } + + @Test + fun `test RemoteDocLevelMonitorInput`() { + val myMonitorInput = MyMonitorInput(1, "hello", MyMonitorInput(2, "world", null)) + val myObjOut = BytesStreamOutput() + myMonitorInput.writeTo(myObjOut) + val docLevelMonitorInput = DocLevelMonitorInput( + "test", + listOf("test"), + 
listOf(randomDocLevelQuery()) + ) + val remoteDocLevelMonitorInput = RemoteDocLevelMonitorInput(myObjOut.bytes(), docLevelMonitorInput) + + val xContent = remoteDocLevelMonitorInput.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string() + val parsedRemoteDocLevelMonitorInput = RemoteDocLevelMonitorInput.parse(parser(xContent)) + val parsedMyMonitorInput = MyMonitorInput(StreamInput.wrap(parsedRemoteDocLevelMonitorInput.input.toBytesRef().bytes)) + assertEquals("Round tripping RemoteDocLevelMonitorInput doesn't work", myMonitorInput, parsedMyMonitorInput) + val parsedDocLevelMonitorInput = parsedRemoteDocLevelMonitorInput.docLevelMonitorInput + assertEquals("Round tripping RemoteDocLevelMonitorInput doesn't work", docLevelMonitorInput, parsedDocLevelMonitorInput) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt b/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt new file mode 100644 index 00000000..dcbf8998 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/alerting/model/action/ActionTests.kt @@ -0,0 +1,40 @@ +package org.opensearch.commons.alerting.model.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.alerting.model.action.Action.Companion.DESTINATION_ID_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.ID_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.NAME_FIELD +import org.opensearch.commons.alerting.model.action.Action.Companion.THROTTLE_ENABLED_FIELD +import org.opensearch.commons.alerting.randomAction + +class ActionTests { + + @Test + fun `test action asTemplateArgs`() { + val action = randomAction() + + val templateArgs = action.asTemplateArg() + + assertEquals( + action.id, + templateArgs[ID_FIELD], + "Template arg field 'id' doesn't match" + ) + assertEquals( + action.name, + templateArgs[NAME_FIELD], + "Template arg field 'name' doesn't match" + ) + assertEquals( + action.destinationId, + templateArgs[DESTINATION_ID_FIELD], + "Template arg field 'destinationId' doesn't match" + ) + assertEquals( + action.throttleEnabled, + templateArgs[THROTTLE_ENABLED_FIELD], + "Template arg field 'throttleEnabled' doesn't match" + ) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterfaceTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterfaceTests.kt new file mode 100644 index 00000000..dd97cfda --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/NotificationsPluginInterfaceTests.kt @@ -0,0 +1,246 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications + +import com.nhaarman.mockitokotlin2.whenever +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.Answers +import org.mockito.ArgumentMatchers.eq +import org.mockito.Mock +import org.mockito.Mockito.any +import org.mockito.Mockito.doAnswer +import org.mockito.Mockito.mock +import org.mockito.Mockito.times +import org.mockito.Mockito.verify +import org.mockito.junit.jupiter.MockitoExtension +import org.opensearch.action.ActionType +import org.opensearch.client.node.NodeClient +import org.opensearch.commons.destination.response.LegacyDestinationResponse +import org.opensearch.commons.notifications.action.CreateNotificationConfigRequest +import 
org.opensearch.commons.notifications.action.CreateNotificationConfigResponse +import org.opensearch.commons.notifications.action.DeleteNotificationConfigRequest +import org.opensearch.commons.notifications.action.DeleteNotificationConfigResponse +import org.opensearch.commons.notifications.action.GetChannelListRequest +import org.opensearch.commons.notifications.action.GetChannelListResponse +import org.opensearch.commons.notifications.action.GetNotificationConfigRequest +import org.opensearch.commons.notifications.action.GetNotificationConfigResponse +import org.opensearch.commons.notifications.action.GetPluginFeaturesRequest +import org.opensearch.commons.notifications.action.GetPluginFeaturesResponse +import org.opensearch.commons.notifications.action.LegacyPublishNotificationRequest +import org.opensearch.commons.notifications.action.LegacyPublishNotificationResponse +import org.opensearch.commons.notifications.action.SendNotificationResponse +import org.opensearch.commons.notifications.action.UpdateNotificationConfigRequest +import org.opensearch.commons.notifications.action.UpdateNotificationConfigResponse +import org.opensearch.commons.notifications.model.Channel +import org.opensearch.commons.notifications.model.ChannelList +import org.opensearch.commons.notifications.model.ChannelMessage +import org.opensearch.commons.notifications.model.ConfigType +import org.opensearch.commons.notifications.model.DeliveryStatus +import org.opensearch.commons.notifications.model.EventSource +import org.opensearch.commons.notifications.model.EventStatus +import org.opensearch.commons.notifications.model.NotificationConfig +import org.opensearch.commons.notifications.model.NotificationConfigInfo +import org.opensearch.commons.notifications.model.NotificationConfigSearchResult +import org.opensearch.commons.notifications.model.NotificationEvent +import org.opensearch.commons.notifications.model.SeverityType +import org.opensearch.commons.notifications.model.Slack +import org.opensearch.core.action.ActionListener +import org.opensearch.core.rest.RestStatus +import java.time.Instant + +@Suppress("UNCHECKED_CAST") +@ExtendWith(MockitoExtension::class) +internal class NotificationsPluginInterfaceTests { + + @Mock(answer = Answers.RETURNS_DEEP_STUBS) + private lateinit var client: NodeClient + + @Test + fun createNotificationConfig() { + val request = mock(CreateNotificationConfigRequest::class.java) + val response = CreateNotificationConfigResponse("configId") + val listener: ActionListener<CreateNotificationConfigResponse> = + mock(ActionListener::class.java) as ActionListener<CreateNotificationConfigResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<CreateNotificationConfigResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.createNotificationConfig(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun updateNotificationConfig() { + val request = mock(UpdateNotificationConfigRequest::class.java) + val response = UpdateNotificationConfigResponse("configId") + val listener: ActionListener<UpdateNotificationConfigResponse> = + mock(ActionListener::class.java) as ActionListener<UpdateNotificationConfigResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<UpdateNotificationConfigResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.updateNotificationConfig(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun deleteNotificationConfig() { + val request = mock(DeleteNotificationConfigRequest::class.java) + val response = 
DeleteNotificationConfigResponse(mapOf(Pair("sample_config_id", RestStatus.OK))) + val listener: ActionListener<DeleteNotificationConfigResponse> = + mock(ActionListener::class.java) as ActionListener<DeleteNotificationConfigResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<DeleteNotificationConfigResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.deleteNotificationConfig(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun getNotificationConfig() { + val request = mock(GetNotificationConfigRequest::class.java) + val response = mockGetNotificationConfigResponse() + val listener: ActionListener<GetNotificationConfigResponse> = + mock(ActionListener::class.java) as ActionListener<GetNotificationConfigResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<GetNotificationConfigResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.getNotificationConfig(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun getPluginFeatures() { + val request = mock(GetPluginFeaturesRequest::class.java) + val response = GetPluginFeaturesResponse( + listOf("config_type_1", "config_type_2", "config_type_3"), + mapOf( + Pair("FeatureKey1", "FeatureValue1"), + Pair("FeatureKey2", "FeatureValue2"), + Pair("FeatureKey3", "FeatureValue3") + ) + ) + val listener: ActionListener<GetPluginFeaturesResponse> = + mock(ActionListener::class.java) as ActionListener<GetPluginFeaturesResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<GetPluginFeaturesResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.getPluginFeatures(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun getChannelList() { + val sampleConfig = Channel( + "config_id", + "name", + "description", + ConfigType.SLACK + ) + + val request = mock(GetChannelListRequest::class.java) + val response = GetChannelListResponse(ChannelList(sampleConfig)) + val listener: ActionListener<GetChannelListResponse> = + mock(ActionListener::class.java) as ActionListener<GetChannelListResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<GetChannelListResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.getChannelList(client, request, listener) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun sendNotification() { + val notificationInfo = EventSource( + "title", + "reference_id", + SeverityType.HIGH, + listOf("tag1", "tag2") + ) + val channelMessage = ChannelMessage( + "text_description", + "htmlDescription", + null + ) + + val sampleStatus = EventStatus( + "config_id", + "name", + ConfigType.SLACK, + deliveryStatus = DeliveryStatus("404", "invalid recipient") + ) + + val sampleEvent = NotificationEvent(notificationInfo, listOf(sampleStatus)) + + val response = SendNotificationResponse(sampleEvent) + val listener: ActionListener<SendNotificationResponse> = + mock(ActionListener::class.java) as ActionListener<SendNotificationResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<SendNotificationResponse>) + .onResponse(response) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.sendNotification( + client, + notificationInfo, + channelMessage, + listOf("channelId1", "channelId2"), + listener + ) + verify(listener, times(1)).onResponse(eq(response)) + } + + @Test + fun publishLegacyNotification() { + val request = mock(LegacyPublishNotificationRequest::class.java) + val res = 
LegacyPublishNotificationResponse(LegacyDestinationResponse.Builder().withStatusCode(200).withResponseContent("Nice!").build()) + val l: ActionListener<LegacyPublishNotificationResponse> = + mock(ActionListener::class.java) as ActionListener<LegacyPublishNotificationResponse> + + doAnswer { + (it.getArgument(2) as ActionListener<LegacyPublishNotificationResponse>) + .onResponse(res) + }.whenever(client).execute(any(ActionType::class.java), any(), any()) + + NotificationsPluginInterface.publishLegacyNotification(client, request, l) + verify(l, times(1)).onResponse(eq(res)) + } + + private fun mockGetNotificationConfigResponse(): GetNotificationConfigResponse { + val sampleSlack = Slack("https://domain.com/sample_url#1234567890") + val sampleConfig = NotificationConfig( + "name", + "description", + ConfigType.SLACK, + configData = sampleSlack + ) + val configInfo = NotificationConfigInfo( + "config_id", + Instant.now(), + Instant.now(), + sampleConfig + ) + return GetNotificationConfigResponse(NotificationConfigSearchResult(configInfo)) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequestTests.kt index 679ded46..73c446ac 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action @@ -35,8 +13,9 @@ import org.opensearch.commons.notifications.model.Chime import org.opensearch.commons.notifications.model.ConfigType import org.opensearch.commons.notifications.model.Email import org.opensearch.commons.notifications.model.EmailGroup -import org.opensearch.commons.notifications.model.Feature +import org.opensearch.commons.notifications.model.EmailRecipient import org.opensearch.commons.notifications.model.MethodType +import org.opensearch.commons.notifications.model.MicrosoftTeams import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.notifications.model.Slack import org.opensearch.commons.notifications.model.SmtpAccount @@ -44,7 +23,6 @@ import org.opensearch.commons.notifications.model.Webhook import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -import java.util.EnumSet internal class CreateNotificationConfigRequestTests { @@ -54,9 +32,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleWebhook + configData = sampleWebhook, + isEnabled = true ) } @@ -66,9 +43,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) } @@ -78,37 +54,44 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleChime + configData = sampleChime, + isEnabled = true + ) + } + private fun createMicrosoftTeamsContentConfigObject(): NotificationConfig { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_microsoft_teams_url#1234567890") + return NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = sampleMicrosoftTeams, + isEnabled = true ) } private fun createEmailGroupContentConfigObject(): NotificationConfig { - val sampleEmailGroup = EmailGroup(listOf("dummy@company.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("dummy@company.com"))) return NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmailGroup + configData = sampleEmailGroup, + isEnabled = true ) } private fun createEmailContentConfigObject(): NotificationConfig { val sampleEmail = Email( emailAccountID = "sample_1@dummy.com", - recipients = listOf("sample_2@dummy.com"), + recipients = listOf(EmailRecipient("sample_2@dummy.com")), emailGroupIds = listOf("sample_3@dummy.com") ) return NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmail + configData = sampleEmail, + isEnabled = true ) } @@ -123,9 +106,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSmtpAccount + configData = sampleSmtpAccount, + isEnabled = true ) } @@ -144,6 +126,21 @@ internal class CreateNotificationConfigRequestTests { assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) } + @Test + fun `Create config 
serialize and deserialize transport object should be equal microsoft teams`() { + val configRequest = CreateNotificationConfigRequest( + createMicrosoftTeamsContentConfigObject() + ) + val recreatedObject = + recreateObject(configRequest) { + CreateNotificationConfigRequest( + it + ) + } + assertNull(recreatedObject.validate()) + assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) + } + @Test fun `Create config serialize and deserialize transport object should be equal slack`() { val configRequest = CreateNotificationConfigRequest( @@ -219,6 +216,16 @@ internal class CreateNotificationConfigRequestTests { assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) } + @Test + fun `Create config serialize and deserialize using json object should be equal microsoft teams`() { + val configRequest = CreateNotificationConfigRequest( + createMicrosoftTeamsContentConfigObject() + ) + val jsonString = getJsonString(configRequest) + val recreatedObject = createObjectFromJsonString(jsonString) { CreateNotificationConfigRequest.parse(it) } + assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) + } + @Test fun `Create config serialize and deserialize using json object should be equal`() { val configRequest = CreateNotificationConfigRequest( @@ -286,9 +293,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val jsonString = """ @@ -297,7 +303,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"slack", - "feature_list":["index_management"], "is_enabled":true, "slack":{"url":"https://domain.com/sample_slack_url#1234567890"} } @@ -307,6 +312,33 @@ internal class CreateNotificationConfigRequestTests { assertEquals(config, recreatedObject.notificationConfig) } + @Test + fun `Create config should deserialize json object using parser microsoft teams`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_microsoft_teams_url#1234567890") + val config = NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = sampleMicrosoftTeams, + isEnabled = true + ) + + val jsonString = """ + { + "config_id":"config_id1", + "config":{ + "name":"name", + "description":"description", + "config_type":"microsoft_teams", + "is_enabled":true, + "microsoft_teams":{"url":"https://domain.com/sample_microsoft_teams_url#1234567890"} + } + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { CreateNotificationConfigRequest.parse(it) } + assertEquals(config, recreatedObject.notificationConfig) + } + @Test fun `Create config should deserialize json object using parser webhook`() { val sampleWebhook = Webhook("https://domain.com/sample_webhook_url#1234567890") @@ -314,9 +346,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleWebhook + configData = sampleWebhook, + isEnabled = true ) val jsonString = """ @@ -325,7 +356,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"webhook", - "feature_list":["index_management"], "is_enabled":true, "webhook":{"url":"https://domain.com/sample_webhook_url#1234567890"} } @@ -342,9 +372,8 @@ 
internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleChime + configData = sampleChime, + isEnabled = true ) val jsonString = """ @@ -354,7 +383,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"chime", - "feature_list":["index_management"], "is_enabled":true, "chime":{"url":"https://domain.com/sample_chime_url#1234567890"} } @@ -366,14 +394,13 @@ internal class CreateNotificationConfigRequestTests { @Test fun `Create config should deserialize json object using parser Email Group`() { - val sampleEmailGroup = EmailGroup(listOf("dummy@company.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("dummy@company.com"))) val config = NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmailGroup + configData = sampleEmailGroup, + isEnabled = true ) val jsonString = """ @@ -383,9 +410,8 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"email_group", - "feature_list":["index_management"], "is_enabled":true, - "email_group":{"recipient_list":["dummy@company.com"]} + "email_group":{"recipient_list":[{"recipient":"dummy@company.com"}]} } } """.trimIndent() @@ -397,16 +423,15 @@ internal class CreateNotificationConfigRequestTests { fun `Update config should deserialize json object using parser Email`() { val sampleEmail = Email( emailAccountID = "sample_1@dummy.com", - recipients = listOf("sample_2@dummy.com"), + recipients = listOf(EmailRecipient("sample_2@dummy.com")), emailGroupIds = listOf("sample_3@dummy.com") ) val config = NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmail + configData = sampleEmail, + isEnabled = true ) val jsonString = """ @@ -416,10 +441,12 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"email", - "feature_list":["index_management"], "is_enabled":true, - "email":{"email_account_id":"sample_1@dummy.com","recipient_list":["sample_2@dummy.com"], - "email_group_id_list":["sample_3@dummy.com"] } + "email":{ + "email_account_id":"sample_1@dummy.com", + "recipient_list":[{"recipient":"sample_2@dummy.com"}], + "email_group_id_list":["sample_3@dummy.com"] + } } } """.trimIndent() @@ -439,9 +466,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSmtpAccount + configData = sampleSmtpAccount, + isEnabled = true ) val jsonString = """ @@ -451,7 +477,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"smtp_account", - "feature_list":["index_management"], "is_enabled":true, "smtp_account":{"host":"http://dummy.com", "port":11,"method": "ssl", "from_address": "sample@dummy.com" } } @@ -482,7 +507,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"chime", - "feature_list":["index_management"], "is_enabled":true, "chime":{"url":"https://domain.com/sample_chime_url#1234567890"} } @@ -507,9 +531,8 @@ internal class CreateNotificationConfigRequestTests { "name", "description", ConfigType.SLACK, - 
EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val jsonString = """ @@ -518,7 +541,6 @@ internal class CreateNotificationConfigRequestTests { "name":"name", "description":"description", "config_type":"slack", - "feature_list":["index_management"], "is_enabled":true, "slack":{"url":"https://domain.com/sample_slack_url#1234567890"}, "extra_field_1":["extra", "value"], diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponseTests.kt index b9f0f4e1..0f580fcd 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/CreateNotificationConfigResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequestTests.kt index 4c0c016d..c44d1458 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponseTests.kt index 5c479b47..a05fbca5 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/DeleteNotificationConfigResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action @@ -33,7 +11,7 @@ import org.junit.jupiter.api.assertThrows import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -import org.opensearch.rest.RestStatus +import org.opensearch.core.rest.RestStatus internal class DeleteNotificationConfigResponseTests { diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequestTests.kt new file mode 100644 index 00000000..aaa29e21 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListRequestTests.kt @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.action + +import com.fasterxml.jackson.core.JsonParseException +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.opensearch.commons.utils.createObjectFromJsonString +import org.opensearch.commons.utils.getJsonString +import org.opensearch.commons.utils.recreateObject + +internal class GetChannelListRequestTests { + + private fun assertGetRequestEquals( + expected: GetChannelListRequest, + actual: GetChannelListRequest + ) { + assertEquals(expected.compact, actual.compact) + } + + @Test + fun `Get request serialize and deserialize transport object should be equal`() { + val configRequest = GetChannelListRequest() + val recreatedObject = recreateObject(configRequest) { GetChannelListRequest(it) } + assertGetRequestEquals(configRequest, recreatedObject) + } + + @Test + fun `Get request serialize and deserialize using json object should be equal`() { + val configRequest = GetChannelListRequest() + val jsonString = getJsonString(configRequest) + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListRequest.parse(it) } + 
assertGetRequestEquals(configRequest, recreatedObject) + } + + @Test + fun `Get request should throw exception when invalid json object is passed`() { + val jsonString = "sample message" + assertThrows<JsonParseException> { + createObjectFromJsonString(jsonString) { GetChannelListRequest.parse(it) } + } + } + + @Test + fun `Get request should safely ignore extra field in json object`() { + val configRequest = GetChannelListRequest() + val jsonString = """ + { + "extra_field_1":["extra", "value"], + "extra_field_2":{"extra":"value"}, + "extra_field_3":"extra value 3" + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListRequest.parse(it) } + assertGetRequestEquals(configRequest, recreatedObject) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponseTests.kt similarity index 64% rename from src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponseTests.kt rename to src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponseTests.kt index 149e966f..4f19ab4b 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetChannelListResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action @@ -30,18 +8,18 @@ import org.apache.lucene.search.TotalHits import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test +import org.opensearch.commons.notifications.model.Channel +import org.opensearch.commons.notifications.model.ChannelList import org.opensearch.commons.notifications.model.ConfigType -import org.opensearch.commons.notifications.model.FeatureChannel -import org.opensearch.commons.notifications.model.FeatureChannelList import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -internal class GetFeatureChannelListResponseTests { +internal class GetChannelListResponseTests { private fun assertSearchResultEquals( - expected: FeatureChannelList, - actual: FeatureChannelList + expected: ChannelList, + actual: ChannelList ) { assertEquals(expected.startIndex, actual.startIndex) assertEquals(expected.totalHits, actual.totalHits) @@ -52,112 +30,124 @@ internal class GetFeatureChannelListResponseTests { @Test fun `Get Response serialize and deserialize with config object should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.SLACK ) - val searchResult = FeatureChannelList(sampleConfig) - val getResponse = GetFeatureChannelListResponse(searchResult) - val recreatedObject = recreateObject(getResponse) { GetFeatureChannelListResponse(it) } + val searchResult = ChannelList(sampleConfig) + val getResponse = GetChannelListResponse(searchResult) + val recreatedObject = recreateObject(getResponse) { GetChannelListResponse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response serialize and deserialize with multiple config object should be equal`() { - val sampleConfig1 = FeatureChannel( + val sampleConfig1 = Channel( "config_id1", "name1", "description1", ConfigType.SLACK ) - val sampleConfig2 = FeatureChannel( + val sampleConfig2 = Channel( "config_id2", "name2", "description2", ConfigType.CHIME ) - val sampleConfig3 = FeatureChannel( + val sampleConfig3 = Channel( "config_id3", "name3", "description3", ConfigType.WEBHOOK ) - val searchResult = FeatureChannelList( + val sampleConfig4 = Channel( + "config_id5", + "name4", + "description4", + ConfigType.MICROSOFT_TEAMS + ) + val searchResult = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(sampleConfig1, sampleConfig2, sampleConfig3) + listOf(sampleConfig1, sampleConfig2, sampleConfig3, sampleConfig4) ) - val getResponse = GetFeatureChannelListResponse(searchResult) - val recreatedObject = recreateObject(getResponse) { GetFeatureChannelListResponse(it) } + val getResponse = GetChannelListResponse(searchResult) + val recreatedObject = recreateObject(getResponse) { GetChannelListResponse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response serialize and deserialize using json config object should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL_GROUP ) - val searchResult = FeatureChannelList(sampleConfig) - val getResponse = GetFeatureChannelListResponse(searchResult) + val searchResult = ChannelList(sampleConfig) + val getResponse = GetChannelListResponse(searchResult) val jsonString = getJsonString(getResponse) - val 
recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response serialize and deserialize using json with multiple config object should be equal`() { - val sampleConfig1 = FeatureChannel( + val sampleConfig1 = Channel( "config_id1", "name1", "description1", ConfigType.SLACK ) - val sampleConfig2 = FeatureChannel( + val sampleConfig2 = Channel( "config_id2", "name2", "description2", ConfigType.CHIME ) - val sampleConfig3 = FeatureChannel( + val sampleConfig3 = Channel( "config_id3", "name3", "description3", ConfigType.WEBHOOK ) - val searchResult = FeatureChannelList( + val sampleConfig4 = Channel( + "config_id5", + "name4", + "description4", + ConfigType.MICROSOFT_TEAMS + ) + val searchResult = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(sampleConfig1, sampleConfig2, sampleConfig3) + listOf(sampleConfig1, sampleConfig2, sampleConfig3, sampleConfig4) ) - val getResponse = GetFeatureChannelListResponse(searchResult) + val getResponse = GetChannelListResponse(searchResult) val jsonString = getJsonString(getResponse) - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response should use isEnabled=true if absent in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL, true ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { "start_index":"0", "total_hits":"1", "total_hit_relation":"eq", - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -167,25 +157,25 @@ internal class GetFeatureChannelListResponseTests { ] } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response should safely ignore extra field in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { "start_index":"0", "total_hits":"1", "total_hit_relation":"eq", - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -199,22 +189,22 @@ internal class GetFeatureChannelListResponseTests { "extra_field_3":"extra value 3" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test fun `Get Response should safely fallback to default if startIndex, totalHits or totalHitRelation field absent in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", 
ConfigType.EMAIL ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -225,12 +215,12 @@ internal class GetFeatureChannelListResponseTests { ] } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } assertSearchResultEquals(searchResult, recreatedObject.searchResult) } @Test - fun `Get Response should throw exception if featureChannelList is absent in json`() { + fun `Get Response should throw exception if channelList is absent in json`() { val jsonString = """ { "start_index":"0", @@ -239,7 +229,7 @@ internal class GetFeatureChannelListResponseTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { GetFeatureChannelListResponse.parse(it) } + createObjectFromJsonString(jsonString) { GetChannelListResponse.parse(it) } } } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequestTests.kt deleted file mode 100644 index c9788a80..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetFeatureChannelListRequestTests.kt +++ /dev/null @@ -1,95 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * - */ -package org.opensearch.commons.notifications.action - -import com.fasterxml.jackson.core.JsonParseException -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.assertThrows -import org.opensearch.commons.notifications.model.Feature -import org.opensearch.commons.utils.createObjectFromJsonString -import org.opensearch.commons.utils.getJsonString -import org.opensearch.commons.utils.recreateObject - -internal class GetFeatureChannelListRequestTests { - - private fun assertGetRequestEquals( - expected: GetFeatureChannelListRequest, - actual: GetFeatureChannelListRequest - ) { - assertEquals(expected.feature, actual.feature) - } - - @Test - fun `Get request serialize and deserialize transport object should be equal`() { - val configRequest = GetFeatureChannelListRequest(Feature.REPORTS) - val recreatedObject = recreateObject(configRequest) { GetFeatureChannelListRequest(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request serialize and deserialize using json object should be equal`() { - val configRequest = GetFeatureChannelListRequest(Feature.INDEX_MANAGEMENT) - val jsonString = getJsonString(configRequest) - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request should throw exception when invalid json object is passed`() { - val jsonString = "sample message" - assertThrows { - createObjectFromJsonString(jsonString) { GetFeatureChannelListRequest.parse(it) } - } - } - - @Test - fun `Get request should safely ignore extra field in json object`() { - val configRequest = GetFeatureChannelListRequest(Feature.ALERTING) - val jsonString = """ - { - "feature":"${configRequest.feature}", - "extra_field_1":["extra", "value"], - "extra_field_2":{"extra":"value"}, - "extra_field_3":"extra value 3" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetFeatureChannelListRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request should throw exception if feature field is absent in json object`() { - val jsonString = """ - { - } - """.trimIndent() - assertThrows { - createObjectFromJsonString(jsonString) { GetFeatureChannelListRequest.parse(it) } - } - } -} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequestTests.kt index cf892209..8873202d 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponseTests.kt index a2e14b40..a0c025ac 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationConfigResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action @@ -32,7 +10,7 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test import org.opensearch.commons.notifications.model.Chime import org.opensearch.commons.notifications.model.ConfigType -import org.opensearch.commons.notifications.model.Feature +import org.opensearch.commons.notifications.model.MicrosoftTeams import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.notifications.model.NotificationConfigInfo import org.opensearch.commons.notifications.model.NotificationConfigSearchResult @@ -41,7 +19,6 @@ import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject import java.time.Instant -import java.util.EnumSet internal class GetNotificationConfigResponseTests { @@ -63,14 +40,12 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", Instant.now(), Instant.now(), - "tenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -85,35 +60,43 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = Slack("https://domain.com/sample_url#1234567890") ) val configInfo1 = NotificationConfigInfo( "config_id1", Instant.now(), Instant.now(), - "tenant", sampleConfig1 ) val sampleConfig2 = NotificationConfig( "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = Chime("https://domain.com/sample_url#1234567890") ) val configInfo2 = NotificationConfigInfo( "config_id2", Instant.now(), Instant.now(), - "tenant", sampleConfig2 ) + val sampleConfig3 = NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = MicrosoftTeams("https://domain.com/sample_url#1234567890") + ) + val configInfo3 = NotificationConfigInfo( + "config_id3", + Instant.now(), + Instant.now(), + sampleConfig3 + ) val searchResult = NotificationConfigSearchResult( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(configInfo1, configInfo2) + listOf(configInfo1, configInfo2, configInfo3) ) val searchResponse = GetNotificationConfigResponse(searchResult) val recreatedObject = recreateObject(searchResponse) { GetNotificationConfigResponse(it) } @@ -129,14 +112,12 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -154,35 +135,43 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = Slack("https://domain.com/sample_url#1234567890") ) val configInfo1 = NotificationConfigInfo( "config_id1", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig1 ) val sampleConfig2 = NotificationConfig( "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = Chime("https://domain.com/sample_url#1234567890") ) val configInfo2 = NotificationConfigInfo( "config_id2", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig2 ) + val sampleConfig3 = NotificationConfig( + "name", 
+ "description", + ConfigType.MICROSOFT_TEAMS, + configData = MicrosoftTeams("https://domain.com/sample_url#1234567890") + ) + val configInfo3 = NotificationConfigInfo( + "config_id3", + lastUpdatedTimeMs, + createdTimeMs, + sampleConfig3 + ) val searchResult = NotificationConfigSearchResult( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(configInfo1, configInfo2) + listOf(configInfo1, configInfo2, configInfo3) ) val searchResponse = GetNotificationConfigResponse(searchResult) val jsonString = getJsonString(searchResponse) @@ -199,15 +188,13 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val configInfo = NotificationConfigInfo( "config-Id", lastUpdatedTimeMs, createdTimeMs, - "selectedTenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -221,12 +208,10 @@ internal class GetNotificationConfigResponseTests { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", "config_type":"slack", - "feature_list":["index_management"], "is_enabled":true, "slack":{"url":"https://domain.com/sample_slack_url#1234567890"} } @@ -250,15 +235,13 @@ internal class GetNotificationConfigResponseTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val configInfo = NotificationConfigInfo( "config-Id", lastUpdatedTimeMs, createdTimeMs, - "selectedTenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -269,12 +252,10 @@ internal class GetNotificationConfigResponseTests { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", "config_type":"slack", - "feature_list":["index_management"], "is_enabled":true, "slack":{"url":"https://domain.com/sample_slack_url#1234567890"} } @@ -299,8 +280,7 @@ internal class GetNotificationConfigResponseTests { { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" + "created_time_ms":"${createdTimeMs.toEpochMilli()}" } ] } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequestTests.kt deleted file mode 100644 index 5e12d5f1..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventRequestTests.kt +++ /dev/null @@ -1,280 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.action - -import com.fasterxml.jackson.core.JsonParseException -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.assertThrows -import org.opensearch.commons.utils.createObjectFromJsonString -import org.opensearch.commons.utils.getJsonString -import org.opensearch.commons.utils.recreateObject -import org.opensearch.search.sort.SortOrder - -internal class GetNotificationEventRequestTests { - - private fun assertGetRequestEquals( - expected: GetNotificationEventRequest, - actual: GetNotificationEventRequest - ) { - assertEquals(expected.eventIds, actual.eventIds) - assertEquals(expected.fromIndex, actual.fromIndex) - assertEquals(expected.maxItems, actual.maxItems) - assertEquals(expected.sortField, actual.sortField) - assertEquals(expected.sortOrder, actual.sortOrder) - assertEquals(expected.filterParams, actual.filterParams) - } - - @Test - fun `Get request serialize and deserialize transport object should be equal`() { - val configRequest = GetNotificationEventRequest( - setOf("sample_event_id"), - 0, - 10, - "sortField", - SortOrder.DESC, - mapOf(Pair("filterKey", "filterValue")) - ) - val recreatedObject = recreateObject(configRequest) { GetNotificationEventRequest(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request serialize and deserialize using json object should be equal`() { - val configRequest = GetNotificationEventRequest( - setOf("sample_event_id"), - 0, - 10, - "sortField", - SortOrder.ASC, - mapOf(Pair("filterKey", "filterValue")) - ) - val jsonString = getJsonString(configRequest) - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with all field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest( - setOf("sample_event_id"), - 10, - 100, - "sortField", - SortOrder.DESC, - mapOf( - Pair("filterKey1", "filterValue1"), - Pair("filterKey2", "true"), - Pair("filterKey3", "filter,Value,3"), - Pair("filterKey4", "4") - ) - ) - val jsonString = """ - { - "event_id_list":["${configRequest.eventIds.first()}"], - "from_index":"10", - "max_items":"100", - "sort_field":"sortField", - "sort_order":"desc", - "filter_param_list": { - "filterKey1":"filterValue1", - "filterKey2":"true", - "filterKey3":"filter,Value,3", - "filterKey4":"4" - } - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only event_id field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(eventIds = setOf("sample_event_id")) - val jsonString = """ - { - "event_id_list":["${configRequest.eventIds.first()}"] - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, 
recreatedObject) - } - - @Test - fun `Get request with only from_index field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(fromIndex = 20) - val jsonString = """ - { - "from_index":"20" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only max_items field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(maxItems = 100) - val jsonString = """ - { - "max_items":"100" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only sort_field field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(sortField = "sample_sort_field") - val jsonString = """ - { - "sort_field":"sample_sort_field" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only sort_order=asc field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(sortOrder = SortOrder.ASC) - val jsonString = """ - { - "sort_order":"asc" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only sort_order=ASC field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(sortOrder = SortOrder.ASC) - val jsonString = """ - { - "sort_order":"ASC" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only sort_order=desc field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(sortOrder = SortOrder.DESC) - val jsonString = """ - { - "sort_order":"desc" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with only sort_order=DESC field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest(sortOrder = SortOrder.DESC) - val jsonString = """ - { - "sort_order":"DESC" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request with invalid sort_order should throw exception`() { - val jsonString = """ - { - "sort_order":"descending" - } - """.trimIndent() - assertThrows { - createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - } - } - - @Test - fun `Get request with only filter_param_list field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest( - filterParams = mapOf( - Pair("filterKey1", "filterValue1"), - Pair("filterKey2", "true"), - Pair("filterKey3", "filter,Value,3"), - Pair("filterKey4", 
"4") - ) - ) - val jsonString = """ - { - "filter_param_list": { - "filterKey1":"filterValue1", - "filterKey2":"true", - "filterKey3":"filter,Value,3", - "filterKey4":"4" - } - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request no field should deserialize json object using parser`() { - val configRequest = GetNotificationEventRequest() - val jsonString = """ - { - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } - - @Test - fun `Get request should throw exception when invalid json object is passed`() { - val jsonString = "sample message" - assertThrows { - createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - } - } - - @Test - fun `Get request should safely ignore extra field in json object`() { - val configRequest = GetNotificationEventRequest(eventIds = setOf("sample_event_id")) - val jsonString = """ - { - "event_id_list":["${configRequest.eventIds.first()}"], - "extra_field_1":["extra", "value"], - "extra_field_2":{"extra":"value"}, - "extra_field_3":"extra value 3" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventRequest.parse(it) } - assertGetRequestEquals(configRequest, recreatedObject) - } -} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponseTests.kt deleted file mode 100644 index 4f7d72d1..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetNotificationEventResponseTests.kt +++ /dev/null @@ -1,391 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * - */ -package org.opensearch.commons.notifications.action - -import org.apache.lucene.search.TotalHits -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.opensearch.commons.notifications.model.ConfigType -import org.opensearch.commons.notifications.model.DeliveryStatus -import org.opensearch.commons.notifications.model.EventSource -import org.opensearch.commons.notifications.model.EventStatus -import org.opensearch.commons.notifications.model.Feature -import org.opensearch.commons.notifications.model.NotificationEvent -import org.opensearch.commons.notifications.model.NotificationEventInfo -import org.opensearch.commons.notifications.model.NotificationEventSearchResult -import org.opensearch.commons.notifications.model.SeverityType -import org.opensearch.commons.utils.createObjectFromJsonString -import org.opensearch.commons.utils.getJsonString -import org.opensearch.commons.utils.recreateObject -import java.time.Instant - -internal class GetNotificationEventResponseTests { - - private fun assertSearchResultEquals( - expected: NotificationEventSearchResult, - actual: NotificationEventSearchResult - ) { - assertEquals(expected.startIndex, actual.startIndex) - assertEquals(expected.totalHits, actual.totalHits) - assertEquals(expected.totalHitRelation, actual.totalHitRelation) - assertEquals(expected.objectListFieldName, actual.objectListFieldName) - assertEquals(expected.objectList, actual.objectList) - } - - @Test - fun `Search result serialize and deserialize with event object should be equal`() { - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - Instant.now(), - Instant.now(), - "tenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val searchResponse = GetNotificationEventResponse(searchResult) - val recreatedObject = recreateObject(searchResponse) { GetNotificationEventResponse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result serialize and deserialize with multiple event status object should be equal`() { - val eventSource1 = EventSource( - "title 1", - "reference_id_1", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val eventSource2 = EventSource( - "title 2", - "reference_id_2", - Feature.REPORTS, - severity = SeverityType.HIGH - ) - val status1 = EventStatus( - "config_id1", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val status2 = EventStatus( - "config_id2", - "name", - ConfigType.CHIME, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val eventInfo1 = NotificationEventInfo( - "event_id1", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1)) - ) - val eventInfo2 = NotificationEventInfo( - "event_id2", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status2)) - ) - val eventInfo3 = NotificationEventInfo( - "event_id3", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1, status2)) - ) - val eventInfo4 = NotificationEventInfo( - "event_id4", - 
Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status1, status2)) - ) - val searchResult = NotificationEventSearchResult( - 100, - 1000, - TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(eventInfo1, eventInfo2, eventInfo3, eventInfo4) - ) - val searchResponse = GetNotificationEventResponse(searchResult) - val recreatedObject = recreateObject(searchResponse) { GetNotificationEventResponse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result serialize and deserialize using json event object should be equal`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val searchResponse = GetNotificationEventResponse(searchResult) - val jsonString = getJsonString(searchResponse) - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventResponse.parse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result serialize and deserialize using json with multiple event object should be equal`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val eventSource1 = EventSource( - "title 1", - "reference_id_1", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val eventSource2 = EventSource( - "title 2", - "reference_id_2", - Feature.REPORTS, - severity = SeverityType.HIGH - ) - val status1 = EventStatus( - "config_id1", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val status2 = EventStatus( - "config_id2", - "name", - ConfigType.CHIME, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val eventInfo1 = NotificationEventInfo( - "event_id1", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - NotificationEvent(eventSource1, listOf(status1)) - ) - val eventInfo2 = NotificationEventInfo( - "event_id2", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - NotificationEvent(eventSource2, listOf(status2)) - ) - val searchResult = NotificationEventSearchResult( - 100, - 1000, - TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(eventInfo1, eventInfo2) - ) - val searchResponse = GetNotificationEventResponse(searchResult) - val jsonString = getJsonString(searchResponse) - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventResponse.parse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result should safely ignore extra field in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - 
deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "selectedTenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val jsonString = """ - { - "start_index":"0", - "total_hits":"1", - "total_hit_relation":"eq", - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - ], - "extra_field_1":["extra", "value"], - "extra_field_2":{"extra":"value"}, - "extra_field_3":"extra value 3" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventResponse.parse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result should safely fallback to default if startIndex, totalHits or totalHitRelation field absent in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "selectedTenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val jsonString = """ - { - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - ] - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { GetNotificationEventResponse.parse(it) } - assertSearchResultEquals(searchResult, recreatedObject.searchResult) - } - - @Test - fun `Search result should throw exception if event is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val jsonString = """ - { - "start_index":"0", - "total_hits":"1", - "total_hit_relation":"eq", - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" - } - ] - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { GetNotificationEventResponse.parse(it) } - } - } -} 
diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequestTests.kt index 3861fdd9..1c85ed70 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponseTests.kt index 3103825f..1aa065a2 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/GetPluginFeaturesResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action @@ -39,7 +17,7 @@ internal class GetPluginFeaturesResponseTests { expected: GetPluginFeaturesResponse, actual: GetPluginFeaturesResponse ) { - assertEquals(expected.configTypeList, actual.configTypeList) + assertEquals(expected.allowedConfigTypeList, actual.allowedConfigTypeList) assertEquals(expected.pluginFeatures, actual.pluginFeatures) } @@ -84,7 +62,7 @@ internal class GetPluginFeaturesResponseTests { ) val jsonString = """ { - "config_type_list":["config_type_1", "config_type_2", "config_type_3"], + "allowed_config_type_list":["config_type_1", "config_type_2", "config_type_3"], "plugin_features":{ "FeatureKey1":"FeatureValue1", "FeatureKey2":"FeatureValue2", @@ -100,7 +78,7 @@ internal class GetPluginFeaturesResponseTests { } @Test - fun `Get Response should throw exception if config_type_list is absent in json`() { + fun `Get Response should throw exception if allowed_config_type_list is absent in json`() { val jsonString = """ { "plugin_features":{ diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequestTests.kt new file mode 100644 index 00000000..e4b990a5 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationRequestTests.kt @@ -0,0 +1,34 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertNull +import org.junit.jupiter.api.Test +import org.opensearch.commons.destination.message.LegacyChimeMessage +import org.opensearch.commons.utils.recreateObject + +internal class LegacyPublishNotificationRequestTests { + + private fun assertRequestEquals( + expected: LegacyPublishNotificationRequest, + actual: LegacyPublishNotificationRequest + ) { + assertEquals(expected.baseMessage.channelName, actual.baseMessage.channelName) + assertEquals(expected.baseMessage.channelType, actual.baseMessage.channelType) + assertEquals(expected.baseMessage.messageContent, actual.baseMessage.messageContent) + assertEquals(expected.baseMessage.url, actual.baseMessage.url) + assertNull(actual.validate()) + } + + @Test + fun `publish request serialize and deserialize transport object should be equal`() { + val baseMessage = LegacyChimeMessage.Builder("chime_message").withMessage("Hello world").withUrl("https://amazon.com").build() + val request = LegacyPublishNotificationRequest(baseMessage) + val recreatedObject = recreateObject(request) { LegacyPublishNotificationRequest(it) } + assertRequestEquals(request, recreatedObject) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponseTests.kt new file mode 100644 index 00000000..4a75a82e --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/LegacyPublishNotificationResponseTests.kt @@ -0,0 +1,23 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.action + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.opensearch.commons.destination.response.LegacyDestinationResponse +import 
org.opensearch.commons.utils.recreateObject + +internal class LegacyPublishNotificationResponseTests { + + @Test + fun `Create response serialize and deserialize transport object should be equal`() { + val res = LegacyDestinationResponse.Builder().withStatusCode(200).withResponseContent("Hello world").build() + val configResponse = LegacyPublishNotificationResponse(res) + val recreatedObject = recreateObject(configResponse) { LegacyPublishNotificationResponse(it) } + assertEquals(configResponse.destinationResponse.statusCode, recreatedObject.destinationResponse.statusCode) + assertEquals(configResponse.destinationResponse.responseContent, recreatedObject.destinationResponse.responseContent) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequestTests.kt index 9c8bd2b4..70e0bd6c 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.action @@ -34,7 +12,6 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows import org.opensearch.commons.notifications.model.ChannelMessage import org.opensearch.commons.notifications.model.EventSource -import org.opensearch.commons.notifications.model.Feature import org.opensearch.commons.notifications.model.SeverityType import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString @@ -58,7 +35,6 @@ internal class SendNotificationRequestTests { val notificationInfo = EventSource( "title", "reference_id", - Feature.REPORTS, SeverityType.HIGH, listOf("tag1", "tag2") ) @@ -82,7 +58,6 @@ internal class SendNotificationRequestTests { val notificationInfo = EventSource( "title", "reference_id", - Feature.INDEX_MANAGEMENT, SeverityType.CRITICAL, listOf("tag1", "tag2") ) @@ -115,7 +90,6 @@ internal class SendNotificationRequestTests { val notificationInfo = EventSource( "title", "reference_id", - Feature.ALERTING, SeverityType.HIGH, listOf("tag1", "tag2") ) @@ -135,7 +109,6 @@ internal class SendNotificationRequestTests { "event_source":{ "title":"${notificationInfo.title}", "reference_id":"${notificationInfo.referenceId}", - "feature":"${notificationInfo.feature}", "severity":"${notificationInfo.severity}", "tags":["tag1", "tag2"] }, @@ -159,7 +132,6 @@ internal class SendNotificationRequestTests { val notificationInfo = EventSource( "title", "reference_id", - Feature.REPORTS, SeverityType.INFO, listOf("tag1", "tag2") ) @@ -179,7 +151,6 @@ internal class SendNotificationRequestTests { "event_source":{ "title":"${notificationInfo.title}", "reference_id":"${notificationInfo.referenceId}", - "feature":"${notificationInfo.feature}", "severity":"${notificationInfo.severity}", "tags":["tag1", "tag2"] }, @@ -216,7 +187,6 @@ internal class SendNotificationRequestTests { "event_source":{ "title":"title", "reference_id":"reference_id", - "feature":"feature", "severity":"High", "tags":["tag1", "tag2"] }, @@ -235,7 +205,6 @@ internal class SendNotificationRequestTests { "event_source":{ "title":"title", "reference_id":"reference_id", - "feature":"feature", "severity":"High", "tags":["tag1", "tag2"] }, @@ -256,7 +225,6 @@ internal class SendNotificationRequestTests { "event_source":{ "title":"title", "reference_id":"reference_id", - "feature":"feature", "severity":"High", "tags":["tag1", "tag2"] }, diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponseTests.kt index 511c5760..1de3c8fa 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/SendNotificationResponseTests.kt @@ -1,35 +1,19 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action import com.fasterxml.jackson.core.JsonParseException +import org.junit.Test import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows +import org.opensearch.commons.notifications.model.ConfigType +import org.opensearch.commons.notifications.model.DeliveryStatus +import org.opensearch.commons.notifications.model.EventSource +import org.opensearch.commons.notifications.model.EventStatus +import org.opensearch.commons.notifications.model.NotificationEvent +import org.opensearch.commons.notifications.model.SeverityType import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject @@ -38,25 +22,27 @@ internal class SendNotificationResponseTests { @Test fun `Create response serialize and deserialize transport object should be equal`() { - val configResponse = SendNotificationResponse("sample_notification_id") - val recreatedObject = recreateObject(configResponse) { SendNotificationResponse(it) } - assertEquals(configResponse.notificationId, recreatedObject.notificationId) + val sampleEvent = getSampleEvent() + + val recreatedObject = recreateObject(sampleEvent) { SendNotificationResponse(it) } + assertEquals(sampleEvent, recreatedObject) } @Test fun `Create response serialize and deserialize using json object should be equal`() { - val configResponse = SendNotificationResponse("sample_notification_id") - val jsonString = getJsonString(configResponse) + val sampleEvent = getSampleEvent() + + val jsonString = getJsonString(sampleEvent) val recreatedObject = createObjectFromJsonString(jsonString) { SendNotificationResponse.parse(it) } - assertEquals(configResponse.notificationId, recreatedObject.notificationId) + assertEquals(sampleEvent, recreatedObject) } @Test fun `Create response should deserialize json object using parser`() { - val notificationId = "sample_notification_id" - val jsonString = "{\"event_id\":\"$notificationId\"}" + val sampleEvent = getSampleEvent() + val jsonString = "{\"event_id\":\"$sampleEvent\"}" val recreatedObject = createObjectFromJsonString(jsonString) { SendNotificationResponse.parse(it) } - assertEquals(notificationId, recreatedObject.notificationId) + assertEquals(sampleEvent, recreatedObject) } @Test @@ -77,16 +63,32 @@ internal class SendNotificationResponseTests { @Test fun `Create response should safely ignore extra field in json object`() { - val notificationId = "sample_notification_id" + val sampleEvent = getSampleEvent() val jsonString = """ { - "event_id":"$notificationId", + "event_id":"$sampleEvent", "extra_field_1":["extra", "value"], "extra_field_2":{"extra":"value"}, "extra_field_3":"extra value 3" } """.trimIndent() val recreatedObject = createObjectFromJsonString(jsonString) { SendNotificationResponse.parse(it) } - assertEquals(notificationId, recreatedObject.notificationId) + assertEquals(sampleEvent, recreatedObject) + } + + private fun getSampleEvent(): NotificationEvent { + val sampleEventSource = EventSource( + "title", + "reference_id", + 
severity = SeverityType.INFO + ) + val sampleStatus = EventStatus( + "config_id", + "name", + ConfigType.SLACK, + deliveryStatus = DeliveryStatus("404", "invalid recipient") + ) + + return NotificationEvent(sampleEventSource, listOf(sampleStatus)) } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequestTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequestTests.kt index 607e8077..fecd7710 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequestTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigRequestTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action @@ -35,8 +13,9 @@ import org.opensearch.commons.notifications.model.Chime import org.opensearch.commons.notifications.model.ConfigType import org.opensearch.commons.notifications.model.Email import org.opensearch.commons.notifications.model.EmailGroup -import org.opensearch.commons.notifications.model.Feature +import org.opensearch.commons.notifications.model.EmailRecipient import org.opensearch.commons.notifications.model.MethodType +import org.opensearch.commons.notifications.model.MicrosoftTeams import org.opensearch.commons.notifications.model.NotificationConfig import org.opensearch.commons.notifications.model.Slack import org.opensearch.commons.notifications.model.SmtpAccount @@ -44,7 +23,6 @@ import org.opensearch.commons.notifications.model.Webhook import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -import java.util.EnumSet internal class UpdateNotificationConfigRequestTests { @@ -54,21 +32,28 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleWebhook + configData = sampleWebhook, + isEnabled = true + ) + } + private fun createMicrosoftTeamsContentConfigObject(): NotificationConfig { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_microsoft_teams_url#1234567890") + return NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = sampleMicrosoftTeams, + isEnabled = true ) } - private fun createSlackContentConfigObject(): NotificationConfig { val sampleSlack = Slack("https://domain.com/sample_slack_url#1234567890") return NotificationConfig( "name", "description", ConfigType.SLACK, - 
EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) } @@ -78,37 +63,34 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleChime + configData = sampleChime, + isEnabled = true ) } private fun createEmailGroupContentConfigObject(): NotificationConfig { - val sampleEmailGroup = EmailGroup(listOf("dummy@company.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("dummy@company.com"))) return NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmailGroup + configData = sampleEmailGroup, + isEnabled = true ) } private fun createEmailContentConfigObject(): NotificationConfig { val sampleEmail = Email( emailAccountID = "sample_1@dummy.com", - recipients = listOf("sample_2@dummy.com"), + recipients = listOf(EmailRecipient("sample_2@dummy.com")), emailGroupIds = listOf("sample_3@dummy.com") ) return NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmail + configData = sampleEmail, + isEnabled = true ) } @@ -123,9 +105,8 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSmtpAccount + configData = sampleSmtpAccount, + isEnabled = true ) } @@ -139,6 +120,16 @@ internal class UpdateNotificationConfigRequestTests { assertEquals("config_id", recreatedObject.configId) } + @Test + fun `Update config serialize and deserialize transport object should be equal Microsoft Teams`() { + val configRequest = UpdateNotificationConfigRequest("config_id", createMicrosoftTeamsContentConfigObject()) + val recreatedObject = + recreateObject(configRequest) { UpdateNotificationConfigRequest(it) } + assertNull(recreatedObject.validate()) + assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) + assertEquals("config_id", recreatedObject.configId) + } + @Test fun `Update config serialize and deserialize transport object should be equal Slack`() { val configRequest = UpdateNotificationConfigRequest("config_id", createSlackContentConfigObject()) @@ -198,6 +189,15 @@ internal class UpdateNotificationConfigRequestTests { assertEquals("config_id", recreatedObject.configId) } + @Test + fun `Update config serialize and deserialize using json object should be equal microsoft Teams`() { + val configRequest = UpdateNotificationConfigRequest("config_id", createMicrosoftTeamsContentConfigObject()) + val jsonString = getJsonString(configRequest) + val recreatedObject = createObjectFromJsonString(jsonString) { UpdateNotificationConfigRequest.parse(it) } + assertEquals(configRequest.notificationConfig, recreatedObject.notificationConfig) + assertEquals("config_id", recreatedObject.configId) + } + @Test fun `Update config serialize and deserialize using json object should be equal slack`() { val configRequest = UpdateNotificationConfigRequest("config_id", createSlackContentConfigObject()) @@ -250,9 +250,8 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val jsonString = """ @@ -280,9 +279,8 @@ 
internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleWebhook + configData = sampleWebhook, + isEnabled = true ) val jsonString = """ @@ -310,9 +308,8 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleChime + configData = sampleChime, + isEnabled = true ) val jsonString = """ @@ -335,14 +332,13 @@ internal class UpdateNotificationConfigRequestTests { @Test fun `Update config should deserialize json object using parser Email Group`() { - val sampleEmailGroup = EmailGroup(listOf("dummy@company.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("dummy@company.com"))) val config = NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmailGroup + configData = sampleEmailGroup, + isEnabled = true ) val jsonString = """ @@ -354,7 +350,7 @@ internal class UpdateNotificationConfigRequestTests { "config_type":"email_group", "feature_list":["index_management"], "is_enabled":true, - "email_group":{"recipient_list":["dummy@company.com"]} + "email_group":{"recipient_list":[{"recipient":"dummy@company.com"}]} } } """.trimIndent() @@ -367,16 +363,15 @@ internal class UpdateNotificationConfigRequestTests { fun `Update config should deserialize json object using parser Email`() { val sampleEmail = Email( emailAccountID = "sample_1@dummy.com", - recipients = listOf("sample_2@dummy.com"), + recipients = listOf(EmailRecipient("sample_2@dummy.com")), emailGroupIds = listOf("sample_3@dummy.com") ) val config = NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleEmail + configData = sampleEmail, + isEnabled = true ) val jsonString = """ @@ -388,8 +383,11 @@ internal class UpdateNotificationConfigRequestTests { "config_type":"email", "feature_list":["index_management"], "is_enabled":true, - "email":{"email_account_id":"sample_1@dummy.com","recipient_list":["sample_2@dummy.com"], - "email_group_id_list":["sample_3@dummy.com"] } + "email":{ + "email_account_id":"sample_1@dummy.com", + "recipient_list":[{"recipient":"sample_2@dummy.com"}], + "email_group_id_list":["sample_3@dummy.com"] + } } } """.trimIndent() @@ -410,9 +408,8 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSmtpAccount + configData = sampleSmtpAccount, + isEnabled = true ) val jsonString = """ @@ -448,9 +445,8 @@ internal class UpdateNotificationConfigRequestTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val jsonString = """ diff --git a/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponseTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponseTests.kt index aa2839c3..ca0b18cc 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponseTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/action/UpdateNotificationConfigResponseTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors 
* SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.action diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/AttachmentTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/AttachmentTests.kt index b0873cfe..5aa127fe 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/AttachmentTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/AttachmentTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelListTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelListTests.kt similarity index 59% rename from src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelListTests.kt rename to src/test/kotlin/org/opensearch/commons/notifications/model/ChannelListTests.kt index 5bd3db62..cad52261 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelListTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelListTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -34,11 +12,11 @@ import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -internal class FeatureChannelListTests { +internal class ChannelListTests { private fun assertSearchResultEquals( - expected: FeatureChannelList, - actual: FeatureChannelList + expected: ChannelList, + actual: ChannelList ) { assertEquals(expected.startIndex, actual.startIndex) assertEquals(expected.totalHits, actual.totalHits) @@ -49,129 +27,150 @@ internal class FeatureChannelListTests { @Test fun `Feature Channel List serialize and deserialize using transport should be equal`() { - val featureChannel = FeatureChannel( + val channel = Channel( "configId", "name", "description", ConfigType.SLACK, true ) - val featureChannelList = FeatureChannelList(featureChannel) - val recreatedObject = recreateObject(featureChannelList) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val channelList = ChannelList(channel) + val recreatedObject = recreateObject(channelList) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test fun `Feature Channel List serialize and deserialize multiple object with default values should be equal`() { - val featureChannel1 = FeatureChannel( + val channel1 = Channel( "configId1", "name1", "description1", ConfigType.SLACK, true ) - val featureChannel2 = FeatureChannel( + val channel2 = Channel( "configId2", "name2", "description2", ConfigType.CHIME, true ) - val featureChannelList = FeatureChannelList(listOf(featureChannel1, featureChannel2)) - val expectedResult = FeatureChannelList( + val channel3 = Channel( + "configId3", + "name3", + "description3", + ConfigType.MICROSOFT_TEAMS, + true + ) + val channelList = ChannelList(listOf(channel1, channel2, channel3)) + val expectedResult = ChannelList( 0, - 2, + 3, TotalHits.Relation.EQUAL_TO, - listOf(featureChannel1, featureChannel2) + listOf(channel1, channel2, channel3) ) - val recreatedObject = recreateObject(featureChannelList) { FeatureChannelList(it) } + val recreatedObject = recreateObject(channelList) { ChannelList(it) } assertSearchResultEquals(expectedResult, recreatedObject) } @Test fun `Feature Channel List serialize and deserialize with multiple object should be equal`() { - val featureChannel1 = FeatureChannel( + val channel1 = Channel( "configId1", "name1", "description1", ConfigType.SLACK, true ) - val featureChannel2 = FeatureChannel( + val channel2 = Channel( "configId2", "name2", "description2", ConfigType.CHIME, true ) - val featureChannelList = FeatureChannelList( + val channel3 = Channel( + "configId3", + "name3", + "description3", + ConfigType.MICROSOFT_TEAMS, + true + ) + val channelList = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(featureChannel1, featureChannel2) + listOf(channel1, channel2, channel3) ) - val recreatedObject = recreateObject(featureChannelList) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val recreatedObject = 
recreateObject(channelList) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test fun `Feature Channel List serialize and deserialize using json should be equal`() { - val featureChannel = FeatureChannel( + val channel = Channel( "configId", "name", "description", ConfigType.SLACK, true ) - val featureChannelList = FeatureChannelList(featureChannel) - val jsonString = getJsonString(featureChannelList) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val channelList = ChannelList(channel) + val jsonString = getJsonString(channelList) + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test fun `Feature Channel List serialize and deserialize using json with multiple object should be equal`() { - val featureChannel1 = FeatureChannel( + val channel1 = Channel( "configId1", "name1", "description1", ConfigType.SLACK, true ) - val featureChannel2 = FeatureChannel( + val channel2 = Channel( "configId2", "name2", "description2", ConfigType.CHIME, true ) - val featureChannelList = FeatureChannelList( + val channel3 = Channel( + "configId3", + "name3", + "description3", + ConfigType.MICROSOFT_TEAMS, + true + ) + val channelList = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(featureChannel1, featureChannel2) + listOf(channel1, channel2, channel3) ) - val jsonString = getJsonString(featureChannelList) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val jsonString = getJsonString(channelList) + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test fun `Feature Channel List should safely ignore extra field in json object`() { - val featureChannel = FeatureChannel( + val channel = Channel( "configId", "name", "description", ConfigType.SLACK, true ) - val featureChannelList = FeatureChannelList(featureChannel) + val channelList = ChannelList(channel) val jsonString = """ { "start_index":"0", "total_hits":"1", "total_hit_relation":"eq", - "feature_channel_list":[ + "channel_list":[ { "config_id":"configId", "name":"name", @@ -185,23 +184,23 @@ internal class FeatureChannelListTests { "extra_field_3":"extra value 3" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test fun `Feature Channel List should safely fallback to default if startIndex, totalHits or totalHitRelation field absent in json object`() { - val featureChannel = FeatureChannel( + val channel = Channel( "configId", "name", "description", ConfigType.SLACK, true ) - val featureChannelList = FeatureChannelList(featureChannel) + val channelList = ChannelList(channel) val jsonString = """ { - "feature_channel_list":[ + "channel_list":[ { "config_id":"configId", "name":"name", @@ -212,12 +211,12 @@ internal class FeatureChannelListTests { ] } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } - assertSearchResultEquals(featureChannelList, recreatedObject) + val recreatedObject = 
createObjectFromJsonString(jsonString) { ChannelList(it) } + assertSearchResultEquals(channelList, recreatedObject) } @Test - fun `Feature Channel List should throw exception if feature_channel_list is absent in json`() { + fun `Channel List should throw exception if channel_list is absent in json`() { val jsonString = """ { "start_index":"0", @@ -226,7 +225,7 @@ internal class FeatureChannelListTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + createObjectFromJsonString(jsonString) { ChannelList(it) } } } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelMessageTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelMessageTests.kt index 7cda9d66..0a9f3f87 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelMessageTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelMessageTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelTests.kt similarity index 57% rename from src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelTests.kt rename to src/test/kotlin/org/opensearch/commons/notifications/model/ChannelTests.kt index d2b8f009..3f64cc5a 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureChannelTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/ChannelTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. 
See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -33,38 +11,38 @@ import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -internal class FeatureChannelTests { +internal class ChannelTests { @Test - fun `FeatureChannel Object serialize and deserialize using transport should be equal`() { - val featureChannel = FeatureChannel( + fun `Channel Object serialize and deserialize using transport should be equal`() { + val channel = Channel( "configId", "name", "description", ConfigType.SLACK, true ) - val recreatedObject = recreateObject(featureChannel) { FeatureChannel(it) } - assertEquals(featureChannel, recreatedObject) + val recreatedObject = recreateObject(channel) { Channel(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Object serialize and deserialize using json should be equal`() { - val featureChannel = FeatureChannel( + fun `Channel Object serialize and deserialize using json should be equal`() { + val channel = Channel( "configId", "name", "description", ConfigType.CHIME, false ) - val jsonString = getJsonString(featureChannel) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } - assertEquals(featureChannel, recreatedObject) + val jsonString = getJsonString(channel) + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Json parsing should safely ignore extra fields`() { - val featureChannel = FeatureChannel( + fun `Channel Json parsing should safely ignore extra fields`() { + val channel = Channel( "configId", "name", "description", @@ -83,13 +61,13 @@ internal class FeatureChannelTests { "extra_field_3":"extra value 3" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } - assertEquals(featureChannel, recreatedObject) + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Json parsing should safely ignore unknown config type`() { - val featureChannel = FeatureChannel( + fun `Channel Json parsing should safely ignore unknown config type`() { + val channel = Channel( "configId", "name", "description", @@ -105,13 +83,13 @@ internal class FeatureChannelTests { "is_enabled":true } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } - assertEquals(featureChannel, recreatedObject) + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Json parsing should safely parse if description is absent`() { - val featureChannel = FeatureChannel( + fun `Channel Json parsing should safely parse if description is absent`() { + val channel = Channel( "configId", "name", "", @@ -126,13 +104,13 @@ internal class FeatureChannelTests { "is_enabled":true } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } - assertEquals(featureChannel, recreatedObject) + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Json parsing should safely parse if is_enabled is 
absent`() { - val featureChannel = FeatureChannel( + fun `Channel Json parsing should safely parse if is_enabled is absent`() { + val channel = Channel( "configId", "name", "description", @@ -147,12 +125,12 @@ internal class FeatureChannelTests { "config_type":"slack" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } - assertEquals(featureChannel, recreatedObject) + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } + assertEquals(channel, recreatedObject) } @Test - fun `FeatureChannel Json parsing should throw exception if config_id is absent`() { + fun `Channel Json parsing should throw exception if config_id is absent`() { val jsonString = """ { "name":"name", @@ -162,12 +140,12 @@ internal class FeatureChannelTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + createObjectFromJsonString(jsonString) { Channel.parse(it) } } } @Test - fun `FeatureChannel Json parsing should throw exception if config_id is empty`() { + fun `Channel Json parsing should throw exception if config_id is empty`() { val jsonString = """ { "config_id":"", @@ -178,12 +156,12 @@ internal class FeatureChannelTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + createObjectFromJsonString(jsonString) { Channel.parse(it) } } } @Test - fun `FeatureChannel Json parsing should throw exception if name is absent`() { + fun `Channel Json parsing should throw exception if name is absent`() { val jsonString = """ { "config_id":"configId", @@ -193,12 +171,12 @@ internal class FeatureChannelTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + createObjectFromJsonString(jsonString) { Channel.parse(it) } } } @Test - fun `FeatureChannel Json parsing should throw exception if name is empty`() { + fun `Channel Json parsing should throw exception if name is empty`() { val jsonString = """ { "config_id":"configId", @@ -209,12 +187,12 @@ internal class FeatureChannelTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + createObjectFromJsonString(jsonString) { Channel.parse(it) } } } @Test - fun `FeatureChannel Json parsing should throw exception if config_type is absent`() { + fun `Channel Json parsing should throw exception if config_type is absent`() { val jsonString = """ { "config_id":"configId", @@ -224,7 +202,7 @@ internal class FeatureChannelTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + createObjectFromJsonString(jsonString) { Channel.parse(it) } } } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/ChimeTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/ChimeTests.kt index f70dc097..8e7f434e 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/ChimeTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/ChimeTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the 
Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -89,11 +67,11 @@ internal class ChimeTests { } @Test - fun `Chime should throw exception when url protocol is not https`() { + fun `Chime should throw exception when url protocol is not https or http`() { assertThrows { - Chime("http://domain.com/sample_url#1234567890") + Chime("ftp://domain.com/sample_url#1234567890") } - val jsonString = "{\"url\":\"http://domain.com/sample_url\"}" + val jsonString = "{\"url\":\"ftp://domain.com/sample_url\"}" assertThrows { createObjectFromJsonString(jsonString) { Chime.parse(it) } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/ConfigTypeTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/ConfigTypeTests.kt index 28b1ec7e..6be8d68b 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/ConfigTypeTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/ConfigTypeTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/DeliveryStatusTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/DeliveryStatusTests.kt index 377d2be9..33c42e99 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/DeliveryStatusTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/DeliveryStatusTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. 
All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailGroupTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailGroupTests.kt index 5146d785..15ab3ade 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailGroupTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailGroupTests.kt @@ -1,35 +1,12 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model import com.fasterxml.jackson.core.JsonParseException import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -import org.junit.jupiter.api.assertDoesNotThrow import org.junit.jupiter.api.assertThrows import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString @@ -37,56 +14,26 @@ import org.opensearch.commons.utils.recreateObject internal class EmailGroupTests { - private fun checkValidEmailAddress(emailAddress: String) { - assertDoesNotThrow("should accept $emailAddress") { - EmailGroup(listOf(emailAddress)) - } - } - - private fun checkInvalidEmailAddress(emailAddress: String) { - assertThrows("Should throw an Exception for invalid email $emailAddress") { - EmailGroup(listOf(emailAddress)) - } - } - - @Test - fun `EmailGroup should accept valid email address`() { - checkValidEmailAddress("email1234@email.com") - checkValidEmailAddress("email+1234@email.com") - checkValidEmailAddress("email-1234@email.com") - checkValidEmailAddress("email_1234@email.com") - checkValidEmailAddress("email.1234@email.com") - checkValidEmailAddress("e.ma_il-1+2@test-email-domain.co.uk") - checkValidEmailAddress("email-.+_=#|@domain.com") - checkValidEmailAddress("e@mail.com") - } - - @Test - fun `EmailGroup should throw exception for invalid email address`() { - checkInvalidEmailAddress("email") - checkInvalidEmailAddress("email@") - checkInvalidEmailAddress("email@1234@email.com") - checkInvalidEmailAddress(".email@email.com") - checkInvalidEmailAddress("email.@email.com") - checkInvalidEmailAddress("email..1234@email.com") - checkInvalidEmailAddress("email@email..com") - checkInvalidEmailAddress("email@.com") - checkInvalidEmailAddress("email@email.com.") - checkInvalidEmailAddress("email@.email.com") - checkInvalidEmailAddress("email@email.com-") - checkInvalidEmailAddress("email@email_domain.com") - } - @Test fun `EmailGroup serialize and deserialize transport object should be equal`() { - val sampleEmailGroup = EmailGroup(listOf("email1@email.com", "email2@email.com")) + val sampleEmailGroup = EmailGroup( + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ) + ) val recreatedObject = recreateObject(sampleEmailGroup) { EmailGroup(it) } assertEquals(sampleEmailGroup, recreatedObject) } @Test fun `EmailGroup serialize and deserialize using json object should be equal`() { - val sampleEmailGroup = EmailGroup(listOf("email1@email.com", "email2@email.com")) + val sampleEmailGroup = EmailGroup( + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ) + ) val jsonString = getJsonString(sampleEmailGroup) val recreatedObject = createObjectFromJsonString(jsonString) { EmailGroup.parse(it) } assertEquals(sampleEmailGroup, recreatedObject) @@ -94,12 +41,17 @@ internal class EmailGroupTests { @Test fun `EmailGroup should deserialize json object using parser`() { - val sampleEmailGroup = EmailGroup(listOf("email1@email.com", "email2@email.com")) + val sampleEmailGroup = EmailGroup( + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ) + ) val jsonString = """ { "recipient_list":[ - "${sampleEmailGroup.recipients[0]}", - "${sampleEmailGroup.recipients[1]}" + {"recipient":"${sampleEmailGroup.recipients[0].recipient}"}, + {"recipient":"${sampleEmailGroup.recipients[1].recipient}"} ] }" """.trimIndent() @@ -117,12 +69,17 @@ internal class EmailGroupTests { @Test fun `EmailGroup should 
throw exception when recipients is replaced with recipients2 in json object`() { - val sampleEmailGroup = EmailGroup(listOf("email1@email.com", "email2@email.com")) + val sampleEmailGroup = EmailGroup( + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ) + ) val jsonString = """ { "recipient_list2":[ - "${sampleEmailGroup.recipients[0]}", - "${sampleEmailGroup.recipients[1]}" + {"recipient":"${sampleEmailGroup.recipients[0]}"}, + {"recipient":"${sampleEmailGroup.recipients[1]}"} ] }" """.trimIndent() @@ -133,8 +90,17 @@ internal class EmailGroupTests { @Test fun `EmailGroup should safely ignore extra field in json object`() { - val sampleEmailGroup = EmailGroup(listOf("email@email.com")) - val jsonString = "{\"recipient_list\":[\"${sampleEmailGroup.recipients[0]}\"], \"another\":\"field\"}" + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("email1@email.com"))) + val jsonString = """ + { + "recipient_list":[ + {"recipient":"${sampleEmailGroup.recipients[0].recipient}"} + ], + "extra_field_1":["extra", "value"], + "extra_field_2":{"extra":"value"}, + "extra_field_3":"extra value 3" + }" + """.trimIndent() val recreatedObject = createObjectFromJsonString(jsonString) { EmailGroup.parse(it) } assertEquals(sampleEmailGroup, recreatedObject) } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatusTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatusTests.kt index 31bb80bd..c9bd89af 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatusTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientStatusTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientTests.kt new file mode 100644 index 00000000..56a5f752 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailRecipientTests.kt @@ -0,0 +1,120 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.model + +import com.fasterxml.jackson.core.JsonParseException +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import org.opensearch.commons.utils.createObjectFromJsonString +import org.opensearch.commons.utils.getJsonString +import org.opensearch.commons.utils.recreateObject + +internal class EmailRecipientTests { + + private fun checkValidEmailAddress(emailAddress: String) { + assertDoesNotThrow("should accept $emailAddress") { + EmailRecipient(emailAddress) + } + } + + private fun checkInvalidEmailAddress(emailAddress: String) { + assertThrows("Should throw an Exception for invalid email $emailAddress") { + EmailRecipient(emailAddress) + } + } + + @Test + fun `EmailRecipient should accept valid email address`() { + checkValidEmailAddress("email1234@email.com") + checkValidEmailAddress("email+1234@email.com") + checkValidEmailAddress("email-1234@email.com") + checkValidEmailAddress("email_1234@email.com") + checkValidEmailAddress("email.1234@email.com") + checkValidEmailAddress("e.ma_il-1+2@test-email-domain.co.uk") + checkValidEmailAddress("email-.+_=#|@domain.com") + checkValidEmailAddress("e@mail.com") + } + + @Test + fun `EmailRecipient should throw exception for invalid email address`() { + checkInvalidEmailAddress("email") + checkInvalidEmailAddress("email@") + checkInvalidEmailAddress("email@1234@email.com") + checkInvalidEmailAddress(".email@email.com") + checkInvalidEmailAddress("email.@email.com") + checkInvalidEmailAddress("email..1234@email.com") + checkInvalidEmailAddress("email@email..com") + checkInvalidEmailAddress("email@.com") + checkInvalidEmailAddress("email@email.com.") + checkInvalidEmailAddress("email@.email.com") + checkInvalidEmailAddress("email@email.com-") + checkInvalidEmailAddress("email@email_domain.com") + } + + @Test + fun `EmailRecipient serialize and deserialize transport object should be equal`() { + val sampleEmailRecipient = EmailRecipient("email1@email.com") + val recreatedObject = recreateObject(sampleEmailRecipient) { EmailRecipient(it) } + assertEquals(sampleEmailRecipient, recreatedObject) + } + + @Test + fun `EmailRecipient serialize and deserialize using json object should be equal`() { + val sampleEmailRecipient = EmailRecipient("email1@email.com") + val jsonString = getJsonString(sampleEmailRecipient) + val recreatedObject = createObjectFromJsonString(jsonString) { EmailRecipient.parse(it) } + assertEquals(sampleEmailRecipient, recreatedObject) + } + + @Test + fun `EmailRecipient should deserialize json object using parser`() { + val sampleEmailRecipient = EmailRecipient("email1@email.com") + val jsonString = """ + { + "recipient": "${sampleEmailRecipient.recipient}" + }" + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { EmailRecipient.parse(it) } + assertEquals(sampleEmailRecipient, recreatedObject) + } + + @Test + fun `EmailRecipient should throw 
exception when invalid json object is passed`() { + val jsonString = "sample message" + assertThrows { + createObjectFromJsonString(jsonString) { EmailRecipient.parse(it) } + } + } + + @Test + fun `EmailRecipient should throw exception when recipients is replaced with recipients2 in json object`() { + val sampleEmailRecipient = EmailRecipient("email1@email.com") + val jsonString = """ + { + "recipient2": "${sampleEmailRecipient.recipient}" + }" + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { EmailRecipient.parse(it) } + } + } + + @Test + fun `EmailRecipient should safely ignore extra field in json object`() { + val sampleEmailRecipient = EmailRecipient("email@email.com") + val jsonString = """ + { + "recipient": "${sampleEmailRecipient.recipient}", + "extra_field_1":["extra", "value"], + "extra_field_2":{"extra":"value"}, + "extra_field_3":"extra value 3" + }" + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { EmailRecipient.parse(it) } + assertEquals(sampleEmailRecipient, recreatedObject) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailTests.kt index 6590f08e..8624b138 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/EmailTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EmailTests.kt @@ -1,35 +1,12 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model import com.fasterxml.jackson.core.JsonParseException import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -import org.junit.jupiter.api.assertDoesNotThrow import org.junit.jupiter.api.assertThrows import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString @@ -37,51 +14,14 @@ import org.opensearch.commons.utils.recreateObject internal class EmailTests { - private fun checkValidEmailAddress(emailAddress: String) { - assertDoesNotThrow("should accept $emailAddress") { - Email("sampleId", listOf(emailAddress), listOf()) - } - } - - private fun checkInvalidEmailAddress(emailAddress: String) { - assertThrows("Should throw an Exception for invalid email $emailAddress") { - Email("sampleId", listOf(emailAddress), listOf()) - } - } - - @Test - fun `Email should accept valid email address`() { - checkValidEmailAddress("email1234@email.com") - checkValidEmailAddress("email+1234@email.com") - checkValidEmailAddress("email-1234@email.com") - checkValidEmailAddress("email_1234@email.com") - checkValidEmailAddress("email.1234@email.com") - checkValidEmailAddress("e.ma_il-1+2@test-email-domain.co.uk") - checkValidEmailAddress("email-.+_=#|@domain.com") - checkValidEmailAddress("e@mail.com") - } - - @Test - fun `Email should throw exception for invalid email address`() { - checkInvalidEmailAddress("email") - checkInvalidEmailAddress("email@") - checkInvalidEmailAddress("email@1234@email.com") - checkInvalidEmailAddress(".email@email.com") - checkInvalidEmailAddress("email.@email.com") - checkInvalidEmailAddress("email..1234@email.com") - checkInvalidEmailAddress("email@email..com") - checkInvalidEmailAddress("email@.com") - checkInvalidEmailAddress("email@email.com.") - checkInvalidEmailAddress("email@.email.com") - checkInvalidEmailAddress("email@email.com-") - checkInvalidEmailAddress("email@email_domain.com") - } - @Test fun `Email serialize and deserialize transport object should be equal`() { val sampleEmail = Email( "sampleAccountId", - listOf("email1@email.com", "email2@email.com"), + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ), listOf("sample_group_id_1", "sample_group_id_2") ) val recreatedObject = recreateObject(sampleEmail) { Email(it) } @@ -92,7 +32,10 @@ internal class EmailTests { fun `Email serialize and deserialize using json object should be equal`() { val sampleEmail = Email( "sampleAccountId", - listOf("email1@email.com", "email2@email.com"), + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ), listOf("sample_group_id_1", "sample_group_id_2") ) val jsonString = getJsonString(sampleEmail) @@ -104,15 +47,18 @@ internal class EmailTests { fun `Email should deserialize json object using parser`() { val sampleEmail = Email( "sampleAccountId", - listOf("email1@email.com", "email2@email.com"), + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ), listOf("sample_group_id_1", "sample_group_id_2") ) val jsonString = """ { "email_account_id":"${sampleEmail.emailAccountID}", "recipient_list":[ - "${sampleEmail.recipients[0]}", - "${sampleEmail.recipients[1]}" + {"recipient":"${sampleEmail.recipients[0].recipient}"}, + {"recipient":"${sampleEmail.recipients[1].recipient}"} ], "email_group_id_list":[ "${sampleEmail.emailGroupIds[0]}", @@ -136,15 +82,18 @@ internal class EmailTests { fun `Email should throw exception when emailAccountID is 
replaced with emailAccountID2 in json object`() { val sampleEmail = Email( "sampleAccountId", - listOf("email1@email.com", "email2@email.com"), + listOf( + EmailRecipient("email1@email.com"), + EmailRecipient("email2@email.com") + ), listOf("sample_group_id_1", "sample_group_id_2") ) val jsonString = """ { "email_account_id2":"${sampleEmail.emailAccountID}", "recipient_list":[ - "${sampleEmail.recipients[0]}", - "${sampleEmail.recipients[1]}" + {"recipient":"${sampleEmail.recipients[0]}"}, + {"recipient":"${sampleEmail.recipients[1]}"} ], "email_group_id_list":[ "${sampleEmail.emailGroupIds[0]}", diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EventSourceTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EventSourceTests.kt index 883c5047..b36a2fc0 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/EventSourceTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EventSourceTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -40,7 +18,6 @@ internal class EventSourceTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.ALERTING, severity = SeverityType.INFO ) val recreatedObject = recreateObject(sampleEventSource) { EventSource(it) } @@ -52,7 +29,6 @@ internal class EventSourceTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.ALERTING, severity = SeverityType.INFO ) @@ -66,9 +42,8 @@ internal class EventSourceTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.ALERTING, - tags = listOf("tag1", "tag2"), - severity = SeverityType.INFO + severity = SeverityType.INFO, + tags = listOf("tag1", "tag2") ) val jsonString = """ { @@ -87,13 +62,12 @@ internal class EventSourceTests { } @Test - fun `Event source should safely ignore unknown feature type in json object`() { + fun `Event source should safely accepts unknown feature type in json object`() { val sampleEventSource = EventSource( "title", "reference_id", - Feature.NONE, - tags = listOf("tag1", "tag2"), - severity = SeverityType.INFO + severity = SeverityType.INFO, + tags = listOf("tag1", "tag2") ) val jsonString = """ { @@ -114,9 +88,8 @@ internal class EventSourceTests { EventSource( "", "reference_id", - Feature.ALERTING, - tags = listOf("tag1", "tag2"), - severity = SeverityType.INFO + severity = SeverityType.INFO, + tags = listOf("tag1", "tag2") ) } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/EventStatusTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/EventStatusTests.kt index e9a1634f..94b48e64 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/EventStatusTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/EventStatusTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -140,6 +118,17 @@ internal class EventStatusTests { } } + @Test + fun `Event throw exception if deliveryStatus is empty for config type MicrosoftTeams`() { + Assertions.assertThrows(IllegalArgumentException::class.java) { + EventStatus( + "config_id", + "name", + ConfigType.MICROSOFT_TEAMS + ) + } + } + @Test fun `Event throw exception if deliveryStatus is empty for config type Webhook`() { Assertions.assertThrows(IllegalArgumentException::class.java) { diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureTests.kt deleted file mode 100644 index 215c2c2c..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/FeatureTests.kt +++ /dev/null @@ -1,56 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.model - -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.opensearch.commons.notifications.model.Feature.Companion.enumParser -import org.opensearch.commons.notifications.model.Feature.Companion.fromTagOrDefault - -internal class FeatureTests { - - @Test - fun `toString should return tag`() { - Feature.values().forEach { - assertEquals(it.tag, it.toString()) - } - } - - @Test - fun `fromTagOrDefault should return corresponding enum`() { - Feature.values().forEach { - assertEquals(it, fromTagOrDefault(it.tag)) - } - } - - @Test - fun `EnumParser fromTagOrDefault should return corresponding enum`() { - Feature.values().forEach { - assertEquals(it, enumParser.fromTagOrDefault(it.tag)) - } - } -} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigListTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigListTests.kt index f4ceee7f..11488e38 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigListTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigListTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -37,8 +15,8 @@ import org.opensearch.commons.utils.recreateObject internal class FilterConfigListTests { private fun assertSearchResultEquals( - expected: FeatureChannelList, - actual: FeatureChannelList + expected: ChannelList, + actual: ChannelList ) { assertEquals(expected.startIndex, actual.startIndex) assertEquals(expected.totalHits, actual.totalHits) @@ -49,108 +27,120 @@ internal class FilterConfigListTests { @Test fun `Search result serialize and deserialize with config object should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.SLACK ) - val searchResult = FeatureChannelList(sampleConfig) - val recreatedObject = recreateObject(searchResult) { FeatureChannelList(it) } + val searchResult = ChannelList(sampleConfig) + val recreatedObject = recreateObject(searchResult) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result serialize and deserialize with multiple config object should be equal`() { - val sampleConfig1 = FeatureChannel( + val sampleConfig1 = Channel( "config_id1", "name1", "description1", ConfigType.SLACK ) - val sampleConfig2 = FeatureChannel( + val sampleConfig2 = Channel( "config_id2", "name2", "description2", ConfigType.CHIME ) - val sampleConfig3 = FeatureChannel( + val sampleConfig3 = Channel( "config_id3", "name3", "description3", ConfigType.WEBHOOK ) - val searchResult = FeatureChannelList( + val sampleConfig4 = Channel( + "config_id4", + "name4", + "description4", + ConfigType.MICROSOFT_TEAMS + ) + val searchResult = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(sampleConfig1, sampleConfig2, sampleConfig3) + listOf(sampleConfig1, sampleConfig2, sampleConfig3, sampleConfig4) ) - val recreatedObject = recreateObject(searchResult) { FeatureChannelList(it) } + val recreatedObject = recreateObject(searchResult) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result serialize and deserialize using json config object should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL_GROUP ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = getJsonString(searchResult) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result serialize and deserialize using json with multiple config object should be equal`() { - val sampleConfig1 = FeatureChannel( + val sampleConfig1 = Channel( "config_id1", "name1", "description1", ConfigType.SLACK ) - val sampleConfig2 = FeatureChannel( + val sampleConfig2 = Channel( "config_id2", "name2", "description2", ConfigType.CHIME ) - val sampleConfig3 = FeatureChannel( + val sampleConfig3 = Channel( "config_id3", "name3", "description3", ConfigType.WEBHOOK ) - val 
searchResult = FeatureChannelList( + val sampleConfig4 = Channel( + "config_id4", + "name4", + "description4", + ConfigType.MICROSOFT_TEAMS + ) + val searchResult = ChannelList( 100, 1000, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(sampleConfig1, sampleConfig2, sampleConfig3) + listOf(sampleConfig1, sampleConfig2, sampleConfig3, sampleConfig4) ) val jsonString = getJsonString(searchResult) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result should use isEnabled=true if absent in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL, true ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { "start_index":"0", "total_hits":"1", "total_hit_relation":"eq", - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -160,25 +150,25 @@ internal class FilterConfigListTests { ] } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result should safely ignore extra field in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { "start_index":"0", "total_hits":"1", "total_hit_relation":"eq", - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -192,22 +182,22 @@ internal class FilterConfigListTests { "extra_field_3":"extra value 3" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test fun `Search result should safely fallback to default if startIndex, totalHits or totalHitRelation field absent in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.EMAIL ) - val searchResult = FeatureChannelList(sampleConfig) + val searchResult = ChannelList(sampleConfig) val jsonString = """ { - "feature_channel_list":[ + "channel_list":[ { "config_id":"config_id", "name":"name", @@ -218,12 +208,12 @@ internal class FilterConfigListTests { ] } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { ChannelList(it) } assertSearchResultEquals(searchResult, recreatedObject) } @Test - fun `Search result should throw exception if featureChannelList is absent in json`() { + fun `Search result should throw exception if channelList is absent in json`() { val jsonString = """ { "start_index":"0", @@ -232,7 +222,7 @@ internal class FilterConfigListTests { } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { FeatureChannelList(it) } + createObjectFromJsonString(jsonString) { ChannelList(it) } } } } diff --git 
a/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigTests.kt index b5dd66b6..c3557e89 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/FilterConfigTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -37,45 +15,45 @@ internal class FilterConfigTests { @Test fun `Config serialize and deserialize with default isEnabled flag should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.SLACK ) - val recreatedObject = recreateObject(sampleConfig) { FeatureChannel(it) } + val recreatedObject = recreateObject(sampleConfig) { Channel(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config serialize and deserialize with isEnabled=false should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.CHIME, false ) - val recreatedObject = recreateObject(sampleConfig) { FeatureChannel(it) } + val recreatedObject = recreateObject(sampleConfig) { Channel(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config serialize and deserialize using json object with default isEnabled flag should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", ConfigType.WEBHOOK ) val jsonString = getJsonString(sampleConfig) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config serialize and deserialize using json object with isEnabled=false should be equal`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", @@ -83,13 +61,13 @@ internal class FilterConfigTests { false ) val jsonString = getJsonString(sampleConfig) - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config should safely ignore extra field in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", @@ -107,13 +85,13 @@ internal class FilterConfigTests { "extra_field_3":"extra value 3" } 
""".trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config should safely ignore unknown config type in json object`() { - val sampleConfig = FeatureChannel( + val sampleConfig = Channel( "config_id", "name", "description", @@ -127,14 +105,14 @@ internal class FilterConfigTests { "config_type":"NewConfig" } """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { FeatureChannel.parse(it) } + val recreatedObject = createObjectFromJsonString(jsonString) { Channel.parse(it) } assertEquals(sampleConfig, recreatedObject) } @Test fun `Config throw exception if configId is empty`() { Assertions.assertThrows(IllegalArgumentException::class.java) { - FeatureChannel( + Channel( "", "name", "description", @@ -146,7 +124,7 @@ internal class FilterConfigTests { @Test fun `Config throw exception if name is empty`() { Assertions.assertThrows(IllegalArgumentException::class.java) { - FeatureChannel( + Channel( "config_id", "", "description", diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/MethodTypeTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/MethodTypeTests.kt index 75685d53..f9982d7c 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/MethodTypeTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/MethodTypeTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeamsTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeamsTests.kt new file mode 100644 index 00000000..4543c06b --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/MicrosoftTeamsTests.kt @@ -0,0 +1,87 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.commons.notifications.model + +import com.fasterxml.jackson.core.JsonParseException +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.opensearch.commons.utils.createObjectFromJsonString +import org.opensearch.commons.utils.getJsonString +import org.opensearch.commons.utils.recreateObject +import java.net.MalformedURLException + +internal class MicrosoftTeamsTests { + + @Test + fun `Microsoft Teams serialize and deserialize transport object should be equal`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val recreatedObject = recreateObject(sampleMicrosoftTeams) { MicrosoftTeams(it) } + assertEquals(sampleMicrosoftTeams, recreatedObject) + } + + @Test + fun `Microsoft Teams serialize and deserialize using json object should be equal`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val jsonString = getJsonString(sampleMicrosoftTeams) + val recreatedObject = createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + assertEquals(sampleMicrosoftTeams, recreatedObject) + } + + @Test + fun `Microsoft Teams should deserialize json object using parser`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val jsonString = "{\"url\":\"${sampleMicrosoftTeams.url}\"}" + val recreatedObject = createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + assertEquals(sampleMicrosoftTeams, recreatedObject) + } + + @Test + fun `Microsoft Teams should throw exception when invalid json object is passed`() { + val jsonString = "sample message" + assertThrows { + createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + } + } + + @Test + fun `Microsoft Teams should throw exception when url is replace with url2 in json object`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val jsonString = "{\"url2\":\"${sampleMicrosoftTeams.url}\"}" + assertThrows { + createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + } + } + + @Test + fun `Microsoft Teams should throw exception when url is not proper`() { + assertThrows { + MicrosoftTeams("domain.com/sample_url#1234567890") + } + val jsonString = "{\"url\":\"domain.com/sample_url\"}" + assertThrows { + createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + } + } + + @Test + fun `Microsoft Teams should throw exception when url protocol is not https or http`() { + assertThrows { + MicrosoftTeams("ftp://domain.com/sample_url#1234567890") + } + val jsonString = "{\"url\":\"ftp://domain.com/sample_url\"}" + assertThrows { + createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + } + } + + @Test + fun `Microsoft Teams should safely ignore extra field in json object`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val jsonString = "{\"url\":\"${sampleMicrosoftTeams.url}\", 
\"another\":\"field\"}" + val recreatedObject = createObjectFromJsonString(jsonString) { MicrosoftTeams.parse(it) } + assertEquals(sampleMicrosoftTeams, recreatedObject) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfoTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfoTests.kt index 79c1035d..c4e28f33 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfoTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigInfoTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -33,7 +11,6 @@ import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject import java.time.Instant -import java.util.EnumSet internal class NotificationConfigInfoTests { @@ -44,14 +21,12 @@ internal class NotificationConfigInfoTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", Instant.now(), Instant.now(), - "tenant", sampleConfig ) val recreatedObject = recreateObject(configInfo) { NotificationConfigInfo(it) } @@ -67,14 +42,12 @@ internal class NotificationConfigInfoTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig ) val jsonString = getJsonString(configInfo) @@ -82,45 +55,6 @@ internal class NotificationConfigInfoTests { assertEquals(configInfo, recreatedObject) } - @Test - fun `Config info should take default tenant when field is absent in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleSlack = Slack("https://domain.com/sample_slack_url#1234567890") - val sampleConfig = NotificationConfig( - "name", - "description", - ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack - ) - val configInfo = NotificationConfigInfo( - "config-Id", - lastUpdatedTimeMs, - createdTimeMs, - "", // Default tenant - sampleConfig - ) - val jsonString = """ - { - "config_id":"config-Id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "config":{ - "name":"name", - "description":"description", - 
"config_type":"slack", - "feature_list":["index_management"], - "is_enabled":true, - "slack":{"url":"https://domain.com/sample_slack_url#1234567890"} - } - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationConfigInfo.parse(it) } - assertEquals(configInfo, recreatedObject) - } - @Test fun `Config info should safely ignore extra field in json object`() { val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) @@ -130,15 +64,13 @@ internal class NotificationConfigInfoTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val configInfo = NotificationConfigInfo( "config-Id", lastUpdatedTimeMs, createdTimeMs, - "selectedTenant", sampleConfig ) val jsonString = """ @@ -146,7 +78,6 @@ internal class NotificationConfigInfoTests { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -171,7 +102,6 @@ internal class NotificationConfigInfoTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) Assertions.assertThrows(IllegalArgumentException::class.java) { @@ -179,7 +109,6 @@ internal class NotificationConfigInfoTests { "", Instant.now(), Instant.now(), - "tenant", sampleConfig ) } @@ -193,7 +122,6 @@ internal class NotificationConfigInfoTests { { "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -217,7 +145,6 @@ internal class NotificationConfigInfoTests { { "config_id":"config-Id", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -240,7 +167,6 @@ internal class NotificationConfigInfoTests { { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -264,8 +190,7 @@ internal class NotificationConfigInfoTests { { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" + "created_time_ms":"${createdTimeMs.toEpochMilli()}" } """.trimIndent() Assertions.assertThrows(IllegalArgumentException::class.java) { diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResultsTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResultsTests.kt index a9543533..06bb3557 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResultsTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigSearchResultsTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -34,7 +12,6 @@ import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject import java.time.Instant -import java.util.EnumSet internal class NotificationConfigSearchResultsTests { @@ -56,14 +33,12 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", Instant.now(), Instant.now(), - "tenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -77,28 +52,24 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = Slack("https://domain.com/sample_url#1234567890") ) val configInfo1 = NotificationConfigInfo( "config_id1", Instant.now(), Instant.now(), - "tenant", sampleConfig1 ) val sampleConfig2 = NotificationConfig( "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = Chime("https://domain.com/sample_url#1234567890") ) val configInfo2 = NotificationConfigInfo( "config_id2", Instant.now(), Instant.now(), - "tenant", sampleConfig2 ) val searchResult = NotificationConfigSearchResult(listOf(configInfo1, configInfo2)) @@ -118,28 +89,24 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = Slack("https://domain.com/sample_url#1234567890") ) val configInfo1 = NotificationConfigInfo( "config_id1", Instant.now(), Instant.now(), - "tenant", sampleConfig1 ) val sampleConfig2 = NotificationConfig( "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = Chime("https://domain.com/sample_url#1234567890") ) val configInfo2 = NotificationConfigInfo( "config_id2", Instant.now(), Instant.now(), - "tenant", sampleConfig2 ) val searchResult = NotificationConfigSearchResult( @@ -161,14 +128,12 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val configInfo = NotificationConfigInfo( "config_id", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -185,28 +150,24 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = Slack("https://domain.com/sample_url#1234567890") ) val configInfo1 = NotificationConfigInfo( "config_id1", lastUpdatedTimeMs, createdTimeMs, - "tenant", sampleConfig1 ) val sampleConfig2 = NotificationConfig( "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = Chime("https://domain.com/sample_url#1234567890") ) val configInfo2 = NotificationConfigInfo( "config_id2", lastUpdatedTimeMs, createdTimeMs, 
- "tenant", sampleConfig2 ) val searchResult = NotificationConfigSearchResult( @@ -229,15 +190,13 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val configInfo = NotificationConfigInfo( "config-Id", lastUpdatedTimeMs, createdTimeMs, - "selectedTenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -251,7 +210,6 @@ internal class NotificationConfigSearchResultsTests { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -280,15 +238,13 @@ internal class NotificationConfigSearchResultsTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val configInfo = NotificationConfigInfo( "config-Id", lastUpdatedTimeMs, createdTimeMs, - "selectedTenant", sampleConfig ) val searchResult = NotificationConfigSearchResult(configInfo) @@ -299,7 +255,6 @@ internal class NotificationConfigSearchResultsTests { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", "config":{ "name":"name", "description":"description", @@ -329,8 +284,7 @@ internal class NotificationConfigSearchResultsTests { { "config_id":"config-Id", "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" + "created_time_ms":"${createdTimeMs.toEpochMilli()}" } ] } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigTests.kt index ab45e2fc..ee17777a 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationConfigTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -31,7 +9,6 @@ import org.junit.jupiter.api.Test import org.opensearch.commons.utils.createObjectFromJsonString import org.opensearch.commons.utils.getJsonString import org.opensearch.commons.utils.recreateObject -import java.util.EnumSet internal class NotificationConfigTests { @@ -42,7 +19,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -56,7 +32,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.SLACK, - EnumSet.of(Feature.REPORTS), configData = sampleSlack ) val jsonString = getJsonString(sampleConfig) @@ -71,7 +46,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.ALERTING), configData = sampleChime ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -85,7 +59,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.CHIME, - EnumSet.of(Feature.ALERTING), configData = sampleChime ) val jsonString = getJsonString(sampleConfig) @@ -93,6 +66,33 @@ internal class NotificationConfigTests { assertEquals(sampleConfig, recreatedObject) } + @Test + fun `Config serialize and deserialize with microsoft teams object should be equal`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val sampleConfig = NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = sampleMicrosoftTeams + ) + val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } + assertEquals(sampleConfig, recreatedObject) + } + + @Test + fun `Config serialize and deserialize with json microsoft teams object should be equal`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val sampleConfig = NotificationConfig( + "name", + "description", + ConfigType.MICROSOFT_TEAMS, + configData = sampleMicrosoftTeams + ) + val jsonString = getJsonString(sampleConfig) + val recreatedObject = createObjectFromJsonString(jsonString) { NotificationConfig.parse(it) } + assertEquals(sampleConfig, recreatedObject) + } + @Test fun `Config serialize and deserialize with webhook object should be equal`() { val sampleWebhook = Webhook("https://domain.com/sample_url#1234567890") @@ -100,7 +100,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleWebhook ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -114,7 +113,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleWebhook ) val jsonString = getJsonString(sampleConfig) @@ -124,12 +122,11 @@ internal class NotificationConfigTests { @Test fun `Config serialize and deserialize with email object should be equal`() { - val sampleEmail = Email("id_1234567890", listOf("email@domain.com"), listOf("groupId")) + val sampleEmail = Email("id_1234567890", listOf(EmailRecipient("email@domain.com")), listOf("groupId")) val sampleConfig = NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleEmail ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -138,12 +135,11 @@ internal class 
NotificationConfigTests { @Test fun `Config serialize and deserialize with json email object should be equal`() { - val sampleEmail = Email("id_1234567890", listOf("email@domain.com"), listOf("groupId")) + val sampleEmail = Email("id_1234567890", listOf(EmailRecipient("email@domain.com")), listOf("groupId")) val sampleConfig = NotificationConfig( "name", "description", ConfigType.EMAIL, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleEmail ) val jsonString = getJsonString(sampleConfig) @@ -158,7 +154,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = smtpAccount ) val jsonString = getJsonString(sampleConfig) @@ -173,7 +168,6 @@ internal class NotificationConfigTests { "name", "description", ConfigType.SMTP_ACCOUNT, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleSmtpAccount ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -182,12 +176,11 @@ internal class NotificationConfigTests { @Test fun `Config serialize and deserialize with json emailGroup object should be equal`() { - val sampleEmailGroup = EmailGroup(listOf("email@domain.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("email@domain.com"))) val sampleConfig = NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleEmailGroup ) val jsonString = getJsonString(sampleConfig) @@ -197,12 +190,11 @@ internal class NotificationConfigTests { @Test fun `Config serialize and deserialize with emailGroup object should be equal`() { - val sampleEmailGroup = EmailGroup(listOf("email@domain.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("email@domain.com"))) val sampleConfig = NotificationConfig( "name", "description", ConfigType.EMAIL_GROUP, - EnumSet.of(Feature.INDEX_MANAGEMENT), configData = sampleEmailGroup ) val recreatedObject = recreateObject(sampleConfig) { NotificationConfig(it) } @@ -210,16 +202,14 @@ internal class NotificationConfigTests { } @Test - fun `Config should safely ignore unknown config type in json object`() { val sampleSlack = Slack("https://domain.com/sample_slack_url#1234567890") val sampleConfig = NotificationConfig( "name", "description", ConfigType.NONE, - EnumSet.of(Feature.INDEX_MANAGEMENT), - isEnabled = true, - configData = sampleSlack + configData = sampleSlack, + isEnabled = true ) val jsonString = """ { @@ -240,15 +230,14 @@ internal class NotificationConfigTests { } @Test - fun `Config should safely ignore unknown feature type in json object`() { + fun `Config should safely accepts unknown feature type in json object`() { val sampleWebhook = Webhook("https://domain.com/sample_webhook_url#1234567890") val sampleConfig = NotificationConfig( "name", "description", ConfigType.WEBHOOK, - EnumSet.of(Feature.INDEX_MANAGEMENT, Feature.NONE), - isEnabled = true, - configData = sampleWebhook + configData = sampleWebhook, + isEnabled = true ) val jsonString = """ { diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfoTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfoTests.kt deleted file mode 100644 index e23ca1e0..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventInfoTests.kt +++ /dev/null @@ -1,364 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the 
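// ---------------------------------------------------------------------------
// Editor's note (usage sketch, not part of this diff): two API-facing changes from the
// NotificationConfigTests hunks above, shown as plain calls. The constructor shapes are
// taken verbatim from the new tests; only the wrapping function is illustrative.
import org.opensearch.commons.notifications.model.ConfigType
import org.opensearch.commons.notifications.model.Email
import org.opensearch.commons.notifications.model.EmailRecipient
import org.opensearch.commons.notifications.model.MicrosoftTeams
import org.opensearch.commons.notifications.model.NotificationConfig

fun newConfigShapes(): Pair<NotificationConfig, NotificationConfig> {
    // The MICROSOFT_TEAMS config type exercised by the new tests above.
    val teamsConfig = NotificationConfig(
        "name",
        "description",
        ConfigType.MICROSOFT_TEAMS,
        configData = MicrosoftTeams("https://domain.com/sample_url#1234567890")
    )
    // Email recipients are now typed EmailRecipient objects instead of raw strings.
    val emailConfig = NotificationConfig(
        "name",
        "description",
        ConfigType.EMAIL,
        configData = Email("id_1234567890", listOf(EmailRecipient("email@domain.com")), listOf("groupId"))
    )
    return teamsConfig to emailConfig
}
// ---------------------------------------------------------------------------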
Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.model - -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.opensearch.commons.utils.createObjectFromJsonString -import org.opensearch.commons.utils.getJsonString -import org.opensearch.commons.utils.recreateObject -import java.time.Instant - -internal class NotificationEventInfoTests { - - @Test - fun `Event info serialize and deserialize with event object should be equal`() { - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - Instant.now(), - Instant.now(), - "tenant", - sampleEvent - ) - val recreatedObject = recreateObject(eventInfo) { NotificationEventInfo(it) } - assertEquals(eventInfo, recreatedObject) - } - - @Test - fun `Event info serialize and deserialize using json event object should be equal`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - val jsonString = getJsonString(eventInfo) - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - assertEquals(eventInfo, recreatedObject) - } - - @Test - fun `Event info should take default tenant when field is absent in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - val jsonString = """ - { - "event_id":"event_id", - 
"last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"tenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - assertEquals(eventInfo, recreatedObject) - } - - @Test - fun `Event info should safely ignore extra field in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - val jsonString = """ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"tenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - }, - "extra_field_1":["extra", "value"], - "extra_field_2":{"extra":"value"}, - "extra_field_3":"extra value 3" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - assertEquals(eventInfo, recreatedObject) - } - - @Test - fun `Event info should throw exception if event_id is empty`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "event_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - Assertions.assertThrows(IllegalArgumentException::class.java) { - NotificationEventInfo( - "", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - } - } - - @Test - fun `Event info should throw exception if event_id is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val jsonString = """ - { - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":["tag1", "tag2"] - }, - "status_list":[ - { - "event_id":"event_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - 
} - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - } - } - - @Test - fun `Event info should throw exception if lastUpdatedTimeMs is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val jsonString = """ - { - "event_id":"event_id", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":["tag1", "tag2"] - }, - "status_list":[ - { - "event_id":"event_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - } - } - - @Test - fun `Event info should throw exception if createdTimeMs is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val jsonString = """ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":["tag1", "tag2"] - }, - "status_list":[ - { - "event_id":"event_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - } - } - - @Test - fun `Event info should throw exception if event is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val jsonString = """ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { NotificationEventInfo.parse(it) } - } - } -} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResultTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResultTests.kt deleted file mode 100644 index d09fe9d2..00000000 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventSearchResultTests.kt +++ /dev/null @@ -1,445 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. 
- * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * - */ -package org.opensearch.commons.notifications.model - -import org.apache.lucene.search.TotalHits -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import org.opensearch.commons.utils.createObjectFromJsonString -import org.opensearch.commons.utils.getJsonString -import org.opensearch.commons.utils.recreateObject -import java.time.Instant - -internal class NotificationEventSearchResultTests { - - private fun assertSearchResultEquals( - expected: NotificationEventSearchResult, - actual: NotificationEventSearchResult - ) { - assertEquals(expected.startIndex, actual.startIndex) - assertEquals(expected.totalHits, actual.totalHits) - assertEquals(expected.totalHitRelation, actual.totalHitRelation) - assertEquals(expected.objectListFieldName, actual.objectListFieldName) - assertEquals(expected.objectList, actual.objectList) - } - - @Test - fun `Search result serialize and deserialize with event object should be equal`() { - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - Instant.now(), - Instant.now(), - "tenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val recreatedObject = recreateObject(searchResult) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result serialize and deserialize with multiple event default values should be equal`() { - val eventSource1 = EventSource( - "title 1", - "reference_id_1", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val eventSource2 = EventSource( - "title 2", - "reference_id_2", - Feature.REPORTS, - severity = SeverityType.HIGH - ) - val status1 = EventStatus( - "config_id1", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val status2 = EventStatus( - "config_id2", - "name", - ConfigType.CHIME, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val eventInfo1 = NotificationEventInfo( - "event_id1", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1)) - ) - val eventInfo2 = NotificationEventInfo( - "event_id2", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status2)) - ) - val eventInfo3 = NotificationEventInfo( - "event_id3", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1, status2)) - ) - val eventInfo4 = NotificationEventInfo( - "event_id4", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status1, status2)) - ) - val searchResult = NotificationEventSearchResult( - listOf(eventInfo1, eventInfo2, eventInfo3, eventInfo4) - ) - val expectedResult = NotificationEventSearchResult( - 0, - 4, - 
TotalHits.Relation.EQUAL_TO, - listOf(eventInfo1, eventInfo2, eventInfo3, eventInfo4) - ) - val recreatedObject = recreateObject(searchResult) { NotificationEventSearchResult(it) } - assertSearchResultEquals(expectedResult, recreatedObject) - } - - @Test - fun `Search result serialize and deserialize with multiple event status object should be equal`() { - val eventSource1 = EventSource( - "title 1", - "reference_id_1", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val eventSource2 = EventSource( - "title 2", - "reference_id_2", - Feature.REPORTS, - severity = SeverityType.HIGH - ) - val status1 = EventStatus( - "config_id1", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val status2 = EventStatus( - "config_id2", - "name", - ConfigType.CHIME, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val eventInfo1 = NotificationEventInfo( - "event_id1", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1)) - ) - val eventInfo2 = NotificationEventInfo( - "event_id2", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status2)) - ) - val eventInfo3 = NotificationEventInfo( - "event_id3", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource1, listOf(status1, status2)) - ) - val eventInfo4 = NotificationEventInfo( - "event_id4", - Instant.now(), - Instant.now(), - "tenant", - NotificationEvent(eventSource2, listOf(status1, status2)) - ) - val searchResult = NotificationEventSearchResult( - 100, - 1000, - TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(eventInfo1, eventInfo2, eventInfo3, eventInfo4) - ) - val recreatedObject = recreateObject(searchResult) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result serialize and deserialize using json event object should be equal`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("404", "invalid recipient") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val jsonString = getJsonString(searchResult) - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result serialize and deserialize using json with multiple event object should be equal`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val eventSource1 = EventSource( - "title 1", - "reference_id_1", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val eventSource2 = EventSource( - "title 2", - "reference_id_2", - Feature.REPORTS, - severity = SeverityType.HIGH - ) - val status1 = EventStatus( - "config_id1", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val status2 = EventStatus( - "config_id2", - "name", - ConfigType.CHIME, - deliveryStatus = 
DeliveryStatus("404", "invalid recipient") - ) - val eventInfo1 = NotificationEventInfo( - "event_id1", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - NotificationEvent(eventSource1, listOf(status1)) - ) - val eventInfo2 = NotificationEventInfo( - "event_id2", - lastUpdatedTimeMs, - createdTimeMs, - "tenant", - NotificationEvent(eventSource2, listOf(status2)) - ) - val searchResult = NotificationEventSearchResult( - 100, - 1000, - TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, - listOf(eventInfo1, eventInfo2) - ) - val jsonString = getJsonString(searchResult) - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result should safely ignore extra field in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "selectedTenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val jsonString = """ - { - "start_index":"0", - "total_hits":"1", - "total_hit_relation":"eq", - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - "severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - ], - "extra_field_1":["extra", "value"], - "extra_field_2":{"extra":"value"}, - "extra_field_3":"extra value 3" - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result should safely fallback to default if startIndex, totalHits or totalHitRelation field absent in json object`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val sampleEventSource = EventSource( - "title", - "reference_id", - Feature.ALERTING, - severity = SeverityType.INFO - ) - val sampleStatus = EventStatus( - "config_id", - "name", - ConfigType.SLACK, - deliveryStatus = DeliveryStatus("200", "success") - ) - val sampleEvent = NotificationEvent(sampleEventSource, listOf(sampleStatus)) - val eventInfo = NotificationEventInfo( - "event_id", - lastUpdatedTimeMs, - createdTimeMs, - "selectedTenant", - sampleEvent - ) - val searchResult = NotificationEventSearchResult(eventInfo) - val jsonString = """ - { - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant", - "event":{ - "event_source":{ - "title":"title", - "reference_id":"reference_id", - "feature":"alerting", - 
"severity":"info", - "tags":[] - }, - "status_list":[ - { - "config_id":"config_id", - "config_type":"slack", - "config_name":"name", - "delivery_status": - { - "status_code":"200", - "status_text":"success" - } - } - ] - } - } - ] - } - """.trimIndent() - val recreatedObject = createObjectFromJsonString(jsonString) { NotificationEventSearchResult(it) } - assertSearchResultEquals(searchResult, recreatedObject) - } - - @Test - fun `Search result should throw exception if event is absent in json`() { - val lastUpdatedTimeMs = Instant.ofEpochMilli(Instant.now().toEpochMilli()) - val createdTimeMs = lastUpdatedTimeMs.minusSeconds(1000) - val jsonString = """ - { - "start_index":"0", - "total_hits":"1", - "total_hit_relation":"eq", - "event_list":[ - { - "event_id":"event_id", - "last_updated_time_ms":"${lastUpdatedTimeMs.toEpochMilli()}", - "created_time_ms":"${createdTimeMs.toEpochMilli()}", - "tenant":"selectedTenant" - } - ] - } - """.trimIndent() - Assertions.assertThrows(IllegalArgumentException::class.java) { - createObjectFromJsonString(jsonString) { NotificationEventSearchResult(it) } - } - } -} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventTests.kt index 4a135d0c..57df6066 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/NotificationEventTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.notifications.model @@ -40,7 +18,6 @@ internal class NotificationEventTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.ALERTING, severity = SeverityType.INFO ) val sampleStatus = EventStatus( @@ -59,7 +36,6 @@ internal class NotificationEventTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.REPORTS, severity = SeverityType.INFO ) val sampleStatus = EventStatus( @@ -79,9 +55,8 @@ internal class NotificationEventTests { val sampleEventSource = EventSource( "title", "reference_id", - Feature.ALERTING, - tags = listOf("tag1", "tag2"), - severity = SeverityType.INFO + severity = SeverityType.INFO, + tags = listOf("tag1", "tag2") ) val status1 = EventStatus( "config_id1", diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/SesAccountTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/SesAccountTests.kt new file mode 100644 index 00000000..e5a52837 --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/SesAccountTests.kt @@ -0,0 +1,180 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.model + +import com.fasterxml.jackson.core.JsonParseException +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import org.opensearch.commons.utils.createObjectFromJsonString +import org.opensearch.commons.utils.getJsonString +import org.opensearch.commons.utils.recreateObject + +internal class SesAccountTests { + + @Test + fun `SES should throw exception if empty region`() { + assertThrows { + SesAccount("", null, "from@domain.com") + } + val jsonString = """ + { + "region":"", + "from_address":"from@domain.com" + } + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES should throw exception if invalid role ARN`() { + assertThrows { + SesAccount("us-east-1", "arn:aws:iam:us-east-1:0123456789:role-test", "from@domain.com") + } + val jsonString = """ + { + "region":"us-east-1", + "role_arn":"arn:aws:iam:us-east-1:0123456789:role-test", + "from_address":"from@domain.com" + } + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES should throw exception when email id is invalid`() { + val jsonString = """ + { + "region":"us-east-1", + "from_address":".from@domain.com" + } + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES serialize and deserialize transport object should be equal`() { + val sesAccount = SesAccount("us-east-1", "arn:aws:iam::012345678912:role/iam-test", "from@domain.com") + val recreatedObject = recreateObject(sesAccount) { SesAccount(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES serialize and deserialize using json object should be equal`() { + val sesAccount = SesAccount("us-east-1", "arn:aws:iam::012345678912:role/iam-test", "from@domain.com") + val jsonString = getJsonString(sesAccount) + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES serialize and deserialize using json object should be equal with null roleArn`() { + val sesAccount = SesAccount("us-east-1", null, "from@domain.com") + val jsonString = 
getJsonString(sesAccount) + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES should deserialize json object using parser`() { + val sesAccount = SesAccount("us-east-1", "arn:aws:iam::012345678912:role/iam-test", "from@domain.com") + val jsonString = """ + { + "region":"${sesAccount.awsRegion}", + "role_arn":"${sesAccount.roleArn}", + "from_address":"${sesAccount.fromAddress}" + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES should deserialize json object will null role_arn using parser`() { + val sesAccount = SesAccount("us-east-1", null, "from@domain.com") + val jsonString = """ + { + "region":"${sesAccount.awsRegion}", + "role_arn":null, + "from_address":"${sesAccount.fromAddress}" + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES should deserialize json object will missing role_arn using parser`() { + val sesAccount = SesAccount("us-east-1", null, "from@domain.com") + val jsonString = """ + { + "region":"${sesAccount.awsRegion}", + "from_address":"${sesAccount.fromAddress}" + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } + + @Test + fun `SES should throw exception when invalid json object is passed`() { + val jsonString = "sample message" + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES should throw exception when region is replace with region2 in json object`() { + val jsonString = """ + { + "region2":"us-east-1", + "role_arn":"arn:aws:iam::012345678912:role/iam-test", + "from_address":"from@domain.com" + } + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES should throw exception when from_address is replace with from_address2 in json object`() { + val jsonString = """ + { + "region":"us-east-1", + "role_arn":"arn:aws:iam::012345678912:role/iam-test", + "from_address2":"from@domain.com" + } + """.trimIndent() + assertThrows { + createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + } + } + + @Test + fun `SES should safely ignore extra field in json object`() { + val sesAccount = SesAccount("us-east-1", "arn:aws:iam::012345678912:role/iam-test", "from@domain.com") + val jsonString = """ + { + "region":"${sesAccount.awsRegion}", + "role_arn":"${sesAccount.roleArn}", + "from_address":"${sesAccount.fromAddress}", + "extra_field_1":["extra", "value"], + "extra_field_2":{"extra":"value"}, + "extra_field_3":"extra value 3" + } + """.trimIndent() + val recreatedObject = createObjectFromJsonString(jsonString) { SesAccount.parse(it) } + assertEquals(sesAccount, recreatedObject) + } +} diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/SeverityTypeTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/SeverityTypeTests.kt index 71fe1460..35f7b218 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/SeverityTypeTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/SeverityTypeTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch 
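// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this diff): the new SesAccountTests
// above imply a model with awsRegion, an optional roleArn and a fromAddress, where an
// empty region, a malformed IAM role ARN or an invalid email address is rejected. The
// property names come from the tests; the validation rules below, including both regex
// patterns, are assumptions made for illustration and not the library's actual checks.
data class SesAccountSketch(
    val awsRegion: String,
    val roleArn: String?,
    val fromAddress: String
) {
    init {
        // Matches the "SES should throw exception if empty region" test above.
        require(awsRegion.isNotEmpty()) { "region is empty" }
        // Rough IAM role ARN shape, e.g. arn:aws:iam::012345678912:role/iam-test (assumed pattern).
        require(roleArn == null || Regex("""arn:aws:iam::\d{12}:role/.+""").matches(roleArn)) {
            "invalid role ARN: $roleArn"
        }
        // Very loose email sanity check (rejects a leading dot); the real validation is likely stricter.
        require(fromAddress.matches(Regex("""[^@\s.][^@\s]*@[^@\s]+"""))) {
            "invalid from_address: $fromAddress"
        }
    }
}
// ---------------------------------------------------------------------------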
Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/SlackTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/SlackTests.kt index b5bf7783..5c64c6ae 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/SlackTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/SlackTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -89,11 +67,11 @@ internal class SlackTests { } @Test - fun `Slack should throw exception when url protocol is not https`() { + fun `Slack should throw exception when url protocol is not https or http`() { assertThrows { - Slack("http://domain.com/sample_url#1234567890") + Slack("ftp://domain.com/sample_url#1234567890") } - val jsonString = "{\"url\":\"http://domain.com/sample_url\"}" + val jsonString = "{\"url\":\"ftp://domain.com/sample_url\"}" assertThrows { createObjectFromJsonString(jsonString) { Slack.parse(it) } } diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/SmtpAccountTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/SmtpAccountTests.kt index bc924681..6617d1f6 100644 --- a/src/test/kotlin/org/opensearch/commons/notifications/model/SmtpAccountTests.kt +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/SmtpAccountTests.kt @@ -1,28 +1,6 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. - * */ package org.opensearch.commons.notifications.model @@ -93,7 +71,8 @@ internal class SmtpAccountTests { fun `SmtpAccount should safely ignore extra field in json object`() { val sampleSmtpAccount = SmtpAccount( "domain.com", - 1234, MethodType.START_TLS, + 1234, + MethodType.START_TLS, "from@domain.com" ) val jsonString = """ diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/SnsTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/SnsTests.kt new file mode 100644 index 00000000..f81e8d3f --- /dev/null +++ b/src/test/kotlin/org/opensearch/commons/notifications/model/SnsTests.kt @@ -0,0 +1,112 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.commons.notifications.model + +import com.fasterxml.jackson.core.JsonParseException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Assertions.fail +import org.junit.jupiter.api.Test +import org.opensearch.commons.utils.createObjectFromJsonString +import org.opensearch.commons.utils.getJsonString +import org.opensearch.commons.utils.recreateObject + +internal class SnsTests { + + @Test + fun `SNS should throw exception if empty topic`() { + assertThrows(IllegalArgumentException::class.java) { + Sns("", null) + } + val jsonString = "{\"topic_arn\":\"\"}" + assertThrows(IllegalArgumentException::class.java) { + createObjectFromJsonString(jsonString) { Sns.parse(it) } + } + } + + @Test + fun `SNS should throw exception if invalid topic ARN`() { + assertThrows(IllegalArgumentException::class.java) { + Sns("arn:aws:es:us-east-1:012345678989:test", null) + } + val jsonString = "{\"topic_arn\":\"arn:aws:es:us-east-1:012345678989:test\"}" + assertThrows(IllegalArgumentException::class.java) { + createObjectFromJsonString(jsonString) { Sns.parse(it) } + } + } + + @Test + fun `SNS should throw exception if invalid role ARN`() { + assertThrows(IllegalArgumentException::class.java) { + Sns("arn:aws:sns:us-east-1:012345678912:topic-test", "arn:aws:iam:us-east-1:0123456789:role-test") + } + val jsonString = + "{\"topic_arn\":\"arn:aws:sns:us-east-1:012345678912:topic-test\",\"role_arn\":\"arn:aws:iam:us-east-1:0123456789:role-test\"}" + assertThrows(IllegalArgumentException::class.java) { + createObjectFromJsonString(jsonString) { Sns.parse(it) } + } + } + + @Test + fun `test SNS correctly validates SNS FIFO topic ARN`() { + try { + Sns( + "arn:aws:sns:ap-southeast-2:333654771707:sns-fifo-alerting.fifo", + "arn:aws:iam::012345678912:role/iam-test" + ) + } catch (e: Exception) { + fail("Expected fifo sns topic ARN to be validated successfully", e) + } + } + + @Test + fun `SNS serialize and deserialize transport object should be equal`() { + val sampleSns = Sns("arn:aws:sns:us-east-1:012345678912:topic-test", "arn:aws:iam::012345678912:role/iam-test") + val recreatedObject = 
recreateObject(sampleSns) { Sns(it) }
+ Assertions.assertEquals(sampleSns, recreatedObject)
+ }
+
+ @Test
+ fun `SNS serialize and deserialize using json object should be equal`() {
+ val sampleSns = Sns("arn:aws:sns:us-east-1:012345678912:topic-test", "arn:aws:iam::012345678912:role/iam-test")
+ val jsonString = getJsonString(sampleSns)
+ val recreatedObject = createObjectFromJsonString(jsonString) { Sns.parse(it) }
+ Assertions.assertEquals(sampleSns, recreatedObject)
+ }
+
+ @Test
+ fun `SNS should deserialize json object using parser`() {
+ val sampleSns = Sns("arn:aws:sns:us-east-1:012345678912:topic-test", "arn:aws:iam::012345678912:role/iam-test")
+ val jsonString = "{\"topic_arn\":\"${sampleSns.topicArn}\",\"role_arn\":\"${sampleSns.roleArn}\"}"
+ val recreatedObject = createObjectFromJsonString(jsonString) { Sns.parse(it) }
+ Assertions.assertEquals(sampleSns, recreatedObject)
+ }
+
+ @Test
+ fun `SNS should throw exception when invalid json object is passed`() {
+ val jsonString = "sample message"
+ assertThrows(JsonParseException::class.java) {
+ createObjectFromJsonString(jsonString) { Sns.parse(it) }
+ }
+ }
+
+ @Test
+ fun `SNS should throw exception when arn is replaced with arn2 in json object`() {
+ val sampleSns = Sns("arn:aws:sns:us-east-1:012345678912:topic-test", "arn:aws:iam::012345678912:role/iam-test")
+ val jsonString = "{\"topic_arn2\":\"${sampleSns.topicArn}\",\"role_arn\":\"${sampleSns.roleArn}\"}"
+ assertThrows(IllegalArgumentException::class.java) {
+ createObjectFromJsonString(jsonString) { Sns.parse(it) }
+ }
+ }
+
+ @Test
+ fun `SNS should safely ignore extra field in json object`() {
+ val sampleSns = Sns("arn:aws:sns:us-east-1:012345678912:topic-test", null)
+ val jsonString = "{\"topic_arn\":\"${sampleSns.topicArn}\", \"another\":\"field\"}"
+ val recreatedObject = createObjectFromJsonString(jsonString) { Sns.parse(it) }
+ Assertions.assertEquals(sampleSns, recreatedObject)
+ }
+}
diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/WebhookTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/WebhookTests.kt
index 9ca39c7c..3272203d 100644
--- a/src/test/kotlin/org/opensearch/commons/notifications/model/WebhookTests.kt
+++ b/src/test/kotlin/org/opensearch/commons/notifications/model/WebhookTests.kt
@@ -1,28 +1,6 @@
/*
+ * Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-/*
- * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License").
- * You may not use this file except in compliance with the License.
- * A copy of the License is located at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * or in the "license" file accompanying this file. This file is distributed
- * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied. See the License for the specific language governing
- * permissions and limitations under the License.
- *
*/
package org.opensearch.commons.notifications.model
@@ -46,7 +24,11 @@ internal class WebhookTests {
@Test
fun `Webhook serialize and deserialize using json object should be equal`() {
- val sampleWebhook = Webhook("https://domain.com/sample_url#1234567890", mapOf(Pair("key", "value")))
+ val sampleWebhook = Webhook(
+ "http://domain.com/sample_url#1234567890",
+ mapOf(Pair("key", "value")),
+ HttpMethodType.PUT
+ )
val jsonString = getJsonString(sampleWebhook)
val recreatedObject = createObjectFromJsonString(jsonString) { Webhook.parse(it) }
assertEquals(sampleWebhook, recreatedObject)
@@ -54,13 +36,18 @@ internal class WebhookTests {
@Test
fun `Webhook should deserialize json object using parser`() {
- val sampleWebhook = Webhook("https://domain.com/sample_url#1234567890", mapOf(Pair("key", "value")))
+ val sampleWebhook = Webhook(
+ "https://domain.com/sample_url#1234567890",
+ mapOf(Pair("key", "value")),
+ HttpMethodType.PATCH
+ )
val jsonString = """
{
"url":"${sampleWebhook.url}",
"header_params":{
"key":"value"
- }
+ },
+ "method":"PATCH"
}
""".trimIndent()
val recreatedObject = createObjectFromJsonString(jsonString) { Webhook.parse(it) }
@@ -96,11 +83,11 @@
}
@Test
- fun `Webhook should throw exception when url protocol is not https`() {
+ fun `Webhook should throw exception when url protocol is not https or http`() {
assertThrows<IllegalArgumentException> {
- Webhook("http://domain.com/sample_url#1234567890")
+ Webhook("ftp://domain.com/sample_url#1234567890")
}
- val jsonString = "{\"url\":\"http://domain.com/sample_url\"}"
+ val jsonString = "{\"url\":\"ftp://domain.com/sample_url\"}"
assertThrows<IllegalArgumentException> {
createObjectFromJsonString(jsonString) { Webhook.parse(it) }
}
diff --git a/src/test/kotlin/org/opensearch/commons/notifications/model/config/ConfigPropertiesTests.kt b/src/test/kotlin/org/opensearch/commons/notifications/model/config/ConfigPropertiesTests.kt
index 0217c53f..c3d20dee 100644
--- a/src/test/kotlin/org/opensearch/commons/notifications/model/config/ConfigPropertiesTests.kt
+++ b/src/test/kotlin/org/opensearch/commons/notifications/model/config/ConfigPropertiesTests.kt
@@ -5,7 +5,9 @@ import org.opensearch.commons.notifications.model.Chime
import org.opensearch.commons.notifications.model.ConfigType
import org.opensearch.commons.notifications.model.Email
import org.opensearch.commons.notifications.model.EmailGroup
+import org.opensearch.commons.notifications.model.EmailRecipient
import org.opensearch.commons.notifications.model.MethodType
+import org.opensearch.commons.notifications.model.MicrosoftTeams
import org.opensearch.commons.notifications.model.Slack
import org.opensearch.commons.notifications.model.SmtpAccount
import org.opensearch.commons.notifications.model.Webhook
@@ -21,6 +23,11 @@ internal class ConfigPropertiesTests {
assertEquals(getReaderForConfigType(ConfigType.SLACK), Slack.reader)
}
+ @Test
+ fun `Validate config property reader Microsoft Teams`() {
+ assertEquals(getReaderForConfigType(ConfigType.MICROSOFT_TEAMS), MicrosoftTeams.reader)
+ }
+
@Test
fun `Validate config property reader chime`() {
assertEquals(getReaderForConfigType(ConfigType.CHIME), Chime.reader)
@@ -47,7 +54,7 @@ internal class ConfigPropertiesTests {
}
@Test
- fun `Validate config data parse slack`() {
+ fun `Validate config data parse slack`() {
val sampleSlack = Slack("https://domain.com/sample_url#1234567890")
val jsonString = getJsonString(sampleSlack)
val recreatedObject = createObjectFromJsonString(jsonString) { createConfigData(ConfigType.SLACK, it) }
assertEquals(sampleSlack, recreatedObject)
@@ -70,9
+77,17 @@ internal class ConfigPropertiesTests { assertEquals(sampleWebhook, recreatedObject) } + @Test + fun `Validate config data parse Microsoft Teams`() { + val sampleMicrosoftTeams = MicrosoftTeams("https://domain.com/sample_url#1234567890") + val jsonString = getJsonString(sampleMicrosoftTeams) + val recreatedObject = createObjectFromJsonString(jsonString) { createConfigData(ConfigType.MICROSOFT_TEAMS, it) } + assertEquals(sampleMicrosoftTeams, recreatedObject) + } + @Test fun `Validate config data parse EmailGroup`() { - val sampleEmailGroup = EmailGroup(listOf("email1@email.com", "email2@email.com")) + val sampleEmailGroup = EmailGroup(listOf(EmailRecipient("email1@email.com"), EmailRecipient("email2@email.com"))) val jsonString = getJsonString(sampleEmailGroup) val recreatedObject = createObjectFromJsonString(jsonString) { createConfigData(ConfigType.EMAIL_GROUP, it) } assertEquals(sampleEmailGroup, recreatedObject) diff --git a/src/test/kotlin/org/opensearch/commons/utils/TestHelpers.kt b/src/test/kotlin/org/opensearch/commons/utils/TestHelpers.kt index 35f5d3b9..1170851b 100644 --- a/src/test/kotlin/org/opensearch/commons/utils/TestHelpers.kt +++ b/src/test/kotlin/org/opensearch/commons/utils/TestHelpers.kt @@ -1,38 +1,16 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -/* - * Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"). - * You may not use this file except in compliance with the License. - * A copy of the License is located at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * or in the "license" file accompanying this file. This file is distributed - * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language governing - * permissions and limitations under the License. 
- * */ package org.opensearch.commons.utils -import org.opensearch.common.xcontent.DeprecationHandler -import org.opensearch.common.xcontent.NamedXContentRegistry -import org.opensearch.common.xcontent.ToXContent import org.opensearch.common.xcontent.XContentFactory -import org.opensearch.common.xcontent.XContentParser import org.opensearch.common.xcontent.XContentType +import org.opensearch.core.xcontent.DeprecationHandler +import org.opensearch.core.xcontent.NamedXContentRegistry +import org.opensearch.core.xcontent.ToXContent +import org.opensearch.core.xcontent.XContentParser import java.io.ByteArrayOutputStream fun getJsonString(xContent: ToXContent): String { diff --git a/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker new file mode 100644 index 00000000..ca6ee9ce --- /dev/null +++ b/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker @@ -0,0 +1 @@ +mock-maker-inline \ No newline at end of file diff --git a/src/test/resources/sample.pem b/src/test/resources/sample.pem index 7ba92534..a1fc20a7 100644 --- a/src/test/resources/sample.pem +++ b/src/test/resources/sample.pem @@ -1,28 +1,25 @@ -----BEGIN CERTIFICATE----- -MIIEyTCCA7GgAwIBAgIGAWLrc1O2MA0GCSqGSIb3DQEBCwUAMIGPMRMwEQYKCZIm -iZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQ -RXhhbXBsZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290 -IENBMSEwHwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0EwHhcNMTgwNDIy -MDM0MzQ3WhcNMjgwNDE5MDM0MzQ3WjBeMRIwEAYKCZImiZPyLGQBGRYCZGUxDTAL -BgNVBAcMBHRlc3QxDTALBgNVBAoMBG5vZGUxDTALBgNVBAsMBG5vZGUxGzAZBgNV -BAMMEm5vZGUtMC5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBAJa+f476vLB+AwK53biYByUwN+40D8jMIovGXm6wgT8+9Sbs899dDXgt -9CE1Beo65oP1+JUz4c7UHMrCY3ePiDt4cidHVzEQ2g0YoVrQWv0RedS/yx/DKhs8 -Pw1O715oftP53p/2ijD5DifFv1eKfkhFH+lwny/vMSNxellpl6NxJTiJVnQ9HYOL -gf2t971ITJHnAuuxUF48HcuNovW4rhtkXef8kaAN7cE3LU+A9T474ULNCKkEFPIl -ZAKN3iJNFdVsxrTU+CUBHzk73Do1cCkEvJZ0ZFjp0Z3y8wLY/gqWGfGVyA9l2CUq -eIZNf55PNPtGzOrvvONiui48vBKH1LsCAwEAAaOCAVkwggFVMIG8BgNVHSMEgbQw -gbGAFJI1DOAPHitF9k0583tfouYSl0BzoYGVpIGSMIGPMRMwEQYKCZImiZPyLGQB -GRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBs -ZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290IENBMSEw -HwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0GCAQEwHQYDVR0OBBYEFKyv -78ZmFjVKM9g7pMConYH7FVBHMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgXg -MCAGA1UdJQEB/wQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjA1BgNVHREELjAsiAUq -AwQFBYISbm9kZS0wLmV4YW1wbGUuY29tgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZI -hvcNAQELBQADggEBAIOKuyXsFfGv1hI/Lkpd/73QNqjqJdxQclX57GOMWNbOM5H0 -5/9AOIZ5JQsWULNKN77aHjLRr4owq2jGbpc/Z6kAd+eiatkcpnbtbGrhKpOtoEZy -8KuslwkeixpzLDNISSbkeLpXz4xJI1ETMN/VG8ZZP1bjzlHziHHDu0JNZ6TnNzKr -XzCGMCohFfem8vnKNnKUneMQMvXd3rzUaAgvtf7Hc2LTBlf4fZzZF1EkwdSXhaMA -1lkfHiqOBxtgeDLxCHESZ2fqgVqsWX+t3qHQfivcPW6txtDyrFPRdJOGhiMGzT/t -e/9kkAtQRgpTb3skYdIOOUOV0WGQ60kJlFhAzIs= +MIIEPDCCAySgAwIBAgIUZjrlDPP8azRDPZchA/XEsx0X2iIwDQYJKoZIhvcNAQEL +BQAwgY8xEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSEwHwYDVQQLDBhFeGFtcGxl +IENvbSBJbmMuIFJvb3QgQ0ExITAfBgNVBAMMGEV4YW1wbGUgQ29tIEluYy4gUm9v +dCBDQTAeFw0yMzA4MjkwNDIzMTJaFw0zMzA4MjYwNDIzMTJaMFcxCzAJBgNVBAYT +AmRlMQ0wCwYDVQQHDAR0ZXN0MQ0wCwYDVQQKDARub2RlMQ0wCwYDVQQLDARub2Rl +MRswGQYDVQQDDBJub2RlLTAuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCm93kXteDQHMAvbUPNPW5pyRHKDD42XGWSgq0k1D29C/Ud +yL21HLzTJa49ZU2ldIkSKs9JqbkHdyK0o8MO6L8dotLoYbxDWbJFW8bp1w6tDTU0 
+HGkn47XVu3EwbfrTENg3jFu+Oem6a/501SzITzJWtS0cn2dIFOBimTVpT/4Zv5qr +XA6Cp4biOmoTYWhi/qQl8d0IaADiqoZ1MvZbZ6x76qTrRAbg+UWkpTEXoH1xTc8n +dibR7+HP6OTqCKvo1NhE8uP4pY+fWd6b6l+KLo3IKpfTbAIJXIO+M67FLtWKtttD +ao94B069skzKk6FPgW/OZh6PRCD0oxOavV+ld2SjAgMBAAGjgcYwgcMwRwYDVR0R +BEAwPogFKgMEBQWCEm5vZGUtMC5leGFtcGxlLmNvbYIJbG9jYWxob3N0hxAAAAAA +AAAAAAAAAAAAAAABhwR/AAABMAsGA1UdDwQEAwIF4DAdBgNVHSUEFjAUBggrBgEF +BQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU0/qDQaY10jIo +wCjLUpz/HfQXyt8wHwYDVR0jBBgwFoAUF4ffoFrrZhKn1dD4uhJFPLcrAJwwDQYJ +KoZIhvcNAQELBQADggEBAD2hkndVih6TWxoe/oOW0i2Bq7ScNO/n7/yHWL04HJmR +MaHv/Xjc8zLFLgHuHaRvC02ikWIJyQf5xJt0Oqu2GVbqXH9PBGKuEP2kCsRRyU27 +zTclAzfQhqmKBTYQ/3lJ3GhRQvXIdYTe+t4aq78TCawp1nSN+vdH/1geG6QjMn5N +1FU8tovDd4x8Ib/0dv8RJx+n9gytI8n/giIaDCEbfLLpe4EkV5e5UNpOnRgJjjuy +vtZutc81TQnzBtkS9XuulovDE0qI+jQrKkKu8xgGLhgH0zxnPkKtUg2I3Aq6zl1L +zYkEOUF8Y25J6WeY88Yfnc0iigI+Pnz5NK8R9GL7TYo= -----END CERTIFICATE----- diff --git a/src/test/resources/test-kirk.jks b/src/test/resources/test-kirk.jks index 174dbda6..6dbc51e7 100644 Binary files a/src/test/resources/test-kirk.jks and b/src/test/resources/test-kirk.jks differ
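A note on the SnsTests added in this change: the FIFO-topic case only asserts that the Sns constructor accepts the ARN; it does not round-trip that ARN through serialization the way the other cases do. A minimal sketch of such a round-trip test is shown below for illustration only. It reuses only constructors and helpers that already appear in this diff (Sns, Sns.parse, recreateObject, getJsonString, createObjectFromJsonString); the file and class name SnsFifoSerializationTests is hypothetical and is not part of this change.

/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.commons.notifications.model

import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.opensearch.commons.utils.createObjectFromJsonString
import org.opensearch.commons.utils.getJsonString
import org.opensearch.commons.utils.recreateObject

// Hypothetical companion to SnsTests; exercises the FIFO topic ARN end to end.
internal class SnsFifoSerializationTests {

    @Test
    fun `SNS FIFO topic serialize and deserialize transport object should be equal`() {
        // StreamInput/StreamOutput round trip, mirroring the existing SnsTests cases.
        val sampleSns = Sns(
            "arn:aws:sns:ap-southeast-2:333654771707:sns-fifo-alerting.fifo",
            "arn:aws:iam::012345678912:role/iam-test"
        )
        val recreatedObject = recreateObject(sampleSns) { Sns(it) }
        Assertions.assertEquals(sampleSns, recreatedObject)
    }

    @Test
    fun `SNS FIFO topic serialize and deserialize using json object should be equal`() {
        // JSON round trip via the shared TestHelpers functions.
        val sampleSns = Sns(
            "arn:aws:sns:ap-southeast-2:333654771707:sns-fifo-alerting.fifo",
            "arn:aws:iam::012345678912:role/iam-test"
        )
        val jsonString = getJsonString(sampleSns)
        val recreatedObject = createObjectFromJsonString(jsonString) { Sns.parse(it) }
        Assertions.assertEquals(sampleSns, recreatedObject)
    }
}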