diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 855a0949..f1b13fd6 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ # This should match the owning team set up in https://github.com/orgs/opensearch-project/teams -* @opensearch-project/geospatial \ No newline at end of file +* @heemin32 @navneet1v @VijayanB @vamshin @jmazanec15 @naveentatikonda @junqiu-lei @martin-gaievski diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index e6b91a11..1309a5eb 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -3,18 +3,60 @@ on: push: branches: - "*" + - "feature/**" pull_request: branches: - "*" + - "feature/**" jobs: - Build: + Get-CI-Image-Tag: + uses: opensearch-project/opensearch-build/.github/workflows/get-ci-image-tag.yml@main + with: + product: opensearch + + Build-linux: + needs: Get-CI-Image-Tag strategy: matrix: - java: [11, 17] + java: [11, 17, 21] name: Build and Test geospatial Plugin runs-on: ubuntu-latest + container: + # using the same image which is used by opensearch-build team to build the OpenSearch Distribution + # this image tag is subject to change as more dependencies and updates will arrive over time + image: ${{ needs.Get-CI-Image-Tag.outputs.ci-image-version-linux }} + # need to switch to root so that github actions can install runner binary on container without permission issues. 
+ options: --user root + + steps: + - name: Checkout geospatial + uses: actions/checkout@v1 + + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + + - name: Run build + run: | + chown -R 1000:1000 `pwd` + su `id -un 1000` -c "./gradlew build" + + - name: Upload Coverage Report + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} + + Build-windows-macos: + strategy: + matrix: + java: [11, 17, 21] + os: [windows-latest, macos-latest] + + name: Build and Test geospatial Plugin + runs-on: ${{ matrix.os }} steps: - name: Checkout geospatial @@ -32,4 +74,4 @@ jobs: - name: Upload Coverage Report uses: codecov/codecov-action@v1 with: - token: ${{ secrets.CODECOV_TOKEN }} \ No newline at end of file + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml new file mode 100644 index 00000000..b0bd22ed --- /dev/null +++ b/.github/workflows/auto-release.yml @@ -0,0 +1,28 @@ +name: Releases + +on: + push: + tags: + - '*' + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: GitHub App token + id: github_app_token + uses: tibdex/github-app-token@v1.5.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + installation_id: 22958780 + - name: Get tag + id: tag + uses: dawidd6/action-get-tag@v1 + - uses: actions/checkout@v2 + - uses: ncipollo/release-action@v1 + with: + github_token: ${{ steps.github_app_token.outputs.token }} + bodyFile: release-notes/opensearch-geospatial.release-notes-${{steps.tag.outputs.tag}}.md diff --git a/.github/workflows/changelog_verifier.yml b/.github/workflows/changelog_verifier.yml new file mode 100644 index 00000000..992a38b6 --- /dev/null +++ b/.github/workflows/changelog_verifier.yml @@ -0,0 +1,18 @@ +name: "Changelog Verifier" +on: + pull_request: + types: [opened, edited, review_requested, synchronize, reopened, 
ready_for_review, labeled, unlabeled] + +jobs: + # Enforces the update of a changelog file on every pull request + verify-changelog: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + ref: ${{ github.event.pull_request.head.sha }} + + - uses: dangoslen/changelog-enforcer@v3 + with: + skipLabels: "autocut, skip-changelog" diff --git a/.github/workflows/dco.yml b/.github/workflows/dco.yml deleted file mode 100644 index d4c9ff2c..00000000 --- a/.github/workflows/dco.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Developer Certificate of Origin Check - -on: [pull_request] - -jobs: - check: - runs-on: ubuntu-latest - - steps: - - name: Get PR Commits - id: 'get-pr-commits' - uses: tim-actions/get-pr-commits@v1.1.0 - with: - token: ${{ secrets.GITHUB_TOKEN }} - - name: DCO Check - uses: tim-actions/dco@v1.1.0 - with: - commits: ${{ steps.get-pr-commits.outputs.commits }} \ No newline at end of file diff --git a/.github/workflows/maven-publish.yml b/.github/workflows/maven-publish.yml new file mode 100644 index 00000000..724e3a21 --- /dev/null +++ b/.github/workflows/maven-publish.yml @@ -0,0 +1,35 @@ +name: Publish snapshots to maven + +on: + workflow_dispatch: + push: + branches: + - 'main' + - '1.*' + - '2.*' + +jobs: + build-and-publish-snapshots: + runs-on: ubuntu-latest + + permissions: + id-token: write + contents: write + + steps: + - uses: actions/setup-java@v3 + with: + distribution: temurin # Temurin is a distribution of adoptium + java-version: 11 + - uses: actions/checkout@v3 + - uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.PUBLISH_SNAPSHOTS_ROLE }} + aws-region: us-east-1 + - name: publish snapshots to maven + run: | + export SONATYPE_USERNAME=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-username --query SecretString --output text) + export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query 
SecretString --output text) + echo "::add-mask::$SONATYPE_USERNAME" + echo "::add-mask::$SONATYPE_PASSWORD" + ./gradlew publishPluginZipPublicationToSnapshotsRepository diff --git a/.github/workflows/test_security.yml b/.github/workflows/test_security.yml new file mode 100644 index 00000000..88328c14 --- /dev/null +++ b/.github/workflows/test_security.yml @@ -0,0 +1,88 @@ +name: Test Geospatial on Secure Cluster +on: + schedule: + - cron: '0 0 * * *' # every night + push: + branches: + - "*" + - "feature/**" + pull_request: + branches: + - "*" + - "feature/**" + +jobs: + Build-ad: + strategy: + matrix: + java: [ 11,17,21 ] + os: [ubuntu-latest] + fail-fast: true + + name: Test Geospatial on Secure Cluster + runs-on: ${{ matrix.os }} + + steps: + - name: Checkout Geospatial + uses: actions/checkout@v1 + + - name: Setup Java ${{ matrix.java }} + uses: actions/setup-java@v1 + with: + java-version: ${{ matrix.java }} + + - name: Assemble Geospatial + run: | + ./gradlew assemble + + # example of variables: + # plugin = opensearch-geospatial-2.7.0.0-SNAPSHOT.zip + # version = 2.7.0 + # plugin_version = 2.7.0.0 + # qualifier = `SNAPSHOT` + - name: Pull and Run Docker + run: | + plugin=`basename $(ls build/distributions/*.zip)` + version=`echo $plugin|awk -F- '{print $3}'| cut -d. -f 1-3` + plugin_version=`echo $plugin|awk -F- '{print $3}'| cut -d. -f 1-4` + qualifier=`echo $plugin|awk -F- '{print $4}'| cut -d. -f 1-1` + if [ $qualifier != `SNAPSHOT` ]; + then + docker_version=$version-$qualifier + else + docker_version=$version + fi + echo plugin version plugin_version qualifier docker_version + echo "($plugin) ($version) ($plugin_version) ($qualifier) ($docker_version)" + + cd .. 
+ if docker pull opensearchstaging/opensearch:$docker_version + then + echo "FROM opensearchstaging/opensearch:$docker_version" >> Dockerfile + echo "RUN if [ -d /usr/share/opensearch/plugins/opensearch-geospatial ]; then /usr/share/opensearch/bin/opensearch-plugin remove opensearch-geospatial; fi" >> Dockerfile + echo "ADD geospatial/build/distributions/$plugin /tmp/" >> Dockerfile + echo "RUN /usr/share/opensearch/bin/opensearch-plugin install --batch file:/tmp/$plugin" >> Dockerfile + docker build -t opensearch-geospatial:test . + echo "imagePresent=true" >> $GITHUB_ENV + else + echo "imagePresent=false" >> $GITHUB_ENV + fi + + - name: Run Docker Image + if: env.imagePresent == 'true' + run: | + cd .. + docker run -p 9200:9200 -d -p 9600:9600 -e "discovery.type=single-node" opensearch-geospatial:test + sleep 90 + + - name: Run Geospatial Integ Test + if: env.imagePresent == 'true' + run: | + security=`curl -XGET https://localhost:9200/_cat/plugins?v -u admin:admin --insecure |grep opensearch-security|wc -l` + if [ $security -gt 0 ] + then + echo "Security plugin is available" + ./gradlew integTest -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername="docker-cluster" -Dhttps=true -Duser=admin -Dpassword=admin + else + echo "Security plugin is NOT available, skipping integration tests" + fi diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..2cdb3c0e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,25 @@ +# CHANGELOG +All notable changes to this project are documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +See the [CONTRIBUTING guide](./CONTRIBUTING.md#Changelog) for instructions on how to add changelog entries. 
+ +## [Unreleased 3.0](https://github.com/opensearch-project/geospatial/compare/2.x...HEAD) +### Features +### Enhancements +### Bug Fixes +### Infrastructure +### Documentation +### Maintenance +### Refactoring + +## [Unreleased 2.x](https://github.com/opensearch-project/geospatial/compare/2.11...2.x) +### Features +* Add denylist ip config for datasource endpoint ([#573](https://github.com/opensearch-project/geospatial/pull/573)) +### Enhancements +### Bug Fixes +### Infrastructure +### Documentation +### Maintenance +* Upgrade gradle to 8.4 ([#596](https://github.com/opensearch-project/geospatial/pull/596)) +### Refactoring diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3ca8a3ea..4f2f1c57 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,6 +6,7 @@ - [Documentation Changes](#documentation-changes) - [Contributing Code](#contributing-code) - [Developer Certificate of Origin](#developer-certificate-of-origin) +- [Changelog](#changelog) - [Review Process](#review-process) ## Contributing to OpenSearch @@ -89,6 +90,42 @@ Signed-off-by: Jane Smith You may type this line on your own when writing your commit messages. However, if your user.name and user.email are set in your git configs, you can use `-s` or `– – signoff` to add the `Signed-off-by` line to the end of the commit message. +## Changelog + +OpenSearch maintains version specific changelog by enforcing a change to the ongoing [CHANGELOG](CHANGELOG.md) file adhering to the [Keep A Changelog](https://keepachangelog.com/en/1.0.0/) format. The purpose of the changelog is for the contributors and maintainers to incrementally build the release notes throughout the development process to avoid a painful and error-prone process of attempting to compile the release notes at release time. On each release the "unreleased" entries of the changelog are moved to the appropriate release notes document in the `./release-notes` folder. 
Also, incrementally building the changelog provides a concise, human-readable list of significant features that have been added to the unreleased version under development. + +### Which changes require a CHANGELOG entry? +Changelogs are intended for operators/administrators, developers integrating with libraries and APIs, and end-users interacting with OpenSearch Dashboards and/or the REST API (collectively referred to as "user"). In short, any change that a user of OpenSearch might want to be aware of should be included in the changelog. The changelog is _not_ intended to replace the git commit log that developers of OpenSearch itself rely upon. The following are some examples of changes that should be in the changelog: + +- A newly added feature +- A fix for a user-facing bug +- Dependency updates +- Fixes for security issues + +The following are some examples where a changelog entry is not necessary: + +- Adding, modifying, or fixing tests +- An incremental PR for a larger feature (such features should include _one_ changelog entry for the feature) +- Documentation changes or code refactoring +- Build-related changes + +Any PR that does not include a changelog entry will result in a failure of the validation workflow in GitHub. If the contributor and maintainers agree that no changelog entry is required, then the `skip-changelog` label can be applied to the PR which will result in the workflow passing. + +### How to add my changes to [CHANGELOG](CHANGELOG.md)? + +Adding in the change is two step process: +1. Add your changes to the corresponding section within the CHANGELOG file with dummy pull request information, publish the PR +2. Update the entry for your change in [`CHANGELOG.md`](CHANGELOG.md) and make sure that you reference the pull request there. + +### Where should I put my CHANGELOG entry? +Please review the [branching strategy](https://github.com/opensearch-project/.github/blob/main/RELEASING.md#opensearch-branching) document. 
The changelog on the `main` branch will contain sections for the _next major_ and _next minor_ releases. Your entry should go into the section it is intended to be released in. In practice, most changes to `main` will be backported to the next minor release so most entries will likely be in that section. + +The following examples assume the _next major_ release on main is 3.0, then _next minor_ release is 2.5, and the _current_ release is 2.4. + +- **Add a new feature to release in next minor:** Add a changelog entry to `[Unreleased 2.x]` on main, then backport to 2.x (including the changelog entry). +- **Introduce a breaking API change to release in next major:** Add a changelog entry to `[Unreleased 3.0]` on main, do not backport. +- **Upgrade a dependency to fix a CVE:** Add a changelog entry to `[Unreleased 2.x]` on main, then backport to 2.x (including the changelog entry), then backport to 2.4 and ensure the changelog entry is added to `[Unreleased 2.4.1]`. + ## Review Process We deeply appreciate everyone who takes the time to make a contribution. We will review all contributions as quickly as possible. As a reminder, [opening an issue](https://github.com/opensearch-project/geospatial/issues) discussing your change before you make it is the best way to smooth the PR process. This will prevent a rejection because someone else is already working on the problem, or because the solution is incompatible with the architectural direction. 
diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 876bdd76..f030c04e 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -1,71 +1,16 @@ -- [Overview](#overview) -- [Current Maintainers](#current-maintainers) -- [Maintainer Responsibilities](#maintainer-responsibilities) - - [Uphold Code of Conduct](#uphold-code-of-conduct) - - [Prioritize Security](#prioritize-security) - - [Review Pull Requests](#review-pull-requests) - - [Triage Open Issues](#triage-open-issues) - - [Be Responsive](#be-responsive) - - [Maintain Overall Health of the Repo](#maintain-overall-health-of-the-repo) - - [Use Semver](#use-semver) - - [Release Frequently](#release-frequently) - - [Promote Other Maintainers](#promote-other-maintainers) - ## Overview -This document explains who the maintainers are (see below), what they do in this repo, and how they should be doing it. If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). +This document contains a list of maintainers in this repo. See [opensearch-project/.github/RESPONSIBILITIES.md](https://github.com/opensearch-project/.github/blob/main/RESPONSIBILITIES.md#maintainer-responsibilities) that explains what the role of maintainer means, what maintainers do in this and other repos, and how they should be doing it. If you're interested in contributing, and becoming a maintainer, see [CONTRIBUTING](CONTRIBUTING.md). 
## Current Maintainers -| Maintainer | GitHub ID | Affiliation | -| ------------------------ | --------------------------------------- | ----------- | -| Jack Mazanec | [jmazanec15](https://github.com/jmazanec15) | Amazon | -| Vamshi Vijay Nakkirtha | [vamshin](https://github.com/vamshin) | Amazon | -| Vijayan Balasubramanian | [VijayanB](https://github.com/VijayanB) | Amazon | - -## Maintainer Responsibilities - -Maintainers are active and visible members of the community, and have [maintain-level permissions on a repository](https://docs.github.com/en/organizations/managing-access-to-your-organizations-repositories/repository-permission-levels-for-an-organization). Use those privileges to serve the community and evolve code as follows. - -### Uphold Code of Conduct - -Model the behavior set forward by the [Code of Conduct](CODE_OF_CONDUCT.md) and raise any violations to other maintainers and admins. - -### Prioritize Security - -Security is your number one priority. Maintainer's Github keys must be password protected securely and any reported security vulnerabilities are addressed before features or bugs. - -Note that this repository is monitored and supported 24/7 by Amazon Security, see [Reporting a Vulnerability](SECURITY.md) for details. - -### Review Pull Requests - -Review pull requests regularly, comment, suggest, reject, merge and close. Accept only high quality pull-requests. Provide code reviews and guidance on incomming pull requests. Don't let PRs be stale and do your best to be helpful to contributors. - -### Triage Open Issues - -Manage labels, review issues regularly, and triage by labelling them. - -All repositories in this organization have a standard set of labels, including `bug`, `documentation`, `duplicate`, `enhancement`, `good first issue`, `help wanted`, `blocker`, `invalid`, `question`, `wontfix`, and `untriaged`, along with release labels, such as `v1.0.0`, `v1.1.0`, `v2.0.0`, `patch`, and `backport`. 
- -Use labels to target an issue or a PR for a given release, add `help wanted` to good issues for new community members, and `blocker` for issues that scare you or need immediate attention. Request for more information from a submitter if an issue is not clear. Create new labels as needed by the project. - -### Be Responsive - -Respond to enhancement requests, and forum posts. Allocate time to reviewing and commenting on issues and conversations as they come in. - -### Maintain Overall Health of the Repo - -Keep the `main` branch at production quality at all times. Backport features as needed. Cut release branches and tags to enable future patches. - -### Use Semver - -Use and enforce [semantic versioning](https://semver.org/) and do not let breaking changes be made outside of major releases. - -### Release Frequently - -Make frequent project releases to the community. - -### Promote Other Maintainers - -Assist, add, and remove [MAINTAINERS](MAINTAINERS.md). Exercise good judgement, and propose high quality contributors to become co-maintainers. 
- +| Maintainer | GitHub ID | Affiliation | +|-------------------------|-------------------------------------------------------|-------------| +| Heemin Kim | [heemin32](https://github.com/heemin32) | Amazon | +| Jack Mazanec | [jmazanec15](https://github.com/jmazanec15) | Amazon | +| Junqiu Lei | [junqiu-lei](https://github.com/junqiu-lei) | Amazon | +| Martin Gaievski | [martin-gaievski](https://github.com/martin-gaievski) | Amazon | +| Naveen Tatikonda | [naveentatikonda](https://github.com/naveentatikonda) | Amazon | +| Navneet Verma | [navneet1v](https://github.com/navneet1v) | Amazon | +| Vamshi Vijay Nakkirtha | [vamshin](https://github.com/vamshin) | Amazon | +| Vijayan Balasubramanian | [VijayanB](https://github.com/VijayanB) | Amazon | diff --git a/build.gradle b/build.gradle index bdcb0179..41dec00f 100644 --- a/build.gradle +++ b/build.gradle @@ -5,6 +5,8 @@ import org.opensearch.gradle.test.RestIntegTestTask +import java.util.concurrent.Callable + apply plugin: 'java' apply plugin: 'idea' apply plugin: 'opensearch.opensearchplugin' @@ -35,6 +37,7 @@ opensearchplugin { classname "${projectPath}.${pathToPlugin}.${pluginClassName}" licenseFile rootProject.file('LICENSE') noticeFile rootProject.file('NOTICE') + extendedPlugins = ['opensearch-job-scheduler'] } // This requires an additional Jar not published as part of build-tools @@ -45,8 +48,20 @@ validateNebulaPom.enabled = false buildscript { ext { - opensearch_version = System.getProperty("opensearch.version", "2.2.0-SNAPSHOT") - version_qualifier = System.getProperty("build.version_qualifier", "") + opensearch_version = System.getProperty("opensearch.version", "2.12.0-SNAPSHOT") + buildVersionQualifier = System.getProperty("build.version_qualifier", "") + isSnapshot = "true" == System.getProperty("build.snapshot", "true") + // 2.2.0-SNAPSHOT -> 2.2.0.0-SNAPSHOT + version_tokens = opensearch_version.tokenize('-') + opensearch_build = 
version_tokens[0] + '.0' + plugin_no_snapshot = opensearch_build + if (buildVersionQualifier) { + opensearch_build += "-${buildVersionQualifier}" + plugin_no_snapshot += "-${buildVersionQualifier}" + } + if (isSnapshot) { + opensearch_build += "-SNAPSHOT" + } opensearch_group = "org.opensearch" } @@ -59,8 +74,8 @@ buildscript { dependencies { classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}" - classpath "com.diffplug.spotless:spotless-plugin-gradle:5.6.1" - classpath "io.freefair.gradle:lombok-plugin:6.4.3" + classpath "com.diffplug.spotless:spotless-plugin-gradle:6.20.0" + classpath "io.freefair.gradle:lombok-plugin:8.4" } } apply plugin: "com.diffplug.spotless" @@ -70,13 +85,8 @@ ext { } allprojects { - version = opensearch_version.tokenize('-')[0] + '.0' - if (version_qualifier) { - version += "-${version_qualifier}" - } - if (isSnapshot) { - version += "-SNAPSHOT" - } + group = opensearch_group + version = "${opensearch_build}" targetCompatibility = JavaVersion.VERSION_11 sourceCompatibility = JavaVersion.VERSION_11 } @@ -101,11 +111,22 @@ test { } publishing { + repositories { + maven { + name = "Snapshots" + url = "https://aws.oss.sonatype.org/content/repositories/snapshots" + credentials { + username "$System.env.SONATYPE_USERNAME" + password "$System.env.SONATYPE_PASSWORD" + } + } + } publications { pluginZip(MavenPublication) { publication -> pom { name = pluginName description = pluginDescription + groupId = "org.opensearch.plugin" licenses { license { name = "The Apache License, Version 2.0" @@ -124,16 +145,26 @@ publishing { } +configurations { + zipArchive +} + //****************************************************************************/ // Dependencies //****************************************************************************/ dependencies { + implementation "org.opensearch.plugin:geo:${opensearch_version}" + api project(":libs:h3") yamlRestTestRuntimeOnly "org.apache.logging.log4j:log4j-core:${versions.log4j}" 
testImplementation "org.hamcrest:hamcrest:${versions.hamcrest}" - testImplementation 'org.json:json:20211205' - implementation "org.apache.commons:commons-lang3:3.12.0" + testImplementation 'org.json:json:20231013' + implementation "org.apache.commons:commons-lang3:3.13.0" implementation "org.locationtech.spatial4j:spatial4j:${versions.spatial4j}" implementation "org.locationtech.jts:jts-core:${versions.jts}" + implementation "org.apache.commons:commons-csv:1.10.0" + zipArchive group: 'org.opensearch.plugin', name:'opensearch-job-scheduler', version: "${opensearch_build}" + compileOnly "org.opensearch:opensearch-job-scheduler-spi:${opensearch_build}" + implementation "com.github.seancfoley:ipaddress:5.4.0" } licenseHeaders.enabled = true @@ -186,8 +217,6 @@ integTest { testClusters.integTest { testDistribution = "ARCHIVE" - // This installs our plugin into the testClusters - plugin(project.tasks.bundlePlugin.archiveFile) // Cluster shrink exception thrown if we try to set numberOfNodes to 1, so only apply if > 1 if (_numNodes > 1) numberOfNodes = _numNodes // When running integration tests it doesn't forward the --debug-jvm to the cluster anymore @@ -200,6 +229,49 @@ testClusters.integTest { debugPort += 1 } } + + // This installs our plugin into the testClusters + plugin(project.tasks.bundlePlugin.archiveFile) + plugin(provider(new Callable(){ + @Override + RegularFile call() throws Exception { + return new RegularFile() { + @Override + File getAsFile() { + return configurations.zipArchive.asFileTree.getSingleFile() + } + } + } + })) + + // opensearch-geospatial plugin is being added to the list of plugins for the testCluster during build before + // the opensearch-job-scheduler plugin, which is causing build failures. From the stack trace, this looks like a bug. 
+ // + // Exception in thread "main" java.lang.IllegalArgumentException: Missing plugin [opensearch-job-scheduler], dependency of [opensearch-geospatial] + // at org.opensearch.plugins.PluginsService.addSortedBundle(PluginsService.java:515) + // + // A temporary hack is to reorder the plugins list after evaluation but prior to task execution when the plugins are installed. + // See https://github.com/opensearch-project/anomaly-detection/blob/fd547014fdde5114bbc9c8e49fe7aaa37eb6e793/build.gradle#L400-L422 + nodes.each { node -> + def plugins = node.plugins + def firstPlugin = plugins.get(0) + plugins.remove(0) + plugins.add(firstPlugin) + } +} + +testClusters.yamlRestTest { + plugin(provider(new Callable(){ + @Override + RegularFile call() throws Exception { + return new RegularFile() { + @Override + File getAsFile() { + return configurations.zipArchive.asFileTree.getSingleFile() + } + } + } + })) } run { @@ -210,6 +282,26 @@ tasks.withType(RestIntegTestTask)*.configure { classpath += files(project.configurations.runtimeClasspath.findAll { it.name.contains("log4j-core") }) } +task integTestRemote(type: RestIntegTestTask) { + testClassesDirs = sourceSets.test.output.classesDirs + classpath = sourceSets.test.runtimeClasspath + + systemProperty "https", System.getProperty("https") + systemProperty "user", System.getProperty("user") + systemProperty "password", System.getProperty("password") + + systemProperty 'cluster.number_of_nodes', "${_numNodes}" + + systemProperty 'tests.security.manager', 'false' + + // Run tests with remote cluster only if rest case is defined + if (System.getProperty("tests.rest.cluster") != null) { + filter { + includeTestsMatching "org.opensearch.geospatial.*IT" + } + } +} + spotless { java { removeUnusedImports() @@ -221,12 +313,23 @@ spotless { } jacocoTestReport { - dependsOn integTest, test, yamlRestTest + dependsOn test reports { - xml.enabled = true - html.enabled = true + xml.getRequired().set(true) + 
html.getRequired().set(true) } } check.dependsOn jacocoTestCoverageVerification -jacocoTestCoverageVerification.dependsOn jacocoTestReport \ No newline at end of file +jacocoTestCoverageVerification.dependsOn jacocoTestReport + +// updateVersion: Task to auto increment to the next development iteration +task updateVersion { + onlyIf { System.getProperty('newVersion') } + doLast { + ext.newVersion = System.getProperty('newVersion') + println "Setting version to ${newVersion}." + // String tokenization to support -SNAPSHOT + ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true) + } +} diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 41d9927a..7454180f 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 00e33ede..e411586a 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/libs/build.gradle b/libs/build.gradle new file mode 100644 index 00000000..3d2794e9 --- /dev/null +++ b/libs/build.gradle @@ -0,0 +1,8 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +subprojects { + apply plugin: 'opensearch.build' + description 'OpenSearch Geospatial H3 library' +} diff --git a/libs/h3/LICENSE.txt b/libs/h3/LICENSE.txt new file mode 100644 index 00000000..3ab280eb --- /dev/null +++ b/libs/h3/LICENSE.txt @@ -0,0 +1,204 @@ + + Apache License + Version 2.0, January 2004 + 
http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. diff --git a/libs/h3/NOTICE.txt b/libs/h3/NOTICE.txt new file mode 100644 index 00000000..5201d8e6 --- /dev/null +++ b/libs/h3/NOTICE.txt @@ -0,0 +1,25 @@ +OpenSearch (https://opensearch.org/) +Copyright OpenSearch Contributors + +-- +Elastic-hex + +Copyright 2022 Elasticsearch B.V. + +-- + +This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + +Copyright 2017-2021 Uber Technologies, Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/libs/h3/build.gradle b/libs/h3/build.gradle new file mode 100644 index 00000000..c0ff95e4 --- /dev/null +++ b/libs/h3/build.gradle @@ -0,0 +1,76 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +apply plugin: 'opensearch.build' +apply plugin: 'opensearch.publish' + +tasks.named('forbiddenApisMain').configure { + replaceSignatureFiles 'jdk-signatures' +} + +repositories { + mavenLocal() + maven { url "https://aws.oss.sonatype.org/content/repositories/snapshots" } + mavenCentral() + maven { url "https://plugins.gradle.org/m2/" } +} + +dependencies { + api "org.apache.logging.log4j:log4j-api:${versions.log4j}" + api "org.apache.logging.log4j:log4j-core:${versions.log4j}" + testImplementation "org.opensearch.test:framework:${opensearch_version}" + testImplementation "org.apache.commons:commons-compress:1.21" + testImplementation "org.apache.lucene:lucene-spatial3d:${versions.lucene}" +} +licenseFile = "LICENSE.txt" +noticeFile = "NOTICE.txt" + +project.dependencyLicenses.enabled = false +project.thirdPartyAudit.enabled = false +project.loggerUsageCheck.enabled = false +project.forbiddenApis.ignoreFailures = true + +publishing { + publications { + pluginZip(MavenPublication) { publication -> + pom { + name = "opensearch-geospatial-h3" + description = 'OpenSearch Geospatial H3 library' + licenses { + license { + name = "The Apache License, Version 2.0" + url = "http://www.apache.org/licenses/LICENSE-2.0.txt" + } + } + developers { + developer { + name = "OpenSearch" + url = "https://github.com/opensearch-project/geospatial/libs/h3" + } + } + } + } + } +} + +validatePluginZipPom.dependsOn(generatePomFileForNebulaPublication) +validateNebulaPom.dependsOn(generatePomFileForPluginZipPublication) diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/BaseCells.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/BaseCells.java new file mode 100644 index 00000000..5e5bd93c --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/BaseCells.java @@ -0,0 +1,656 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2018 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Base cell related lookup tables and access functions. + */ +final class BaseCells { + + private static class BaseCellData { + // "home" face and normalized ijk coordinates on that face + final int homeFace; + final int homeI; + final int homeJ; + final int homeK; + // is this base cell a pentagon? + final boolean isPentagon; + // if a pentagon, what are its two clockwise offset + final int[] cwOffsetPent; + + /// faces? + BaseCellData(int homeFace, int homeI, int homeJ, int homeK, boolean isPentagon, int[] cwOffsetPent) { + this.homeFace = homeFace; + this.homeI = homeI; + this.homeJ = homeJ; + this.homeK = homeK; + this.isPentagon = isPentagon; + this.cwOffsetPent = cwOffsetPent; + } + } + + /** + * Resolution 0 base cell data table. + *

+ * For each base cell, gives the "home" face and ijk+ coordinates on that face, + * whether or not the base cell is a pentagon. Additionally, if the base cell + * is a pentagon, the two cw offset rotation adjacent faces are given (-1 + * indicates that no cw offset rotation faces exist for this base cell). + */ + private static final BaseCellData[] baseCellData = new BaseCellData[] { + new BaseCellData(1, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 0 + new BaseCellData(2, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 1 + new BaseCellData(1, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 2 + new BaseCellData(2, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 3 + new BaseCellData(0, 2, 0, 0, true, new int[] { -1, -1 }), // base cell 4 + new BaseCellData(1, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 5 + new BaseCellData(1, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 6 + new BaseCellData(2, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 7 + new BaseCellData(0, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 8 + new BaseCellData(2, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 9 + new BaseCellData(1, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 10 + new BaseCellData(1, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 11 + new BaseCellData(3, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 12 + new BaseCellData(3, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 13 + new BaseCellData(11, 2, 0, 0, true, new int[] { 2, 6 }), // base cell 14 + new BaseCellData(4, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 15 + new BaseCellData(0, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 16 + new BaseCellData(6, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 17 + new BaseCellData(0, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 18 + new BaseCellData(2, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 19 + new BaseCellData(7, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 20 + new BaseCellData(2, 0, 0, 1, false, new 
int[] { 0, 0 }), // base cell 21 + new BaseCellData(0, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 22 + new BaseCellData(6, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 23 + new BaseCellData(10, 2, 0, 0, true, new int[] { 1, 5 }), // base cell 24 + new BaseCellData(6, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 25 + new BaseCellData(3, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 26 + new BaseCellData(11, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 27 + new BaseCellData(4, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 28 + new BaseCellData(3, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 29 + new BaseCellData(0, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 30 + new BaseCellData(4, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 31 + new BaseCellData(5, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 32 + new BaseCellData(0, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 33 + new BaseCellData(7, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 34 + new BaseCellData(11, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 35 + new BaseCellData(7, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 36 + new BaseCellData(10, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 37 + new BaseCellData(12, 2, 0, 0, true, new int[] { 3, 7 }), // base cell 38 + new BaseCellData(6, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 39 + new BaseCellData(7, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 40 + new BaseCellData(4, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 41 + new BaseCellData(3, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 42 + new BaseCellData(3, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 43 + new BaseCellData(4, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 44 + new BaseCellData(6, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 45 + new BaseCellData(11, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 46 + new BaseCellData(8, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 47 + new 
BaseCellData(5, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 48 + new BaseCellData(14, 2, 0, 0, true, new int[] { 0, 9 }), // base cell 49 + new BaseCellData(5, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 50 + new BaseCellData(12, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 51 + new BaseCellData(10, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 52 + new BaseCellData(4, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 53 + new BaseCellData(12, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 54 + new BaseCellData(7, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 55 + new BaseCellData(11, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 56 + new BaseCellData(10, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 57 + new BaseCellData(13, 2, 0, 0, true, new int[] { 4, 8 }), // base cell 58 + new BaseCellData(10, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 59 + new BaseCellData(11, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 60 + new BaseCellData(9, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 61 + new BaseCellData(8, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 62 + new BaseCellData(6, 2, 0, 0, true, new int[] { 11, 15 }), // base cell 63 + new BaseCellData(8, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 64 + new BaseCellData(9, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 65 + new BaseCellData(14, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 66 + new BaseCellData(5, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 67 + new BaseCellData(16, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 68 + new BaseCellData(8, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 69 + new BaseCellData(5, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 70 + new BaseCellData(12, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 71 + new BaseCellData(7, 2, 0, 0, true, new int[] { 12, 16 }), // base cell 72 + new BaseCellData(12, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 73 + new BaseCellData(10, 0, 1, 0, false, new 
int[] { 0, 0 }), // base cell 74 + new BaseCellData(9, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 75 + new BaseCellData(13, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 76 + new BaseCellData(16, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 77 + new BaseCellData(15, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 78 + new BaseCellData(15, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 79 + new BaseCellData(16, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 80 + new BaseCellData(14, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 81 + new BaseCellData(13, 1, 1, 0, false, new int[] { 0, 0 }), // base cell 82 + new BaseCellData(5, 2, 0, 0, true, new int[] { 10, 19 }), // base cell 83 + new BaseCellData(8, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 84 + new BaseCellData(14, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 85 + new BaseCellData(9, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 86 + new BaseCellData(14, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 87 + new BaseCellData(17, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 88 + new BaseCellData(12, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 89 + new BaseCellData(16, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 90 + new BaseCellData(17, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 91 + new BaseCellData(15, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 92 + new BaseCellData(16, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 93 + new BaseCellData(9, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 94 + new BaseCellData(15, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 95 + new BaseCellData(13, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 96 + new BaseCellData(8, 2, 0, 0, true, new int[] { 13, 17 }), // base cell 97 + new BaseCellData(13, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 98 + new BaseCellData(17, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 99 + new BaseCellData(19, 0, 1, 0, false, new int[] { 0, 0 }), // base 
cell 100 + new BaseCellData(14, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 101 + new BaseCellData(19, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 102 + new BaseCellData(17, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 103 + new BaseCellData(13, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 104 + new BaseCellData(17, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 105 + new BaseCellData(16, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 106 + new BaseCellData(9, 2, 0, 0, true, new int[] { 14, 18 }), // base cell 107 + new BaseCellData(15, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 108 + new BaseCellData(15, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 109 + new BaseCellData(18, 0, 1, 1, false, new int[] { 0, 0 }), // base cell 110 + new BaseCellData(18, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 111 + new BaseCellData(19, 0, 0, 1, false, new int[] { 0, 0 }), // base cell 112 + new BaseCellData(17, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 113 + new BaseCellData(19, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 114 + new BaseCellData(18, 0, 1, 0, false, new int[] { 0, 0 }), // base cell 115 + new BaseCellData(18, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 116 + new BaseCellData(19, 2, 0, 0, true, new int[] { -1, -1 }), // base cell 117 + new BaseCellData(19, 1, 0, 0, false, new int[] { 0, 0 }), // base cell 118 + new BaseCellData(18, 0, 0, 0, false, new int[] { 0, 0 }), // base cell 119 + new BaseCellData(19, 1, 0, 1, false, new int[] { 0, 0 }), // base cell 120 + new BaseCellData(18, 1, 0, 0, false, new int[] { 0, 0 }) // base cell 121 + }; + + /** + * base cell at a given ijk and required rotations into its system + */ + private static class BaseCellRotation { + final int baseCell; // base cell number + final int ccwRot60; // number of ccw 60 degree rotations relative to current + /// face + + BaseCellRotation(int baseCell, int ccwRot60) { + this.baseCell = baseCell; + this.ccwRot60 = ccwRot60; + } + 
} + + /** @brief Resolution 0 base cell lookup table for each face. + * + * Given the face number and a resolution 0 ijk+ coordinate in that face's + * face-centered ijk coordinate system, gives the base cell located at that + * coordinate and the number of 60 ccw rotations to rotate into that base + * cell's orientation. + * + * Valid lookup coordinates are from (0, 0, 0) to (2, 2, 2). + * + * This table can be accessed using the functions `_faceIjkToBaseCell` and + * `_faceIjkToBaseCellCCWrot60` + */ + private static final BaseCellRotation[][][][] faceIjkBaseCells = new BaseCellRotation[][][][] { + {// face 0 + { + // i 0 + { new BaseCellRotation(16, 0), new BaseCellRotation(18, 0), new BaseCellRotation(24, 0) }, // j 0 + { new BaseCellRotation(33, 0), new BaseCellRotation(30, 0), new BaseCellRotation(32, 3) }, // j 1 + { new BaseCellRotation(49, 1), new BaseCellRotation(48, 3), new BaseCellRotation(50, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(8, 0), new BaseCellRotation(5, 5), new BaseCellRotation(10, 5) }, // j 0 + { new BaseCellRotation(22, 0), new BaseCellRotation(16, 0), new BaseCellRotation(18, 0) }, // j 1 + { new BaseCellRotation(41, 1), new BaseCellRotation(33, 0), new BaseCellRotation(30, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(4, 0), new BaseCellRotation(0, 5), new BaseCellRotation(2, 5) }, // j 0 + { new BaseCellRotation(15, 1), new BaseCellRotation(8, 0), new BaseCellRotation(5, 5) }, // j 1 + { new BaseCellRotation(31, 1), new BaseCellRotation(22, 0), new BaseCellRotation(16, 0) } // j 2 + } }, + {// face 1 + { + // i 0 + { new BaseCellRotation(2, 0), new BaseCellRotation(6, 0), new BaseCellRotation(14, 0) }, // j 0 + { new BaseCellRotation(10, 0), new BaseCellRotation(11, 0), new BaseCellRotation(17, 3) }, // j 1 + { new BaseCellRotation(24, 1), new BaseCellRotation(23, 3), new BaseCellRotation(25, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(0, 0), new BaseCellRotation(1, 5), new BaseCellRotation(9, 5) }, // 
j 0 + { new BaseCellRotation(5, 0), new BaseCellRotation(2, 0), new BaseCellRotation(6, 0) }, // j 1 + { new BaseCellRotation(18, 1), new BaseCellRotation(10, 0), new BaseCellRotation(11, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(4, 1), new BaseCellRotation(3, 5), new BaseCellRotation(7, 5) }, // j 0 + { new BaseCellRotation(8, 1), new BaseCellRotation(0, 0), new BaseCellRotation(1, 5) }, // j 1 + { new BaseCellRotation(16, 1), new BaseCellRotation(5, 0), new BaseCellRotation(2, 0) } // j 2 + } }, + {// face 2 + { + // i 0 + { new BaseCellRotation(7, 0), new BaseCellRotation(21, 0), new BaseCellRotation(38, 0) }, // j 0 + { new BaseCellRotation(9, 0), new BaseCellRotation(19, 0), new BaseCellRotation(34, 3) }, // j 1 + { new BaseCellRotation(14, 1), new BaseCellRotation(20, 3), new BaseCellRotation(36, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(3, 0), new BaseCellRotation(13, 5), new BaseCellRotation(29, 5) }, // j 0 + { new BaseCellRotation(1, 0), new BaseCellRotation(7, 0), new BaseCellRotation(21, 0) }, // j 1 + { new BaseCellRotation(6, 1), new BaseCellRotation(9, 0), new BaseCellRotation(19, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(4, 2), new BaseCellRotation(12, 5), new BaseCellRotation(26, 5) }, // j 0 + { new BaseCellRotation(0, 1), new BaseCellRotation(3, 0), new BaseCellRotation(13, 5) }, // j 1 + { new BaseCellRotation(2, 1), new BaseCellRotation(1, 0), new BaseCellRotation(7, 0) } // j 2 + } }, + {// face 3 + { + // i 0 + { new BaseCellRotation(26, 0), new BaseCellRotation(42, 0), new BaseCellRotation(58, 0) }, // j 0 + { new BaseCellRotation(29, 0), new BaseCellRotation(43, 0), new BaseCellRotation(62, 3) }, // j 1 + { new BaseCellRotation(38, 1), new BaseCellRotation(47, 3), new BaseCellRotation(64, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(12, 0), new BaseCellRotation(28, 5), new BaseCellRotation(44, 5) }, // j 0 + { new BaseCellRotation(13, 0), new BaseCellRotation(26, 0), new BaseCellRotation(42, 
0) }, // j 1 + { new BaseCellRotation(21, 1), new BaseCellRotation(29, 0), new BaseCellRotation(43, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(4, 3), new BaseCellRotation(15, 5), new BaseCellRotation(31, 5) }, // j 0 + { new BaseCellRotation(3, 1), new BaseCellRotation(12, 0), new BaseCellRotation(28, 5) }, // j 1 + { new BaseCellRotation(7, 1), new BaseCellRotation(13, 0), new BaseCellRotation(26, 0) } // j 2 + } }, + {// face 4 + { + // i 0 + { new BaseCellRotation(31, 0), new BaseCellRotation(41, 0), new BaseCellRotation(49, 0) }, // j 0 + { new BaseCellRotation(44, 0), new BaseCellRotation(53, 0), new BaseCellRotation(61, 3) }, // j 1 + { new BaseCellRotation(58, 1), new BaseCellRotation(65, 3), new BaseCellRotation(75, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(15, 0), new BaseCellRotation(22, 5), new BaseCellRotation(33, 5) }, // j 0 + { new BaseCellRotation(28, 0), new BaseCellRotation(31, 0), new BaseCellRotation(41, 0) }, // j 1 + { new BaseCellRotation(42, 1), new BaseCellRotation(44, 0), new BaseCellRotation(53, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(4, 4), new BaseCellRotation(8, 5), new BaseCellRotation(16, 5) }, // j 0 + { new BaseCellRotation(12, 1), new BaseCellRotation(15, 0), new BaseCellRotation(22, 5) }, // j 1 + { new BaseCellRotation(26, 1), new BaseCellRotation(28, 0), new BaseCellRotation(31, 0) } // j 2 + } }, + {// face 5 + { + // i 0 + { new BaseCellRotation(50, 0), new BaseCellRotation(48, 0), new BaseCellRotation(49, 3) }, // j 0 + { new BaseCellRotation(32, 0), new BaseCellRotation(30, 3), new BaseCellRotation(33, 3) }, // j 1 + { new BaseCellRotation(24, 3), new BaseCellRotation(18, 3), new BaseCellRotation(16, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(70, 0), new BaseCellRotation(67, 0), new BaseCellRotation(66, 3) }, // j 0 + { new BaseCellRotation(52, 3), new BaseCellRotation(50, 0), new BaseCellRotation(48, 0) }, // j 1 + { new BaseCellRotation(37, 3), new BaseCellRotation(32, 
0), new BaseCellRotation(30, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(83, 0), new BaseCellRotation(87, 3), new BaseCellRotation(85, 3) }, // j 0 + { new BaseCellRotation(74, 3), new BaseCellRotation(70, 0), new BaseCellRotation(67, 0) }, // j 1 + { new BaseCellRotation(57, 1), new BaseCellRotation(52, 3), new BaseCellRotation(50, 0) } // j 2 + } }, + {// face 6 + { + // i 0 + { new BaseCellRotation(25, 0), new BaseCellRotation(23, 0), new BaseCellRotation(24, 3) }, // j 0 + { new BaseCellRotation(17, 0), new BaseCellRotation(11, 3), new BaseCellRotation(10, 3) }, // j 1 + { new BaseCellRotation(14, 3), new BaseCellRotation(6, 3), new BaseCellRotation(2, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(45, 0), new BaseCellRotation(39, 0), new BaseCellRotation(37, 3) }, // j 0 + { new BaseCellRotation(35, 3), new BaseCellRotation(25, 0), new BaseCellRotation(23, 0) }, // j 1 + { new BaseCellRotation(27, 3), new BaseCellRotation(17, 0), new BaseCellRotation(11, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(63, 0), new BaseCellRotation(59, 3), new BaseCellRotation(57, 3) }, // j 0 + { new BaseCellRotation(56, 3), new BaseCellRotation(45, 0), new BaseCellRotation(39, 0) }, // j 1 + { new BaseCellRotation(46, 3), new BaseCellRotation(35, 3), new BaseCellRotation(25, 0) } // j 2 + } }, + {// face 7 + { + // i 0 + { new BaseCellRotation(36, 0), new BaseCellRotation(20, 0), new BaseCellRotation(14, 3) }, // j 0 + { new BaseCellRotation(34, 0), new BaseCellRotation(19, 3), new BaseCellRotation(9, 3) }, // j 1 + { new BaseCellRotation(38, 3), new BaseCellRotation(21, 3), new BaseCellRotation(7, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(55, 0), new BaseCellRotation(40, 0), new BaseCellRotation(27, 3) }, // j 0 + { new BaseCellRotation(54, 3), new BaseCellRotation(36, 0), new BaseCellRotation(20, 0) }, // j 1 + { new BaseCellRotation(51, 3), new BaseCellRotation(34, 0), new BaseCellRotation(19, 3) } // j 2 + }, + { + // i 2 + { new 
BaseCellRotation(72, 0), new BaseCellRotation(60, 3), new BaseCellRotation(46, 3) }, // j 0 + { new BaseCellRotation(73, 3), new BaseCellRotation(55, 0), new BaseCellRotation(40, 0) }, // j 1 + { new BaseCellRotation(71, 3), new BaseCellRotation(54, 3), new BaseCellRotation(36, 0) } // j 2 + } }, + {// face 8 + { + // i 0 + { new BaseCellRotation(64, 0), new BaseCellRotation(47, 0), new BaseCellRotation(38, 3) }, // j 0 + { new BaseCellRotation(62, 0), new BaseCellRotation(43, 3), new BaseCellRotation(29, 3) }, // j 1 + { new BaseCellRotation(58, 3), new BaseCellRotation(42, 3), new BaseCellRotation(26, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(84, 0), new BaseCellRotation(69, 0), new BaseCellRotation(51, 3) }, // j 0 + { new BaseCellRotation(82, 3), new BaseCellRotation(64, 0), new BaseCellRotation(47, 0) }, // j 1 + { new BaseCellRotation(76, 3), new BaseCellRotation(62, 0), new BaseCellRotation(43, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(97, 0), new BaseCellRotation(89, 3), new BaseCellRotation(71, 3) }, // j 0 + { new BaseCellRotation(98, 3), new BaseCellRotation(84, 0), new BaseCellRotation(69, 0) }, // j 1 + { new BaseCellRotation(96, 3), new BaseCellRotation(82, 3), new BaseCellRotation(64, 0) } // j 2 + } }, + {// face 9 + { + // i 0 + { new BaseCellRotation(75, 0), new BaseCellRotation(65, 0), new BaseCellRotation(58, 3) }, // j 0 + { new BaseCellRotation(61, 0), new BaseCellRotation(53, 3), new BaseCellRotation(44, 3) }, // j 1 + { new BaseCellRotation(49, 3), new BaseCellRotation(41, 3), new BaseCellRotation(31, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(94, 0), new BaseCellRotation(86, 0), new BaseCellRotation(76, 3) }, // j 0 + { new BaseCellRotation(81, 3), new BaseCellRotation(75, 0), new BaseCellRotation(65, 0) }, // j 1 + { new BaseCellRotation(66, 3), new BaseCellRotation(61, 0), new BaseCellRotation(53, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(107, 0), new BaseCellRotation(104, 3), new 
BaseCellRotation(96, 3) }, // j 0 + { new BaseCellRotation(101, 3), new BaseCellRotation(94, 0), new BaseCellRotation(86, 0) }, // j 1 + { new BaseCellRotation(85, 3), new BaseCellRotation(81, 3), new BaseCellRotation(75, 0) } // j 2 + } }, + {// face 10 + { + // i 0 + { new BaseCellRotation(57, 0), new BaseCellRotation(59, 0), new BaseCellRotation(63, 3) }, // j 0 + { new BaseCellRotation(74, 0), new BaseCellRotation(78, 3), new BaseCellRotation(79, 3) }, // j 1 + { new BaseCellRotation(83, 3), new BaseCellRotation(92, 3), new BaseCellRotation(95, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(37, 0), new BaseCellRotation(39, 3), new BaseCellRotation(45, 3) }, // j 0 + { new BaseCellRotation(52, 0), new BaseCellRotation(57, 0), new BaseCellRotation(59, 0) }, // j 1 + { new BaseCellRotation(70, 3), new BaseCellRotation(74, 0), new BaseCellRotation(78, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(24, 0), new BaseCellRotation(23, 3), new BaseCellRotation(25, 3) }, // j 0 + { new BaseCellRotation(32, 3), new BaseCellRotation(37, 0), new BaseCellRotation(39, 3) }, // j 1 + { new BaseCellRotation(50, 3), new BaseCellRotation(52, 0), new BaseCellRotation(57, 0) } // j 2 + } }, + {// face 11 + { + // i 0 + { new BaseCellRotation(46, 0), new BaseCellRotation(60, 0), new BaseCellRotation(72, 3) }, // j 0 + { new BaseCellRotation(56, 0), new BaseCellRotation(68, 3), new BaseCellRotation(80, 3) }, // j 1 + { new BaseCellRotation(63, 3), new BaseCellRotation(77, 3), new BaseCellRotation(90, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(27, 0), new BaseCellRotation(40, 3), new BaseCellRotation(55, 3) }, // j 0 + { new BaseCellRotation(35, 0), new BaseCellRotation(46, 0), new BaseCellRotation(60, 0) }, // j 1 + { new BaseCellRotation(45, 3), new BaseCellRotation(56, 0), new BaseCellRotation(68, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(14, 0), new BaseCellRotation(20, 3), new BaseCellRotation(36, 3) }, // j 0 + { new BaseCellRotation(17, 
3), new BaseCellRotation(27, 0), new BaseCellRotation(40, 3) }, // j 1 + { new BaseCellRotation(25, 3), new BaseCellRotation(35, 0), new BaseCellRotation(46, 0) } // j 2 + } }, + {// face 12 + { + // i 0 + { new BaseCellRotation(71, 0), new BaseCellRotation(89, 0), new BaseCellRotation(97, 3) }, // j 0 + { new BaseCellRotation(73, 0), new BaseCellRotation(91, 3), new BaseCellRotation(103, 3) }, // j 1 + { new BaseCellRotation(72, 3), new BaseCellRotation(88, 3), new BaseCellRotation(105, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(51, 0), new BaseCellRotation(69, 3), new BaseCellRotation(84, 3) }, // j 0 + { new BaseCellRotation(54, 0), new BaseCellRotation(71, 0), new BaseCellRotation(89, 0) }, // j 1 + { new BaseCellRotation(55, 3), new BaseCellRotation(73, 0), new BaseCellRotation(91, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(38, 0), new BaseCellRotation(47, 3), new BaseCellRotation(64, 3) }, // j 0 + { new BaseCellRotation(34, 3), new BaseCellRotation(51, 0), new BaseCellRotation(69, 3) }, // j 1 + { new BaseCellRotation(36, 3), new BaseCellRotation(54, 0), new BaseCellRotation(71, 0) } // j 2 + } }, + {// face 13 + { + // i 0 + { new BaseCellRotation(96, 0), new BaseCellRotation(104, 0), new BaseCellRotation(107, 3) }, // j 0 + { new BaseCellRotation(98, 0), new BaseCellRotation(110, 3), new BaseCellRotation(115, 3) }, // j 1 + { new BaseCellRotation(97, 3), new BaseCellRotation(111, 3), new BaseCellRotation(119, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(76, 0), new BaseCellRotation(86, 3), new BaseCellRotation(94, 3) }, // j 0 + { new BaseCellRotation(82, 0), new BaseCellRotation(96, 0), new BaseCellRotation(104, 0) }, // j 1 + { new BaseCellRotation(84, 3), new BaseCellRotation(98, 0), new BaseCellRotation(110, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(58, 0), new BaseCellRotation(65, 3), new BaseCellRotation(75, 3) }, // j 0 + { new BaseCellRotation(62, 3), new BaseCellRotation(76, 0), new 
BaseCellRotation(86, 3) }, // j 1 + { new BaseCellRotation(64, 3), new BaseCellRotation(82, 0), new BaseCellRotation(96, 0) } // j 2 + } }, + {// face 14 + { + // i 0 + { new BaseCellRotation(85, 0), new BaseCellRotation(87, 0), new BaseCellRotation(83, 3) }, // j 0 + { new BaseCellRotation(101, 0), new BaseCellRotation(102, 3), new BaseCellRotation(100, 3) }, // j 1 + { new BaseCellRotation(107, 3), new BaseCellRotation(112, 3), new BaseCellRotation(114, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(66, 0), new BaseCellRotation(67, 3), new BaseCellRotation(70, 3) }, // j 0 + { new BaseCellRotation(81, 0), new BaseCellRotation(85, 0), new BaseCellRotation(87, 0) }, // j 1 + { new BaseCellRotation(94, 3), new BaseCellRotation(101, 0), new BaseCellRotation(102, 3) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(49, 0), new BaseCellRotation(48, 3), new BaseCellRotation(50, 3) }, // j 0 + { new BaseCellRotation(61, 3), new BaseCellRotation(66, 0), new BaseCellRotation(67, 3) }, // j 1 + { new BaseCellRotation(75, 3), new BaseCellRotation(81, 0), new BaseCellRotation(85, 0) } // j 2 + } }, + {// face 15 + { + // i 0 + { new BaseCellRotation(95, 0), new BaseCellRotation(92, 0), new BaseCellRotation(83, 0) }, // j 0 + { new BaseCellRotation(79, 0), new BaseCellRotation(78, 0), new BaseCellRotation(74, 3) }, // j 1 + { new BaseCellRotation(63, 1), new BaseCellRotation(59, 3), new BaseCellRotation(57, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(109, 0), new BaseCellRotation(108, 0), new BaseCellRotation(100, 5) }, // j 0 + { new BaseCellRotation(93, 1), new BaseCellRotation(95, 0), new BaseCellRotation(92, 0) }, // j 1 + { new BaseCellRotation(77, 1), new BaseCellRotation(79, 0), new BaseCellRotation(78, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(117, 4), new BaseCellRotation(118, 5), new BaseCellRotation(114, 5) }, // j 0 + { new BaseCellRotation(106, 1), new BaseCellRotation(109, 0), new BaseCellRotation(108, 0) }, // j 1 + { new 
BaseCellRotation(90, 1), new BaseCellRotation(93, 1), new BaseCellRotation(95, 0) } // j 2 + } }, + {// face 16 + { + // i 0 + { new BaseCellRotation(90, 0), new BaseCellRotation(77, 0), new BaseCellRotation(63, 0) }, // j 0 + { new BaseCellRotation(80, 0), new BaseCellRotation(68, 0), new BaseCellRotation(56, 3) }, // j 1 + { new BaseCellRotation(72, 1), new BaseCellRotation(60, 3), new BaseCellRotation(46, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(106, 0), new BaseCellRotation(93, 0), new BaseCellRotation(79, 5) }, // j 0 + { new BaseCellRotation(99, 1), new BaseCellRotation(90, 0), new BaseCellRotation(77, 0) }, // j 1 + { new BaseCellRotation(88, 1), new BaseCellRotation(80, 0), new BaseCellRotation(68, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(117, 3), new BaseCellRotation(109, 5), new BaseCellRotation(95, 5) }, // j 0 + { new BaseCellRotation(113, 1), new BaseCellRotation(106, 0), new BaseCellRotation(93, 0) }, // j 1 + { new BaseCellRotation(105, 1), new BaseCellRotation(99, 1), new BaseCellRotation(90, 0) } // j 2 + } }, + {// face 17 + { + // i 0 + { new BaseCellRotation(105, 0), new BaseCellRotation(88, 0), new BaseCellRotation(72, 0) }, // j 0 + { new BaseCellRotation(103, 0), new BaseCellRotation(91, 0), new BaseCellRotation(73, 3) }, // j 1 + { new BaseCellRotation(97, 1), new BaseCellRotation(89, 3), new BaseCellRotation(71, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(113, 0), new BaseCellRotation(99, 0), new BaseCellRotation(80, 5) }, // j 0 + { new BaseCellRotation(116, 1), new BaseCellRotation(105, 0), new BaseCellRotation(88, 0) }, // j 1 + { new BaseCellRotation(111, 1), new BaseCellRotation(103, 0), new BaseCellRotation(91, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(117, 2), new BaseCellRotation(106, 5), new BaseCellRotation(90, 5) }, // j 0 + { new BaseCellRotation(121, 1), new BaseCellRotation(113, 0), new BaseCellRotation(99, 0) }, // j 1 + { new BaseCellRotation(119, 1), new 
BaseCellRotation(116, 1), new BaseCellRotation(105, 0) } // j 2 + } }, + {// face 18 + { + // i 0 + { new BaseCellRotation(119, 0), new BaseCellRotation(111, 0), new BaseCellRotation(97, 0) }, // j 0 + { new BaseCellRotation(115, 0), new BaseCellRotation(110, 0), new BaseCellRotation(98, 3) }, // j 1 + { new BaseCellRotation(107, 1), new BaseCellRotation(104, 3), new BaseCellRotation(96, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(121, 0), new BaseCellRotation(116, 0), new BaseCellRotation(103, 5) }, // j 0 + { new BaseCellRotation(120, 1), new BaseCellRotation(119, 0), new BaseCellRotation(111, 0) }, // j 1 + { new BaseCellRotation(112, 1), new BaseCellRotation(115, 0), new BaseCellRotation(110, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(117, 1), new BaseCellRotation(113, 5), new BaseCellRotation(105, 5) }, // j 0 + { new BaseCellRotation(118, 1), new BaseCellRotation(121, 0), new BaseCellRotation(116, 0) }, // j 1 + { new BaseCellRotation(114, 1), new BaseCellRotation(120, 1), new BaseCellRotation(119, 0) } // j 2 + } }, + {// face 19 + { + // i 0 + { new BaseCellRotation(114, 0), new BaseCellRotation(112, 0), new BaseCellRotation(107, 0) }, // j 0 + { new BaseCellRotation(100, 0), new BaseCellRotation(102, 0), new BaseCellRotation(101, 3) }, // j 1 + { new BaseCellRotation(83, 1), new BaseCellRotation(87, 3), new BaseCellRotation(85, 3) } // j 2 + }, + { + // i 1 + { new BaseCellRotation(118, 0), new BaseCellRotation(120, 0), new BaseCellRotation(115, 5) }, // j 0 + { new BaseCellRotation(108, 1), new BaseCellRotation(114, 0), new BaseCellRotation(112, 0) }, // j 1 + { new BaseCellRotation(92, 1), new BaseCellRotation(100, 0), new BaseCellRotation(102, 0) } // j 2 + }, + { + // i 2 + { new BaseCellRotation(117, 0), new BaseCellRotation(121, 5), new BaseCellRotation(119, 5) }, // j 0 + { new BaseCellRotation(109, 1), new BaseCellRotation(118, 0), new BaseCellRotation(120, 0) }, // j 1 + { new BaseCellRotation(95, 1), new 
BaseCellRotation(108, 1), new BaseCellRotation(114, 0) } // j 2 + } } }; + + /** + * Return whether or not the indicated base cell is a pentagon. + */ + public static boolean isBaseCellPentagon(int baseCell) { + if (baseCell < 0 || baseCell >= Constants.NUM_BASE_CELLS) { // LCOV_EXCL_BR_LINE + // Base cells less than zero can not be represented in an index + return false; + } + return baseCellData[baseCell].isPentagon; + } + + /** + * Return whether or not the indicated base cell is a pentagon. + */ + public static FaceIJK getBaseFaceIJK(int baseCell) { + if (baseCell < 0 || baseCell >= Constants.NUM_BASE_CELLS) { // LCOV_EXCL_BR_LINE + // Base cells less than zero can not be represented in an index + throw new IllegalArgumentException("Illegal base cell"); + } + BaseCellData cellData = baseCellData[baseCell]; + return new FaceIJK(cellData.homeFace, new CoordIJK(cellData.homeI, cellData.homeJ, cellData.homeK)); + } + + /** Find base cell given FaceIJK. + * + * Given the face number and a resolution 0 ijk+ coordinate in that face's + * face-centered ijk coordinate system, return the base cell located at that + * coordinate. + * + * Valid ijk+ lookup coordinates are from (0, 0, 0) to (2, 2, 2). + */ + public static int getBaseCell(FaceIJK faceIJK) { + return faceIjkBaseCells[faceIJK.face][faceIJK.coord.i][faceIJK.coord.j][faceIJK.coord.k].baseCell; + } + + /** Find base cell given FaceIJK. + * + * Given the face number and a resolution 0 ijk+ coordinate in that face's + * face-centered ijk coordinate system, return the number of 60' ccw rotations + * to rotate into the coordinate system of the base cell at that coordinates. + * + * Valid ijk+ lookup coordinates are from (0, 0, 0) to (2, 2, 2). + */ + public static int getBaseCellCCWrot60(FaceIJK faceIJK) { + return faceIjkBaseCells[faceIJK.face][faceIJK.coord.i][faceIJK.coord.j][faceIJK.coord.k].ccwRot60; + } + + /** Return whether or not the tested face is a cw offset face. 
+ */ + public static boolean baseCellIsCwOffset(int baseCell, int testFace) { + return baseCellData[baseCell].cwOffsetPent[0] == testFace || baseCellData[baseCell].cwOffsetPent[1] == testFace; + } + + /** Return whether the indicated base cell is a pentagon where all + * neighbors are oriented towards it. */ + public static boolean isBaseCellPolarPentagon(int baseCell) { + return baseCell == 4 || baseCell == 117; + } + +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/CellBoundary.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/CellBoundary.java new file mode 100644 index 00000000..72dbd832 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/CellBoundary.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2021 Uber Technologies, Inc. 
+ */ +package org.opensearch.geospatial.h3; + +/** + * cell boundary points as {@link LatLng} + */ +public final class CellBoundary { + + /** Maximum number of cell boundary vertices; worst case is pentagon: + * 5 original verts + 5 edge crossings + */ + private static final int MAX_CELL_BNDRY_VERTS = 10; + /** How many points it holds */ + private int numVertext; + /** The actual points */ + private final LatLng[] points = new LatLng[MAX_CELL_BNDRY_VERTS]; + + CellBoundary() {} + + void add(LatLng point) { + points[numVertext++] = point; + } + + /** Number of points in this boundary */ + public int numPoints() { + return numVertext; + } + + /** Return the point at the given position*/ + public LatLng getLatLon(int i) { + if (i >= numVertext) { + throw new IndexOutOfBoundsException(); + } + return points[i]; + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/Constants.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Constants.java new file mode 100644 index 00000000..6164eec8 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Constants.java @@ -0,0 +1,81 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2017, 2020 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Constants used by more than one source code file. + */ +final class Constants { + /** + * sqrt(3) / 2.0 + */ + public static double M_SQRT3_2 = 0.8660254037844386467637231707529361834714; + /** + * H3 version 1 has 16 resolutions, numbered 0 through 15 + * min H3 resolution + */ + public static int MIN_H3_RES = 0; + /** + * max H3 resolution; + */ + public static int MAX_H3_RES = 15; + /** + * The number of H3 base cells + */ + public static int NUM_BASE_CELLS = 122; + /** + * The number of vertices in a hexagon + */ + public static int NUM_HEX_VERTS = 6; + /** + * The number of vertices in a pentagon + */ + public static int NUM_PENT_VERTS = 5; + /** + * H3 index modes + */ + public static int H3_CELL_MODE = 1; + /** + * square root of 7 + */ + public static final double M_SQRT7 = 2.6457513110645905905016157536392604257102; + /** + * scaling factor from hex2d resolution 0 unit length + * (or distance between adjacent cell center points + * on the plane) to gnomonic unit length. 
+ */ + public static double RES0_U_GNOMONIC = 0.38196601125010500003; + /** + * rotation angle between Class II and Class III resolution axes + * (asin(sqrt(3.0 / 28.0))) + */ + public static double M_AP7_ROT_RADS = 0.333473172251832115336090755351601070065900389; + /** + * threshold epsilon + */ + public static double EPSILON = 0.0000000000000001; +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/CoordIJK.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/CoordIJK.java new file mode 100644 index 00000000..afe4dd1b --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/CoordIJK.java @@ -0,0 +1,398 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2018, 2020-2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Mutable IJK hexagon coordinates + * + * Each axis is spaced 120 degrees apart. + * + * References two Vec2d cartesian coordinate systems: + * + * 1. 
gnomonic: face-centered polyhedral gnomonic projection space with + * traditional scaling and x-axes aligned with the face Class II + * i-axes. + * + * 2. hex2d: local face-centered coordinate system scaled a specific H3 grid + * resolution unit length and with x-axes aligned with the local + * i-axes + */ +final class CoordIJK { + + /** CoordIJK unit vectors corresponding to the 7 H3 digits. + */ + private static final int[][] UNIT_VECS = { + { 0, 0, 0 }, // direction 0 + { 0, 0, 1 }, // direction 1 + { 0, 1, 0 }, // direction 2 + { 0, 1, 1 }, // direction 3 + { 1, 0, 0 }, // direction 4 + { 1, 0, 1 }, // direction 5 + { 1, 1, 0 } // direction 6 + }; + + /** H3 digit representing ijk+ axes direction. + * Values will be within the lowest 3 bits of an integer. + */ + public enum Direction { + + CENTER_DIGIT(0), + K_AXES_DIGIT(1), + J_AXES_DIGIT(2), + JK_AXES_DIGIT(J_AXES_DIGIT.digit() | K_AXES_DIGIT.digit()), + I_AXES_DIGIT(4), + IK_AXES_DIGIT(I_AXES_DIGIT.digit() | K_AXES_DIGIT.digit()), + IJ_AXES_DIGIT(I_AXES_DIGIT.digit() | J_AXES_DIGIT.digit()), + INVALID_DIGIT(7), + NUM_DIGITS(INVALID_DIGIT.digit()), + PENTAGON_SKIPPED_DIGIT(K_AXES_DIGIT.digit()); + + Direction(int digit) { + this.digit = digit; + } + + private final int digit; + + public int digit() { + return digit; + } + + } + + int i; // i component + int j; // j component + int k; // k component + + CoordIJK(int i, int j, int k) { + this.i = i; + this.j = j; + this.k = k; + } + + /** + * Find the center point in 2D cartesian coordinates of a hex. + * + */ + public Vec2d ijkToHex2d() { + int i = this.i - this.k; + int j = this.j - this.k; + return new Vec2d(i - 0.5 * j, j * Constants.M_SQRT3_2); + } + + /** + * Add ijk coordinates. + * + * @param i the i coordinate + * @param j the j coordinate + * @param k the k coordinate + */ + + public void ijkAdd(int i, int j, int k) { + this.i += i; + this.j += j; + this.k += k; + } + + /** + * Subtract ijk coordinates. 
+ * + * @param i the i coordinate + * @param j the j coordinate + * @param k the k coordinate + */ + public void ijkSub(int i, int j, int k) { + this.i -= i; + this.j -= j; + this.k -= k; + } + + /** + * Normalizes ijk coordinates by setting the ijk coordinates + * to the smallest possible values. + */ + public void ijkNormalize() { + // remove any negative values + if (i < 0) { + j -= i; + k -= i; + i = 0; + } + + if (j < 0) { + i -= j; + k -= j; + j = 0; + } + + if (k < 0) { + i -= k; + j -= k; + k = 0; + } + + // remove the min value if needed + int min = i; + if (j < min) { + min = j; + } + if (k < min) { + min = k; + } + if (min > 0) { + i -= min; + j -= min; + k -= min; + } + } + + /** + * Find the normalized ijk coordinates of the hex centered on the current + * hex at the next finer aperture 7 counter-clockwise resolution. + */ + public void downAp7() { + // res r unit vectors in res r+1 + // iVec (3, 0, 1) + // jVec (1, 3, 0) + // kVec (0, 1, 3) + final int i = this.i * 3 + this.j * 1 + this.k * 0; + final int j = this.i * 0 + this.j * 3 + this.k * 1; + final int k = this.i * 1 + this.j * 0 + this.k * 3; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Find the normalized ijk coordinates of the hex centered on the current + * hex at the next finer aperture 7 clockwise resolution. + */ + public void downAp7r() { + // iVec (3, 1, 0) + // jVec (0, 3, 1) + // kVec (1, 0, 3) + final int i = this.i * 3 + this.j * 0 + this.k * 1; + final int j = this.i * 1 + this.j * 3 + this.k * 0; + final int k = this.i * 0 + this.j * 1 + this.k * 3; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Find the normalized ijk coordinates of the hex centered on the current + * hex at the next finer aperture 3 counter-clockwise resolution. 
+ */ + public void downAp3() { + // res r unit vectors in res r+1 + // iVec (2, 0, 1) + // jVec (1, 2, 0) + // kVec (0, 1, 2) + final int i = this.i * 2 + this.j * 1 + this.k * 0; + final int j = this.i * 0 + this.j * 2 + this.k * 1; + final int k = this.i * 1 + this.j * 0 + this.k * 2; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Find the normalized ijk coordinates of the hex centered on the current + * hex at the next finer aperture 3 clockwise resolution. + */ + public void downAp3r() { + // res r unit vectors in res r+1 + // iVec (2, 1, 0) + // jVec (0, 2, 1) + // kVec (1, 0, 2) + final int i = this.i * 2 + this.j * 0 + this.k * 1; + final int j = this.i * 1 + this.j * 2 + this.k * 0; + final int k = this.i * 0 + this.j * 1 + this.k * 2; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Rotates ijk coordinates 60 degrees clockwise. + * + */ + public void ijkRotate60cw() { + // unit vector rotations + // iVec (1, 0, 1) + // jVec (1, 1, 0) + // kVec (0, 1, 1) + final int i = this.i * 1 + this.j * 1 + this.k * 0; + final int j = this.i * 0 + this.j * 1 + this.k * 1; + final int k = this.i * 1 + this.j * 0 + this.k * 1; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Rotates ijk coordinates 60 degrees counter-clockwise. + */ + public void ijkRotate60ccw() { + // unit vector rotations + // iVec (1, 1, 0) + // jVec (0, 1, 1) + // kVec (1, 0, 1) + final int i = this.i * 1 + this.j * 0 + this.k * 1; + final int j = this.i * 1 + this.j * 1 + this.k * 0; + final int k = this.i * 0 + this.j * 1 + this.k * 1; + this.i = i; + this.j = j; + this.k = k; + ijkNormalize(); + } + + /** + * Find the normalized ijk coordinates of the hex in the specified digit + * direction from the current ijk coordinates. + * @param digit The digit direction from the original ijk coordinates. 
+ */ + public void neighbor(int digit) { + if (digit > Direction.CENTER_DIGIT.digit() && digit < Direction.NUM_DIGITS.digit()) { + ijkAdd(UNIT_VECS[digit][0], UNIT_VECS[digit][1], UNIT_VECS[digit][2]); + ijkNormalize(); + } + } + + /** + * Find the normalized ijk coordinates of the indexing parent of a cell in a + * clockwise aperture 7 grid. + */ + public void upAp7r() { + i = this.i - this.k; + j = this.j - this.k; + int i = (int) Math.round((2 * this.i + this.j) / 7.0); + int j = (int) Math.round((3 * this.j - this.i) / 7.0); + this.i = i; + this.j = j; + this.k = 0; + ijkNormalize(); + } + + /** + * Find the normalized ijk coordinates of the indexing parent of a cell in a + * counter-clockwise aperture 7 grid. + * + */ + public void upAp7() { + i = this.i - this.k; + j = this.j - this.k; + int i = (int) Math.round((3 * this.i - this.j) / 7.0); + int j = (int) Math.round((this.i + 2 * this.j) / 7.0); + this.i = i; + this.j = j; + this.k = 0; + ijkNormalize(); + } + + /** + * Determines the H3 digit corresponding to a unit vector in ijk coordinates. + * + * @return The H3 digit (0-6) corresponding to the ijk unit vector, or + * INVALID_DIGIT on failure. + */ + public int unitIjkToDigit() { + ijkNormalize(); + int digit = Direction.INVALID_DIGIT.digit(); + for (int i = Direction.CENTER_DIGIT.digit(); i < Direction.NUM_DIGITS.digit(); i++) { + if (ijkMatches(UNIT_VECS[i])) { + digit = i; + break; + } + } + return digit; + } + + /** + * Returns whether or not two ijk coordinates contain exactly the same + * component values. + * + * @param c The set of ijk coordinates. + * @return true if the two addresses match, 0 if they do not. + */ + private boolean ijkMatches(int[] c) { + return (i == c[0] && j == c[1] && k == c[2]); + } + + /** + * Rotates indexing digit 60 degrees clockwise. Returns result. 
+ * + * @param digit Indexing digit (between 1 and 6 inclusive) + */ + public static int rotate60cw(int digit) { + switch (digit) { + case 1: // K_AXES_DIGIT + return Direction.JK_AXES_DIGIT.digit(); + case 3: // JK_AXES_DIGIT: + return Direction.J_AXES_DIGIT.digit(); + case 2: // J_AXES_DIGIT: + return Direction.IJ_AXES_DIGIT.digit(); + case 6: // IJ_AXES_DIGIT + return Direction.I_AXES_DIGIT.digit(); + case 4: // I_AXES_DIGIT + return Direction.IK_AXES_DIGIT.digit(); + case 5: // IK_AXES_DIGIT + return Direction.K_AXES_DIGIT.digit(); + default: + return digit; + } + } + + /** + * Rotates indexing digit 60 degrees counter-clockwise. Returns result. + * + * @param digit Indexing digit (between 1 and 6 inclusive) + */ + public static int rotate60ccw(int digit) { + switch (digit) { + case 1: // K_AXES_DIGIT + return Direction.IK_AXES_DIGIT.digit(); + case 5: // IK_AXES_DIGIT + return Direction.I_AXES_DIGIT.digit(); + case 4: // I_AXES_DIGIT + return Direction.IJ_AXES_DIGIT.digit(); + case 6: // IJ_AXES_DIGIT + return Direction.J_AXES_DIGIT.digit(); + case 2: // J_AXES_DIGIT: + return Direction.JK_AXES_DIGIT.digit(); + case 3: // JK_AXES_DIGIT: + return Direction.K_AXES_DIGIT.digit(); + default: + return digit; + } + } + +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/FaceIJK.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/FaceIJK.java new file mode 100644 index 00000000..7a37c232 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/FaceIJK.java @@ -0,0 +1,817 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Mutable face number and ijk coordinates on that face-centered coordinate system. + * + * References the Vec2d cartesian coordinate systems hex2d: local face-centered + * coordinate system scaled a specific H3 grid resolution unit length and + * with x-axes aligned with the local i-axes + */ +final class FaceIJK { + + /** enum representing overage type */ + enum Overage { + /** + * Digit representing overage type + */ + NO_OVERAGE, + /** + * On face edge (only occurs on substrate grids) + */ + FACE_EDGE, + /** + * Overage on new face interior + */ + NEW_FACE + } + + // indexes for faceNeighbors table + /** + * IJ quadrant faceNeighbors table direction + */ + private static final int IJ = 1; + /** + * KI quadrant faceNeighbors table direction + */ + private static final int KI = 2; + /** + * JK quadrant faceNeighbors table direction + */ + private static final int JK = 3; + + /** + * overage distance table + */ + private static final int[] maxDimByCIIres = { + 2, // res 0 + -1, // res 1 + 14, // res 2 + -1, // res 3 + 98, // res 4 + -1, // res 5 + 686, // res 6 + -1, // res 7 + 4802, // res 8 + -1, // res 9 + 33614, // res 10 + -1, // res 11 + 235298, // res 12 + -1, // res 13 + 1647086, // res 14 + -1, // res 15 + 11529602 // res 16 + }; + + /** + * unit scale distance table + */ + private static final int[] 
unitScaleByCIIres = { + 1, // res 0 + -1, // res 1 + 7, // res 2 + -1, // res 3 + 49, // res 4 + -1, // res 5 + 343, // res 6 + -1, // res 7 + 2401, // res 8 + -1, // res 9 + 16807, // res 10 + -1, // res 11 + 117649, // res 12 + -1, // res 13 + 823543, // res 14 + -1, // res 15 + 5764801 // res 16 + }; + + /** + * direction from the origin face to the destination face, relative to + * the origin face's coordinate system, or -1 if not adjacent. + */ + private static final int[][] adjacentFaceDir = new int[][] { + { 0, KI, -1, -1, IJ, JK, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 0 + { IJ, 0, KI, -1, -1, -1, JK, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 1 + { -1, IJ, 0, KI, -1, -1, -1, JK, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 2 + { -1, -1, IJ, 0, KI, -1, -1, -1, JK, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 3 + { KI, -1, -1, IJ, 0, -1, -1, -1, -1, JK, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 4 + { JK, -1, -1, -1, -1, 0, -1, -1, -1, -1, IJ, -1, -1, -1, KI, -1, -1, -1, -1, -1 }, // face 5 + { -1, JK, -1, -1, -1, -1, 0, -1, -1, -1, KI, IJ, -1, -1, -1, -1, -1, -1, -1, -1 }, // face 6 + { -1, -1, JK, -1, -1, -1, -1, 0, -1, -1, -1, KI, IJ, -1, -1, -1, -1, -1, -1, -1 }, // face 7 + { -1, -1, -1, JK, -1, -1, -1, -1, 0, -1, -1, -1, KI, IJ, -1, -1, -1, -1, -1, -1 }, // face 8 + { -1, -1, -1, -1, JK, -1, -1, -1, -1, 0, -1, -1, -1, KI, IJ, -1, -1, -1, -1, -1 }, // face 9 + { -1, -1, -1, -1, -1, IJ, KI, -1, -1, -1, 0, -1, -1, -1, -1, JK, -1, -1, -1, -1 }, // face 10 + { -1, -1, -1, -1, -1, -1, IJ, KI, -1, -1, -1, 0, -1, -1, -1, -1, JK, -1, -1, -1 }, // face 11 + { -1, -1, -1, -1, -1, -1, -1, IJ, KI, -1, -1, -1, 0, -1, -1, -1, -1, JK, -1, -1 }, // face 12 + { -1, -1, -1, -1, -1, -1, -1, -1, IJ, KI, -1, -1, -1, 0, -1, -1, -1, -1, JK, -1 }, // face 13 + { -1, -1, -1, -1, -1, KI, -1, -1, -1, IJ, -1, -1, -1, -1, 0, -1, -1, -1, -1, JK }, // face 14 + { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, JK, -1, 
-1, -1, -1, 0, IJ, -1, -1, KI }, // face 15 + { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, JK, -1, -1, -1, KI, 0, IJ, -1, -1 }, // face 16 + { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, JK, -1, -1, -1, KI, 0, IJ, -1 }, // face 17 + { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, JK, -1, -1, -1, KI, 0, IJ }, // face 18 + { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, JK, IJ, -1, -1, KI, 0 } // face 19 + }; + + /** Maximum input for any component to face-to-base-cell lookup functions */ + private static final int MAX_FACE_COORD = 2; + + /** + * Information to transform into an adjacent face IJK system + */ + private static class FaceOrientIJK { + // face number + final int face; + // res 0 translation relative to primary face + final int translateI; + final int translateJ; + final int translateK; + // number of 60 degree ccw rotations relative to primary + final int ccwRot60; + + // face + FaceOrientIJK(int face, int translateI, int translateJ, int translateK, int ccwRot60) { + this.face = face; + this.translateI = translateI; + this.translateJ = translateJ; + this.translateK = translateK; + this.ccwRot60 = ccwRot60; + } + } + + /** + * Definition of which faces neighbor each other. 
+ */ + private static final FaceOrientIJK[][] faceNeighbors = new FaceOrientIJK[][] { + { + // face 0 + new FaceOrientIJK(0, 0, 0, 0, 0), // central face + new FaceOrientIJK(4, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(1, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(5, 0, 2, 2, 3) // jk quadrant + }, + { + // face 1 + new FaceOrientIJK(1, 0, 0, 0, 0), // central face + new FaceOrientIJK(0, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(2, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(6, 0, 2, 2, 3) // jk quadrant + }, + { + // face 2 + new FaceOrientIJK(2, 0, 0, 0, 0), // central face + new FaceOrientIJK(1, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(3, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(7, 0, 2, 2, 3) // jk quadrant + }, + { + // face 3 + new FaceOrientIJK(3, 0, 0, 0, 0), // central face + new FaceOrientIJK(2, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(4, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(8, 0, 2, 2, 3) // jk quadrant + }, + { + // face 4 + new FaceOrientIJK(4, 0, 0, 0, 0), // central face + new FaceOrientIJK(3, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(0, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(9, 0, 2, 2, 3) // jk quadrant + }, + { + // face 5 + new FaceOrientIJK(5, 0, 0, 0, 0), // central face + new FaceOrientIJK(10, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(14, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(0, 0, 2, 2, 3) // jk quadrant + }, + { + // face 6 + new FaceOrientIJK(6, 0, 0, 0, 0), // central face + new FaceOrientIJK(11, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(10, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(1, 0, 2, 2, 3) // jk quadrant + }, + { + // face 7 + new FaceOrientIJK(7, 0, 0, 0, 0), // central face + new FaceOrientIJK(12, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(11, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(2, 0, 2, 2, 3) // jk quadrant + }, + { + // face 8 + new FaceOrientIJK(8, 0, 0, 0, 0), // central face + new FaceOrientIJK(13, 2, 2, 0, 3), // 
ij quadrant + new FaceOrientIJK(12, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(3, 0, 2, 2, 3) // jk quadrant + }, + { + // face 9 + new FaceOrientIJK(9, 0, 0, 0, 0), // central face + new FaceOrientIJK(14, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(13, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(4, 0, 2, 2, 3) // jk quadrant + }, + { + // face 10 + new FaceOrientIJK(10, 0, 0, 0, 0), // central face + new FaceOrientIJK(5, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(6, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(15, 0, 2, 2, 3) // jk quadrant + }, + { + // face 11 + new FaceOrientIJK(11, 0, 0, 0, 0), // central face + new FaceOrientIJK(6, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(7, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(16, 0, 2, 2, 3) // jk quadrant + }, + { + // face 12 + new FaceOrientIJK(12, 0, 0, 0, 0), // central face + new FaceOrientIJK(7, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(8, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(17, 0, 2, 2, 3) // jk quadrant + }, + { + // face 13 + new FaceOrientIJK(13, 0, 0, 0, 0), // central face + new FaceOrientIJK(8, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(9, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(18, 0, 2, 2, 3) // jk quadrant + }, + { + // face 14 + new FaceOrientIJK(14, 0, 0, 0, 0), // central face + new FaceOrientIJK(9, 2, 2, 0, 3), // ij quadrant + new FaceOrientIJK(5, 2, 0, 2, 3), // ki quadrant + new FaceOrientIJK(19, 0, 2, 2, 3) // jk quadrant + }, + { + // face 15 + new FaceOrientIJK(15, 0, 0, 0, 0), // central face + new FaceOrientIJK(16, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(19, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(10, 0, 2, 2, 3) // jk quadrant + }, + { + // face 16 + new FaceOrientIJK(16, 0, 0, 0, 0), // central face + new FaceOrientIJK(17, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(15, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(11, 0, 2, 2, 3) // jk quadrant + }, + { + // face 17 + new FaceOrientIJK(17, 0, 0, 0, 0), 
// central face + new FaceOrientIJK(18, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(16, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(12, 0, 2, 2, 3) // jk quadrant + }, + { + // face 18 + new FaceOrientIJK(18, 0, 0, 0, 0), // central face + new FaceOrientIJK(19, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(17, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(13, 0, 2, 2, 3) // jk quadrant + }, + { + // face 19 + new FaceOrientIJK(19, 0, 0, 0, 0), // central face + new FaceOrientIJK(15, 2, 0, 2, 1), // ij quadrant + new FaceOrientIJK(18, 2, 2, 0, 5), // ki quadrant + new FaceOrientIJK(14, 0, 2, 2, 3) // jk quadrant + } }; + + int face; // face number + CoordIJK coord; // ijk coordinates on that face + + FaceIJK(int face, CoordIJK coord) { + this.face = face; + this.coord = coord; + } + + /** + * Adjusts this FaceIJK address so that the resulting cell address is + * relative to the correct icosahedral face. + * + * @param res The H3 resolution of the cell. + * @param pentLeading4 Whether or not the cell is a pentagon with a leading + * digit 4. + * @param substrate Whether or not the cell is in a substrate grid. 
+ * @return 0 if on original face (no overage); 1 if on face edge (only occurs + * on substrate grids); 2 if overage on new face interior + */ + public Overage adjustOverageClassII(int res, boolean pentLeading4, boolean substrate) { + Overage overage = Overage.NO_OVERAGE; + // get the maximum dimension value; scale if a substrate grid + int maxDim = maxDimByCIIres[res]; + if (substrate) { + maxDim *= 3; + } + + // check for overage + if (substrate && this.coord.i + this.coord.j + this.coord.k == maxDim) { // on edge + overage = Overage.FACE_EDGE; + } else if (this.coord.i + this.coord.j + this.coord.k > maxDim) { // overage + overage = Overage.NEW_FACE; + final FaceOrientIJK fijkOrient; + if (this.coord.k > 0) { + if (this.coord.j > 0) { // jk "quadrant" + fijkOrient = faceNeighbors[this.face][JK]; + } else { // ik "quadrant" + fijkOrient = faceNeighbors[this.face][KI]; + // adjust for the pentagonal missing sequence + if (pentLeading4) { + // translate origin to center of pentagon + this.coord.ijkSub(maxDim, 0, 0); + // rotate to adjust for the missing sequence + this.coord.ijkRotate60cw(); + // translate the origin back to the center of the triangle + this.coord.ijkAdd(maxDim, 0, 0); + } + } + } else { // ij "quadrant" + fijkOrient = faceNeighbors[this.face][IJ]; + } + + this.face = fijkOrient.face; + + // rotate and translate for adjacent face + for (int i = 0; i < fijkOrient.ccwRot60; i++) { + this.coord.ijkRotate60ccw(); + } + + int unitScale = unitScaleByCIIres[res]; + if (substrate) { + unitScale *= 3; + } + this.coord.ijkAdd(fijkOrient.translateI * unitScale, fijkOrient.translateJ * unitScale, fijkOrient.translateK * unitScale); + this.coord.ijkNormalize(); + + // overage points on pentagon boundaries can end up on edges + if (substrate && this.coord.i + this.coord.j + this.coord.k == maxDim) { // on edge + overage = Overage.FACE_EDGE; + } + } + return overage; + } + + /** + * Computes the center point in spherical coordinates of a cell given by + * a 
FaceIJK address at a specified resolution. + * + * @param res The H3 resolution of the cell. + */ + public LatLng faceIjkToGeo(int res) { + Vec2d v = coord.ijkToHex2d(); + return v.hex2dToGeo(face, res, false); + } + + /** + * Computes the cell boundary in spherical coordinates for a pentagonal cell + * for this FaceIJK address at a specified resolution. + * + * @param res The H3 resolution of the cell. + * @param start The first topological vertex to return. + * @param length The number of topological vertexes to return. + */ + public CellBoundary faceIjkPentToCellBoundary(int res, int start, int length) { + FaceIJK[] fijkVerts = new FaceIJK[Constants.NUM_PENT_VERTS]; + int adjRes = faceIjkPentToVerts(res, fijkVerts); + + // If we're returning the entire loop, we need one more iteration in case + // of a distortion vertex on the last edge + int additionalIteration = length == Constants.NUM_PENT_VERTS ? 1 : 0; + + // convert each vertex to lat/lng + // adjust the face of each vertex as appropriate and introduce + // edge-crossing vertices as needed + CellBoundary boundary = new CellBoundary(); + FaceIJK lastFijk = null; + for (int vert = start; vert < start + length + additionalIteration; vert++) { + int v = vert % Constants.NUM_PENT_VERTS; + + FaceIJK fijk = fijkVerts[v]; + + fijk.adjustPentVertOverage(adjRes); + + // all Class III pentagon edges cross icosa edges + // note that Class II pentagons have vertices on the edge, + // not edge intersections + if (H3Index.isResolutionClassIII(res) && vert > start) { + // find hex2d of the two vertexes on the last face + FaceIJK tmpFijk = new FaceIJK(fijk.face, new CoordIJK(fijk.coord.i, fijk.coord.j, fijk.coord.k)); + + Vec2d orig2d0 = lastFijk.coord.ijkToHex2d(); + + int currentToLastDir = adjacentFaceDir[tmpFijk.face][lastFijk.face]; + + FaceOrientIJK fijkOrient = faceNeighbors[tmpFijk.face][currentToLastDir]; + + tmpFijk.face = fijkOrient.face; + CoordIJK ijk = tmpFijk.coord; + + // rotate and translate for adjacent 
face + for (int i = 0; i < fijkOrient.ccwRot60; i++) { + ijk.ijkRotate60ccw(); + } + + int unitScale = unitScaleByCIIres[adjRes] * 3; + ijk.ijkAdd(fijkOrient.translateI * unitScale, fijkOrient.translateJ * unitScale, fijkOrient.translateK * unitScale); + ijk.ijkNormalize(); + + Vec2d orig2d1 = ijk.ijkToHex2d(); + + // find the appropriate icosa face edge vertexes + int maxDim = maxDimByCIIres[adjRes]; + Vec2d v0 = new Vec2d(3.0 * maxDim, 0.0); + Vec2d v1 = new Vec2d(-1.5 * maxDim, 3.0 * Constants.M_SQRT3_2 * maxDim); + Vec2d v2 = new Vec2d(-1.5 * maxDim, -3.0 * Constants.M_SQRT3_2 * maxDim); + + Vec2d edge0; + Vec2d edge1; + switch (adjacentFaceDir[tmpFijk.face][fijk.face]) { + case IJ: + edge0 = v0; + edge1 = v1; + break; + case JK: + edge0 = v1; + edge1 = v2; + break; + case KI: + default: + assert (adjacentFaceDir[tmpFijk.face][fijk.face] == KI); + edge0 = v2; + edge1 = v0; + break; + } + + // find the intersection and add the lat/lng point to the result + Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); + LatLng point = inter.hex2dToGeo(tmpFijk.face, adjRes, true); + boundary.add(point); + } + + // convert vertex to lat/lng and add to the result + // vert == start + NUM_PENT_VERTS is only used to test for possible + // intersection on last edge + if (vert < start + Constants.NUM_PENT_VERTS) { + Vec2d vec = fijk.coord.ijkToHex2d(); + LatLng point = vec.hex2dToGeo(fijk.face, adjRes, true); + boundary.add(point); + } + + lastFijk = fijk; + } + return boundary; + } + + /** + * Generates the cell boundary in spherical coordinates for a cell given by this + * FaceIJK address at a specified resolution. + * + * @param res The H3 resolution of the cell. + * @param start The first topological vertex to return. + * @param length The number of topological vertexes to return. 
+ */ + public CellBoundary faceIjkToCellBoundary(int res, int start, int length) { + FaceIJK fijkVerts[] = new FaceIJK[Constants.NUM_HEX_VERTS]; + int adjRes = faceIjkToVerts(res, fijkVerts); + // If we're returning the entire loop, we need one more iteration in case + // of a distortion vertex on the last edge + int additionalIteration = length == Constants.NUM_HEX_VERTS ? 1 : 0; + + // convert each vertex to lat/lng + // adjust the face of each vertex as appropriate and introduce + // edge-crossing vertices as needed + CellBoundary boundary = new CellBoundary(); + int lastFace = -1; + Overage lastOverage = Overage.NO_OVERAGE; + for (int vert = start; vert < start + length + additionalIteration; vert++) { + int v = vert % Constants.NUM_HEX_VERTS; + + FaceIJK fijk = new FaceIJK(fijkVerts[v].face, new CoordIJK(fijkVerts[v].coord.i, fijkVerts[v].coord.j, fijkVerts[v].coord.k)); + + // + final boolean pentLeading4 = false; // may change in c code when calling method + Overage overage = fijk.adjustOverageClassII(adjRes, pentLeading4, true); + + /* + Check for edge-crossing. Each face of the underlying icosahedron is a + different projection plane. So if an edge of the hexagon crosses an + icosahedron edge, an additional vertex must be introduced at that + intersection point. Then each half of the cell edge can be projected + to geographic coordinates using the appropriate icosahedron face + projection. Note that Class II cell edges have vertices on the face + edge, with no edge line intersections. 
+ */ + if (H3Index.isResolutionClassIII(res) && vert > start && fijk.face != lastFace && lastOverage != Overage.FACE_EDGE) { + // find hex2d of the two vertexes on original face + int lastV = (v + 5) % Constants.NUM_HEX_VERTS; + Vec2d orig2d0 = fijkVerts[lastV].coord.ijkToHex2d(); + Vec2d orig2d1 = fijkVerts[v].coord.ijkToHex2d(); + + // find the appropriate icosa face edge vertexes + int maxDim = maxDimByCIIres[adjRes]; + Vec2d v0 = new Vec2d(3.0 * maxDim, 0.0); + Vec2d v1 = new Vec2d(-1.5 * maxDim, 3.0 * Constants.M_SQRT3_2 * maxDim); + Vec2d v2 = new Vec2d(-1.5 * maxDim, -3.0 * Constants.M_SQRT3_2 * maxDim); + + int face2 = ((lastFace == this.face) ? fijk.face : lastFace); + final Vec2d edge0; + final Vec2d edge1; + switch (adjacentFaceDir[this.face][face2]) { + case IJ: + edge0 = v0; + edge1 = v1; + break; + case JK: + edge0 = v1; + edge1 = v2; + break; + // case KI: + default: + assert (adjacentFaceDir[this.face][face2] == KI); + edge0 = v2; + edge1 = v0; + break; + } + + // find the intersection and add the lat/lng point to the result + Vec2d inter = Vec2d.v2dIntersect(orig2d0, orig2d1, edge0, edge1); + /* + If a point of intersection occurs at a hexagon vertex, then each + adjacent hexagon edge will lie completely on a single icosahedron + face, and no additional vertex is required. + */ + boolean isIntersectionAtVertex = orig2d0.equals(inter) || orig2d1.equals(inter); + if (isIntersectionAtVertex == false) { + LatLng point = inter.hex2dToGeo(this.face, adjRes, true); + boundary.add(point); + } + } + + // convert vertex to lat/lng and add to the result + // vert == start + NUM_HEX_VERTS is only used to test for possible + // intersection on last edge + if (vert < start + Constants.NUM_HEX_VERTS) { + Vec2d vec = fijk.coord.ijkToHex2d(); + LatLng point = vec.hex2dToGeo(fijk.face, adjRes, true); + boundary.add(point); + } + lastFace = fijk.face; + lastOverage = overage; + } + return boundary; + } + + /** + * compute the corresponding H3Index. 
+ * @param res The cell resolution. + * @return The encoded H3Index (or H3_NULL on failure). + */ + public long faceIjkToH3(int res) { + // initialize the index + long h = H3Index.H3_INIT; + h = H3Index.H3_set_mode(h, Constants.H3_CELL_MODE); + h = H3Index.H3_set_resolution(h, res); + + // check for res 0/base cell + if (res == 0) { + if (coord.i > MAX_FACE_COORD || coord.j > MAX_FACE_COORD || coord.k > MAX_FACE_COORD) { + // out of range input + throw new IllegalArgumentException(" out of range input"); + } + + return H3Index.H3_set_base_cell(h, BaseCells.getBaseCell(this)); + } + + // we need to find the correct base cell FaceIJK for this H3 index; + // start with the passed in face and resolution res ijk coordinates + // in that face's coordinate system + + // build the H3Index from finest res up + // adjust r for the fact that the res 0 base cell offsets the indexing + // digits + for (int r = res - 1; r >= 0; r--) { + int lastI = coord.i; + int lastJ = coord.j; + int lastK = coord.k; + CoordIJK lastCenter; + if (H3Index.isResolutionClassIII(r + 1)) { + // rotate ccw + coord.upAp7(); + lastCenter = new CoordIJK(coord.i, coord.j, coord.k); + lastCenter.downAp7(); + } else { + // rotate cw + coord.upAp7r(); + lastCenter = new CoordIJK(coord.i, coord.j, coord.k); + lastCenter.downAp7r(); + } + + CoordIJK diff = new CoordIJK(lastI - lastCenter.i, lastJ - lastCenter.j, lastK - lastCenter.k); + diff.ijkNormalize(); + h = H3Index.H3_set_index_digit(h, r + 1, diff.unitIjkToDigit()); + } + + // we should now hold the IJK of the base cell in the + // coordinate system of the current face + + if (coord.i > MAX_FACE_COORD || coord.j > MAX_FACE_COORD || coord.k > MAX_FACE_COORD) { + // out of range input + throw new IllegalArgumentException(" out of range input"); + } + + // lookup the correct base cell + int baseCell = BaseCells.getBaseCell(this); + h = H3Index.H3_set_base_cell(h, baseCell); + + // rotate if necessary to get canonical base cell orientation + // for this 
base cell + int numRots = BaseCells.getBaseCellCCWrot60(this); + if (BaseCells.isBaseCellPentagon(baseCell)) { + // force rotation out of missing k-axes sub-sequence + if (H3Index.h3LeadingNonZeroDigit(h) == CoordIJK.Direction.K_AXES_DIGIT.digit()) { + // check for a cw/ccw offset face; default is ccw + if (BaseCells.baseCellIsCwOffset(baseCell, face)) { + h = H3Index.h3Rotate60cw(h); + } else { + h = H3Index.h3Rotate60ccw(h); + } + } + + for (int i = 0; i < numRots; i++) { + h = H3Index.h3RotatePent60ccw(h); + } + } else { + for (int i = 0; i < numRots; i++) { + h = H3Index.h3Rotate60ccw(h); + } + } + + return h; + } + + /** + * Populate the vertices of this cell as substrate FaceIJK addresses. + * + * @param res The H3 resolution of the cell. This may be adjusted if + * necessary for the substrate grid resolution. + */ + private int faceIjkToVerts(int res, FaceIJK[] fijkVerts) { + // get the correct set of substrate vertices for this resolution + CoordIJK[] verts; + if (H3Index.isResolutionClassIII(res)) { + // the vertexes of an origin-centered cell in a Class III resolution on a + // substrate grid with aperture sequence 33r7r. The aperture 3 gets us the + // vertices, and the 3r7r gets us to Class II. + // vertices listed ccw from the i-axes + verts = new CoordIJK[] { + new CoordIJK(5, 4, 0), // 0 + new CoordIJK(1, 5, 0), // 1 + new CoordIJK(0, 5, 4), // 2 + new CoordIJK(0, 1, 5), // 3 + new CoordIJK(4, 0, 5), // 4 + new CoordIJK(5, 0, 1) // 5 + }; + } else { + // the vertexes of an origin-centered cell in a Class II resolution on a + // substrate grid with aperture sequence 33r. The aperture 3 gets us the + // vertices, and the 3r gets us back to Class II. 
+ // vertices listed ccw from the i-axes + verts = new CoordIJK[] { + new CoordIJK(2, 1, 0), // 0 + new CoordIJK(1, 2, 0), // 1 + new CoordIJK(0, 2, 1), // 2 + new CoordIJK(0, 1, 2), // 3 + new CoordIJK(1, 0, 2), // 4 + new CoordIJK(2, 0, 1) // 5 + }; + } + + // adjust the center point to be in an aperture 33r substrate grid + // these should be composed for speed + this.coord.downAp3(); + this.coord.downAp3r(); + + // if res is Class III we need to add a cw aperture 7 to get to + // icosahedral Class II + if (H3Index.isResolutionClassIII(res)) { + this.coord.downAp7r(); + res += 1; + } + + // The center point is now in the same substrate grid as the origin + // cell vertices. Add the center point substate coordinates + // to each vertex to translate the vertices to that cell. + + for (int v = 0; v < Constants.NUM_HEX_VERTS; v++) { + verts[v].ijkAdd(this.coord.i, this.coord.j, this.coord.k); + verts[v].ijkNormalize(); + fijkVerts[v] = new FaceIJK(this.face, verts[v]); + } + return res; + } + + /** + * Populate the vertices of this pentagon cell as substrate FaceIJK addresses + * + * @param res The H3 resolution of the cell. This may be adjusted if + * necessary for the substrate grid resolution. + */ + private int faceIjkPentToVerts(int res, FaceIJK[] fijkVerts) { + // get the correct set of substrate vertices for this resolution + CoordIJK[] verts; + if (H3Index.isResolutionClassIII(res)) { + // the vertexes of an origin-centered pentagon in a Class II resolution on a + // substrate grid with aperture sequence 33r. The aperture 3 gets us the + // vertices, and the 3r gets us back to Class II. + // vertices listed ccw from the i-axes + verts = new CoordIJK[] { + new CoordIJK(5, 4, 0), // 0 + new CoordIJK(1, 5, 0), // 1 + new CoordIJK(0, 5, 4), // 2 + new CoordIJK(0, 1, 5), // 3 + new CoordIJK(4, 0, 5) // 4 + }; + } else { + // the vertexes of an origin-centered pentagon in a Class III resolution on + // a substrate grid with aperture sequence 33r7r. 
The aperture 3 gets us the + // vertices, and the 3r7r gets us to Class II. vertices listed ccw from the + // i-axes + verts = new CoordIJK[] { + new CoordIJK(2, 1, 0), // 0 + new CoordIJK(1, 2, 0), // 1 + new CoordIJK(0, 2, 1), // 2 + new CoordIJK(0, 1, 2), // 3 + new CoordIJK(1, 0, 2) // 4 + }; + } + + // adjust the center point to be in an aperture 33r substrate grid + // these should be composed for speed + this.coord.downAp3(); + this.coord.downAp3r(); + + // if res is Class III we need to add a cw aperture 7 to get to + // icosahedral Class II + if (H3Index.isResolutionClassIII(res)) { + this.coord.downAp7r(); + res += 1; + } + + // The center point is now in the same substrate grid as the origin + // cell vertices. Add the center point substate coordinates + // to each vertex to translate the vertices to that cell. + for (int v = 0; v < Constants.NUM_PENT_VERTS; v++) { + verts[v].ijkAdd(this.coord.i, this.coord.j, this.coord.k); + verts[v].ijkNormalize(); + fijkVerts[v] = new FaceIJK(this.face, verts[v]); + } + return res; + } + + /** + * Adjusts a FaceIJK address for a pentagon vertex in a substrate grid in + * place so that the resulting cell address is relative to the correct + * icosahedral face. + * + * @param res The H3 resolution of the cell. + */ + private Overage adjustPentVertOverage(int res) { + Overage overage; + do { + overage = adjustOverageClassII(res, false, true); + } while (overage == Overage.NEW_FACE); + return overage; + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3.java new file mode 100644 index 00000000..ad55d00a --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3.java @@ -0,0 +1,311 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +import java.util.Arrays; + +import static java.lang.Math.toRadians; + +/** + * Defines the public API of the H3 library. + */ +public final class H3 { + + public static int MIN_H3_RES = Constants.MIN_H3_RES; + public static int MAX_H3_RES = Constants.MAX_H3_RES; + + /** + * Converts from long representation of an index to String representation. + */ + public static String h3ToString(long h3) { + return Long.toHexString(h3); + } + + /** + * Converts from String representation of an index to long representation. 
+ */ + public static long stringToH3(String h3Address) { + return Long.parseUnsignedLong(h3Address, 16); + } + + /** determines if an H3 cell is a pentagon */ + public static boolean isPentagon(long h3) { + return H3Index.H3_is_pentagon(h3); + } + + /** determines if an H3 cell in string format is a pentagon */ + public static boolean isPentagon(String h3Address) { + return isPentagon(stringToH3(h3Address)); + } + + /** Returns true if this is a valid H3 index */ + public static boolean h3IsValid(long h3) { + if (H3Index.H3_get_high_bit(h3) != 0) { + return false; + } + + if (H3Index.H3_get_mode(h3) != Constants.H3_CELL_MODE) { + return false; + } + + if (H3Index.H3_get_reserved_bits(h3) != 0) { + return false; + } + + int baseCell = H3Index.H3_get_base_cell(h3); + if (baseCell < 0 || baseCell >= Constants.NUM_BASE_CELLS) { // LCOV_EXCL_BR_LINE + // Base cells less than zero can not be represented in an index + return false; + } + + int res = H3Index.H3_get_resolution(h3); + if (res < Constants.MIN_H3_RES || res > Constants.MAX_H3_RES) { // LCOV_EXCL_BR_LINE + // Resolutions less than zero can not be represented in an index + return false; + } + + boolean foundFirstNonZeroDigit = false; + for (int r = 1; r <= res; r++) { + int digit = H3Index.H3_get_index_digit(h3, r); + + if (foundFirstNonZeroDigit == false && digit != CoordIJK.Direction.CENTER_DIGIT.digit()) { + foundFirstNonZeroDigit = true; + if (BaseCells.isBaseCellPentagon(baseCell) && digit == CoordIJK.Direction.K_AXES_DIGIT.digit()) { + return false; + } + } + + if (digit < CoordIJK.Direction.CENTER_DIGIT.digit() || digit >= CoordIJK.Direction.NUM_DIGITS.digit()) { + return false; + } + } + + for (int r = res + 1; r <= Constants.MAX_H3_RES; r++) { + int digit = H3Index.H3_get_index_digit(h3, r); + if (digit != CoordIJK.Direction.INVALID_DIGIT.digit()) { + return false; + } + } + return true; + } + + /** Returns true if this is a valid H3 index */ + public static boolean h3IsValid(String h3Address) { + 
return h3IsValid(stringToH3(h3Address)); + } + + /** + * Return all base cells + */ + public static long[] getLongRes0Cells() { + long[] cells = new long[Constants.NUM_BASE_CELLS]; + for (int bc = 0; bc < Constants.NUM_BASE_CELLS; bc++) { + long baseCell = H3Index.H3_INIT; + baseCell = H3Index.H3_set_mode(baseCell, Constants.H3_CELL_MODE); + baseCell = H3Index.H3_set_base_cell(baseCell, bc); + cells[bc] = baseCell; + } + return cells; + } + + /** + * Return all base cells + */ + public static String[] getStringRes0Cells() { + return h3ToStringList(getLongRes0Cells()); + } + + /** + * Find the {@link LatLng} center point of the cell. + */ + public static LatLng h3ToLatLng(long h3) { + final FaceIJK fijk = H3Index.h3ToFaceIjk(h3); + return fijk.faceIjkToGeo(H3Index.H3_get_resolution(h3)); + } + + /** + * Find the {@link LatLng} center point of the cell. + */ + public static LatLng h3ToLatLng(String h3Address) { + return h3ToLatLng(stringToH3(h3Address)); + } + + /** + * Find the cell {@link CellBoundary} coordinates for the cell + */ + public static CellBoundary h3ToGeoBoundary(long h3) { + FaceIJK fijk = H3Index.h3ToFaceIjk(h3); + if (H3Index.H3_is_pentagon(h3)) { + return fijk.faceIjkPentToCellBoundary(H3Index.H3_get_resolution(h3), 0, Constants.NUM_PENT_VERTS); + } else { + return fijk.faceIjkToCellBoundary(H3Index.H3_get_resolution(h3), 0, Constants.NUM_HEX_VERTS); + } + } + + /** + * Find the cell {@link CellBoundary} coordinates for the cell + */ + public static CellBoundary h3ToGeoBoundary(String h3Address) { + return h3ToGeoBoundary(stringToH3(h3Address)); + } + + /** + * Find the H3 index of the resolution res cell containing the lat/lon (in degrees) + * + * @param lat Latitude in degrees. + * @param lng Longitude in degrees. + * @param res Resolution, 0 <= res <= 15 + * @return The H3 index. + * @throws IllegalArgumentException latitude, longitude, or resolution are out of range. 
+ */ + public static long geoToH3(double lat, double lng, int res) { + checkResolution(res); + return new LatLng(toRadians(lat), toRadians(lng)).geoToFaceIJK(res).faceIjkToH3(res); + } + + /** + * Find the H3 index of the resolution res cell containing the lat/lon (in degrees) + * + * @param lat Latitude in degrees. + * @param lng Longitude in degrees. + * @param res Resolution, 0 <= res <= 15 + * @return The H3 index. + * @throws IllegalArgumentException Latitude, longitude, or resolution is out of range. + */ + public static String geoToH3Address(double lat, double lng, int res) { + return h3ToString(geoToH3(lat, lng, res)); + } + + /** + * Returns the parent of the given index. + */ + public static long h3ToParent(long h3) { + int childRes = H3Index.H3_get_resolution(h3); + if (childRes == 0) { + throw new IllegalArgumentException("Input is a base cell"); + } + long parentH = H3Index.H3_set_resolution(h3, childRes - 1); + return H3Index.H3_set_index_digit(parentH, childRes, H3Index.H3_DIGIT_MASK); + } + + /** + * Returns the parent of the given index. + */ + public static String h3ToParent(String h3Address) { + long parent = h3ToParent(stringToH3(h3Address)); + return h3ToString(parent); + } + + /** + * Returns the children of the given index. + */ + public static long[] h3ToChildren(long h3) { + long[] children = new long[cellToChildrenSize(h3)]; + int res = H3Index.H3_get_resolution(h3); + Iterator.IterCellsChildren it = Iterator.iterInitParent(h3, res + 1); + int pos = 0; + while (it.h != Iterator.H3_NULL) { + children[pos++] = it.h; + Iterator.iterStepChild(it); + } + return children; + } + + /** + * Transforms a list of H3 indexes in long form to a list of H3 + * indexes in string form. 
+ */ + public static String[] h3ToChildren(String h3Address) { + return h3ToStringList(h3ToChildren(stringToH3(h3Address))); + } + + public static String[] hexRing(String h3Address) { + return h3ToStringList(hexRing(stringToH3(h3Address))); + } + + /** + * Returns the neighbor indexes. + * + * @param h3 Origin index + * @return All neighbor indexes from the origin + */ + public static long[] hexRing(long h3) { + return HexRing.hexRing(h3); + } + + /** + * cellToChildrenSize returns the exact number of children for a cell at a + * given child resolution. + * + * @param h H3Index to find the number of children of + * + * @return int Exact number of children (handles hexagons and pentagons + * correctly) + */ + private static int cellToChildrenSize(long h) { + int n = 1; + if (H3Index.H3_is_pentagon(h)) { + return (1 + 5 * (_ipow(7, n) - 1) / 6); + } else { + return _ipow(7, n); + } + } + + /** + * _ipow does integer exponentiation efficiently. Taken from StackOverflow. + * + * @param base the integer base (can be positive or negative) + * @param exp the integer exponent (should be nonnegative) + * + * @return the exponentiated value + */ + private static int _ipow(int base, int exp) { + int result = 1; + while (exp != 0) { + if ((exp & 1) != 0) { + result *= base; + } + exp >>= 1; + base *= base; + } + + return result; + } + + private static String[] h3ToStringList(long[] h3s) { + return Arrays.stream(h3s).mapToObj(H3::h3ToString).toArray(String[]::new); + } + + /** + * @throws IllegalArgumentException res is not a valid H3 resolution. 
+ */ + private static void checkResolution(int res) { + if (res < 0 || res > Constants.MAX_H3_RES) { + throw new IllegalArgumentException("resolution [" + res + "] is out of range (must be 0 <= res <= 15)"); + } + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3Index.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3Index.java new file mode 100644 index 00000000..f12fcac1 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/H3Index.java @@ -0,0 +1,337 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2018, 2020 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Functions that can be applied to an H3 index. + */ +final class H3Index { + + /** + * Gets the integer base cell of h3. + */ + public static int H3_get_base_cell(long h3) { + return ((int) ((((h3) & H3_BC_MASK) >> H3_BC_OFFSET))); + } + + /** + * Returns true if this index is one of twelve pentagons per resolution. 
+ */ + public static boolean H3_is_pentagon(long h3) { + return BaseCells.isBaseCellPentagon(H3Index.H3_get_base_cell(h3)) && H3Index.h3LeadingNonZeroDigit(h3) == 0; + } + + public static long H3_INIT = 35184372088831L; + + /** + * The bit offset of the mode in an H3 index. + */ + public static int H3_MODE_OFFSET = 59; + + /** + * 1's in the 4 mode bits, 0's everywhere else. + */ + public static long H3_MODE_MASK = 15L << H3_MODE_OFFSET; + + /** + * 0's in the 4 mode bits, 1's everywhere else. + */ + public static long H3_MODE_MASK_NEGATIVE = ~H3_MODE_MASK; + + public static long H3_set_mode(long h3, long mode) { + return (h3 & H3_MODE_MASK_NEGATIVE) | (mode << H3_MODE_OFFSET); + } + + /** + * The bit offset of the base cell in an H3 index. + */ + public static int H3_BC_OFFSET = 45; + /** + * 1's in the 7 base cell bits, 0's everywhere else. + */ + public static long H3_BC_MASK = 127L << H3_BC_OFFSET; + + /** + * 0's in the 7 base cell bits, 1's everywhere else. + */ + public static long H3_BC_MASK_NEGATIVE = ~H3_BC_MASK; + + /** + * Sets the integer base cell of h3 to bc. + */ + public static long H3_set_base_cell(long h3, long bc) { + return (h3 & H3_BC_MASK_NEGATIVE) | (bc << H3_BC_OFFSET); + } + + public static int H3_RES_OFFSET = 52; + /** + * 1's in the 4 resolution bits, 0's everywhere else. + */ + public static long H3_RES_MASK = 15L << H3_RES_OFFSET; + + /** + * 0's in the 4 resolution bits, 1's everywhere else. + */ + public static long H3_RES_MASK_NEGATIVE = ~H3_RES_MASK; + + /** + * The bit offset of the max resolution digit in an H3 index. + */ + public static int H3_MAX_OFFSET = 63; + + /** + * 1 in the highest bit, 0's everywhere else. + */ + public static long H3_HIGH_BIT_MASK = (1L << H3_MAX_OFFSET); + + /** + * Gets the highest bit of the H3 index. + */ + public static int H3_get_high_bit(long h3) { + return ((int) ((((h3) & H3_HIGH_BIT_MASK) >> H3_MAX_OFFSET))); + } + + /** + * Sets the long resolution of h3. 
+ */ + public static long H3_set_resolution(long h3, long res) { + return (((h3) & H3_RES_MASK_NEGATIVE) | (((res)) << H3_RES_OFFSET)); + } + + /** + * The bit offset of the reserved bits in an H3 index. + */ + public static int H3_RESERVED_OFFSET = 56; + + /** + * 1's in the 3 reserved bits, 0's everywhere else. + */ + public static long H3_RESERVED_MASK = (7L << H3_RESERVED_OFFSET); + + /** + * Gets a value in the reserved space. Should always be zero for valid indexes. + */ + public static int H3_get_reserved_bits(long h3) { + return ((int) ((((h3) & H3_RESERVED_MASK) >> H3_RESERVED_OFFSET))); + } + + public static int H3_get_mode(long h3) { + return ((int) ((((h3) & H3_MODE_MASK) >> H3_MODE_OFFSET))); + } + + /** + * Gets the integer resolution of h3. + */ + public static int H3_get_resolution(long h3) { + return (int) ((h3 & H3_RES_MASK) >> H3_RES_OFFSET); + } + + /** + * The number of bits in a single H3 resolution digit. + */ + public static int H3_PER_DIGIT_OFFSET = 3; + + /** + * 1's in the 3 bits of res 15 digit bits, 0's everywhere else. + */ + public static long H3_DIGIT_MASK = 7L; + + /** + * Gets the resolution res integer digit (0-7) of h3. + */ + public static int H3_get_index_digit(long h3, int res) { + return ((int) ((((h3) >> ((Constants.MAX_H3_RES - (res)) * H3_PER_DIGIT_OFFSET)) & H3_DIGIT_MASK))); + } + + /** + * Sets the resolution res digit of h3 to the integer digit (0-7) + */ + public static long H3_set_index_digit(long h3, int res, long digit) { + int x = (Constants.MAX_H3_RES - res) * H3_PER_DIGIT_OFFSET; + return (((h3) & ~((H3_DIGIT_MASK << (x)))) | (((digit)) << x)); + } + + /** + * Returns whether or not a resolution is a Class III grid. Note that odd + * resolutions are Class III and even resolutions are Class II. + * @param res The H3 resolution. + * @return 1 if the resolution is a Class III grid, and 0 if the resolution is + * a Class II grid. 
+ */ + public static boolean isResolutionClassIII(int res) { + return res % 2 != 0; + } + + /** + * Convert an H3Index to a FaceIJK address. + * @param h3 The H3Index. + */ + public static FaceIJK h3ToFaceIjk(long h3) { + int baseCell = H3Index.H3_get_base_cell(h3); + if (baseCell < 0 || baseCell >= Constants.NUM_BASE_CELLS) { // LCOV_EXCL_BR_LINE + // Base cells less than zero can not be represented in an index + // To prevent reading uninitialized memory, we zero the output. + throw new IllegalArgumentException(); + } + // adjust for the pentagonal missing sequence; all of sub-sequence 5 needs + // to be adjusted (and some of sub-sequence 4 below) + if (BaseCells.isBaseCellPentagon(baseCell) && h3LeadingNonZeroDigit(h3) == 5) { + h3 = h3Rotate60cw(h3); + } + + // start with the "home" face and ijk+ coordinates for the base cell of c + FaceIJK fijk = BaseCells.getBaseFaceIJK(baseCell); + if (h3ToFaceIjkWithInitializedFijk(h3, fijk) == false) { + return fijk; // no overage is possible; h lies on this face + } + // if we're here we have the potential for an "overage"; i.e., it is + // possible that c lies on an adjacent face + + CoordIJK origIJK = new CoordIJK(fijk.coord.i, fijk.coord.j, fijk.coord.k); + + // if we're in Class III, drop into the next finer Class II grid + int res = H3Index.H3_get_resolution(h3); + if (isResolutionClassIII(res)) { + // Class III + fijk.coord.downAp7r(); + res++; + } + + // adjust for overage if needed + // a pentagon base cell with a leading 4 digit requires special handling + boolean pentLeading4 = (BaseCells.isBaseCellPentagon(baseCell) && h3LeadingNonZeroDigit(h3) == 4); + if (fijk.adjustOverageClassII(res, pentLeading4, false) != FaceIJK.Overage.NO_OVERAGE) { + // if the base cell is a pentagon we have the potential for secondary + // overages + if (BaseCells.isBaseCellPentagon(baseCell)) { + FaceIJK.Overage overage; + do { + overage = fijk.adjustOverageClassII(res, false, false); + } while (overage != 
FaceIJK.Overage.NO_OVERAGE); + } + + if (res != H3Index.H3_get_resolution(h3)) { + fijk.coord.upAp7r(); + } + } else if (res != H3Index.H3_get_resolution(h3)) { + fijk.coord = origIJK; + } + return fijk; + } + + /** + * Returns the highest resolution non-zero digit in an H3Index. + * @param h The H3Index. + * @return The highest resolution non-zero digit in the H3Index. + */ + public static int h3LeadingNonZeroDigit(long h) { + for (int r = 1; r <= H3Index.H3_get_resolution(h); r++) { + final int dir = H3Index.H3_get_index_digit(h, r); + if (dir != CoordIJK.Direction.CENTER_DIGIT.digit()) { + return dir; + } + } + // if we're here it's all 0's + return CoordIJK.Direction.CENTER_DIGIT.digit(); + } + + /** + * Convert an H3Index to the FaceIJK address on a specified icosahedral face. + * @param h The H3Index. + * @param fijk The FaceIJK address, initialized with the desired face + * and normalized base cell coordinates. + * @return Returns true if the possibility of overage exists, otherwise false. + */ + private static boolean h3ToFaceIjkWithInitializedFijk(long h, FaceIJK fijk) { + + final int res = H3Index.H3_get_resolution(h); + + // center base cell hierarchy is entirely on this face + final boolean possibleOverage = BaseCells.isBaseCellPentagon(H3_get_base_cell(h)) != false + || (res != 0 && (fijk.coord.i != 0 || fijk.coord.j != 0 || fijk.coord.k != 0)); + + for (int r = 1; r <= res; r++) { + if (isResolutionClassIII(r)) { + // Class III == rotate ccw + fijk.coord.downAp7(); + } else { + // Class II == rotate cw + fijk.coord.downAp7r(); + } + fijk.coord.neighbor(H3_get_index_digit(h, r)); + } + + return possibleOverage; + } + + /** + * Rotate an H3Index 60 degrees clockwise. + * @param h The H3Index. 
+ */ + public static long h3Rotate60cw(long h) { + for (int r = 1, res = H3_get_resolution(h); r <= res; r++) { + h = H3_set_index_digit(h, r, CoordIJK.rotate60cw(H3_get_index_digit(h, r))); + } + return h; + } + + /** + * Rotate an H3Index 60 degrees counter-clockwise. + * @param h The H3Index. + */ + public static long h3Rotate60ccw(long h) { + for (int r = 1, res = H3_get_resolution(h); r <= res; r++) { + h = H3_set_index_digit(h, r, CoordIJK.rotate60ccw(H3_get_index_digit(h, r))); + } + return h; + } + + /** + * Rotate an H3Index 60 degrees counter-clockwise about a pentagonal center. + * @param h The H3Index. + */ + public static long h3RotatePent60ccw(long h) { + // skips any leading 1 digits (k-axis) + boolean foundFirstNonZeroDigit = false; + for (int r = 1, res = H3_get_resolution(h); r <= res; r++) { + // rotate this digit + h = H3_set_index_digit(h, r, CoordIJK.rotate60ccw(H3_get_index_digit(h, r))); + + // look for the first non-zero digit so we + // can adjust for deleted k-axes sequence + // if necessary + if (foundFirstNonZeroDigit == false && H3_get_index_digit(h, r) != 0) { + foundFirstNonZeroDigit = true; + + // adjust for deleted k-axes sequence + if (h3LeadingNonZeroDigit(h) == CoordIJK.Direction.K_AXES_DIGIT.digit()) h = h3Rotate60ccw(h); + } + } + return h; + } + +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/HexRing.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/HexRing.java new file mode 100644 index 00000000..9f95f1a6 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/HexRing.java @@ -0,0 +1,760 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Computes the neighbour H3 index from a given index. + */ +final class HexRing { + + private static final int INVALID_BASE_CELL = 127; + + /** Neighboring base cell ID in each IJK direction. + * + * For each base cell, for each direction, the neighboring base + * cell ID is given. 127 indicates there is no neighbor in that direction. 
+ */ + private static final int[][] baseCellNeighbors = new int[][] { + { 0, 1, 5, 2, 4, 3, 8 }, // base cell 0 + { 1, 7, 6, 9, 0, 3, 2 }, // base cell 1 + { 2, 6, 10, 11, 0, 1, 5 }, // base cell 2 + { 3, 13, 1, 7, 4, 12, 0 }, // base cell 3 + { 4, INVALID_BASE_CELL, 15, 8, 3, 0, 12 }, // base cell 4 (pentagon) + { 5, 2, 18, 10, 8, 0, 16 }, // base cell 5 + { 6, 14, 11, 17, 1, 9, 2 }, // base cell 6 + { 7, 21, 9, 19, 3, 13, 1 }, // base cell 7 + { 8, 5, 22, 16, 4, 0, 15 }, // base cell 8 + { 9, 19, 14, 20, 1, 7, 6 }, // base cell 9 + { 10, 11, 24, 23, 5, 2, 18 }, // base cell 10 + { 11, 17, 23, 25, 2, 6, 10 }, // base cell 11 + { 12, 28, 13, 26, 4, 15, 3 }, // base cell 12 + { 13, 26, 21, 29, 3, 12, 7 }, // base cell 13 + { 14, INVALID_BASE_CELL, 17, 27, 9, 20, 6 }, // base cell 14 (pentagon) + { 15, 22, 28, 31, 4, 8, 12 }, // base cell 15 + { 16, 18, 33, 30, 8, 5, 22 }, // base cell 16 + { 17, 11, 14, 6, 35, 25, 27 }, // base cell 17 + { 18, 24, 30, 32, 5, 10, 16 }, // base cell 18 + { 19, 34, 20, 36, 7, 21, 9 }, // base cell 19 + { 20, 14, 19, 9, 40, 27, 36 }, // base cell 20 + { 21, 38, 19, 34, 13, 29, 7 }, // base cell 21 + { 22, 16, 41, 33, 15, 8, 31 }, // base cell 22 + { 23, 24, 11, 10, 39, 37, 25 }, // base cell 23 + { 24, INVALID_BASE_CELL, 32, 37, 10, 23, 18 }, // base cell 24 (pentagon) + { 25, 23, 17, 11, 45, 39, 35 }, // base cell 25 + { 26, 42, 29, 43, 12, 28, 13 }, // base cell 26 + { 27, 40, 35, 46, 14, 20, 17 }, // base cell 27 + { 28, 31, 42, 44, 12, 15, 26 }, // base cell 28 + { 29, 43, 38, 47, 13, 26, 21 }, // base cell 29 + { 30, 32, 48, 50, 16, 18, 33 }, // base cell 30 + { 31, 41, 44, 53, 15, 22, 28 }, // base cell 31 + { 32, 30, 24, 18, 52, 50, 37 }, // base cell 32 + { 33, 30, 49, 48, 22, 16, 41 }, // base cell 33 + { 34, 19, 38, 21, 54, 36, 51 }, // base cell 34 + { 35, 46, 45, 56, 17, 27, 25 }, // base cell 35 + { 36, 20, 34, 19, 55, 40, 54 }, // base cell 36 + { 37, 39, 52, 57, 24, 23, 32 }, // base cell 37 + { 38, INVALID_BASE_CELL, 34, 
51, 29, 47, 21 }, // base cell 38 (pentagon) + { 39, 37, 25, 23, 59, 57, 45 }, // base cell 39 + { 40, 27, 36, 20, 60, 46, 55 }, // base cell 40 + { 41, 49, 53, 61, 22, 33, 31 }, // base cell 41 + { 42, 58, 43, 62, 28, 44, 26 }, // base cell 42 + { 43, 62, 47, 64, 26, 42, 29 }, // base cell 43 + { 44, 53, 58, 65, 28, 31, 42 }, // base cell 44 + { 45, 39, 35, 25, 63, 59, 56 }, // base cell 45 + { 46, 60, 56, 68, 27, 40, 35 }, // base cell 46 + { 47, 38, 43, 29, 69, 51, 64 }, // base cell 47 + { 48, 49, 30, 33, 67, 66, 50 }, // base cell 48 + { 49, INVALID_BASE_CELL, 61, 66, 33, 48, 41 }, // base cell 49 (pentagon) + { 50, 48, 32, 30, 70, 67, 52 }, // base cell 50 + { 51, 69, 54, 71, 38, 47, 34 }, // base cell 51 + { 52, 57, 70, 74, 32, 37, 50 }, // base cell 52 + { 53, 61, 65, 75, 31, 41, 44 }, // base cell 53 + { 54, 71, 55, 73, 34, 51, 36 }, // base cell 54 + { 55, 40, 54, 36, 72, 60, 73 }, // base cell 55 + { 56, 68, 63, 77, 35, 46, 45 }, // base cell 56 + { 57, 59, 74, 78, 37, 39, 52 }, // base cell 57 + { 58, INVALID_BASE_CELL, 62, 76, 44, 65, 42 }, // base cell 58 (pentagon) + { 59, 63, 78, 79, 39, 45, 57 }, // base cell 59 + { 60, 72, 68, 80, 40, 55, 46 }, // base cell 60 + { 61, 53, 49, 41, 81, 75, 66 }, // base cell 61 + { 62, 43, 58, 42, 82, 64, 76 }, // base cell 62 + { 63, INVALID_BASE_CELL, 56, 45, 79, 59, 77 }, // base cell 63 (pentagon) + { 64, 47, 62, 43, 84, 69, 82 }, // base cell 64 + { 65, 58, 53, 44, 86, 76, 75 }, // base cell 65 + { 66, 67, 81, 85, 49, 48, 61 }, // base cell 66 + { 67, 66, 50, 48, 87, 85, 70 }, // base cell 67 + { 68, 56, 60, 46, 90, 77, 80 }, // base cell 68 + { 69, 51, 64, 47, 89, 71, 84 }, // base cell 69 + { 70, 67, 52, 50, 83, 87, 74 }, // base cell 70 + { 71, 89, 73, 91, 51, 69, 54 }, // base cell 71 + { 72, INVALID_BASE_CELL, 73, 55, 80, 60, 88 }, // base cell 72 (pentagon) + { 73, 91, 72, 88, 54, 71, 55 }, // base cell 73 + { 74, 78, 83, 92, 52, 57, 70 }, // base cell 74 + { 75, 65, 61, 53, 94, 86, 81 }, // base cell 75 
+ { 76, 86, 82, 96, 58, 65, 62 }, // base cell 76 + { 77, 63, 68, 56, 93, 79, 90 }, // base cell 77 + { 78, 74, 59, 57, 95, 92, 79 }, // base cell 78 + { 79, 78, 63, 59, 93, 95, 77 }, // base cell 79 + { 80, 68, 72, 60, 99, 90, 88 }, // base cell 80 + { 81, 85, 94, 101, 61, 66, 75 }, // base cell 81 + { 82, 96, 84, 98, 62, 76, 64 }, // base cell 82 + { 83, INVALID_BASE_CELL, 74, 70, 100, 87, 92 }, // base cell 83 (pentagon) + { 84, 69, 82, 64, 97, 89, 98 }, // base cell 84 + { 85, 87, 101, 102, 66, 67, 81 }, // base cell 85 + { 86, 76, 75, 65, 104, 96, 94 }, // base cell 86 + { 87, 83, 102, 100, 67, 70, 85 }, // base cell 87 + { 88, 72, 91, 73, 99, 80, 105 }, // base cell 88 + { 89, 97, 91, 103, 69, 84, 71 }, // base cell 89 + { 90, 77, 80, 68, 106, 93, 99 }, // base cell 90 + { 91, 73, 89, 71, 105, 88, 103 }, // base cell 91 + { 92, 83, 78, 74, 108, 100, 95 }, // base cell 92 + { 93, 79, 90, 77, 109, 95, 106 }, // base cell 93 + { 94, 86, 81, 75, 107, 104, 101 }, // base cell 94 + { 95, 92, 79, 78, 109, 108, 93 }, // base cell 95 + { 96, 104, 98, 110, 76, 86, 82 }, // base cell 96 + { 97, INVALID_BASE_CELL, 98, 84, 103, 89, 111 }, // base cell 97 (pentagon) + { 98, 110, 97, 111, 82, 96, 84 }, // base cell 98 + { 99, 80, 105, 88, 106, 90, 113 }, // base cell 99 + { 100, 102, 83, 87, 108, 114, 92 }, // base cell 100 + { 101, 102, 107, 112, 81, 85, 94 }, // base cell 101 + { 102, 101, 87, 85, 114, 112, 100 }, // base cell 102 + { 103, 91, 97, 89, 116, 105, 111 }, // base cell 103 + { 104, 107, 110, 115, 86, 94, 96 }, // base cell 104 + { 105, 88, 103, 91, 113, 99, 116 }, // base cell 105 + { 106, 93, 99, 90, 117, 109, 113 }, // base cell 106 + { 107, INVALID_BASE_CELL, 101, 94, 115, 104, 112 }, // base cell 107 (pentagon) + { 108, 100, 95, 92, 118, 114, 109 }, // base cell 108 + { 109, 108, 93, 95, 117, 118, 106 }, // base cell 109 + { 110, 98, 104, 96, 119, 111, 115 }, // base cell 110 + { 111, 97, 110, 98, 116, 103, 119 }, // base cell 111 + { 112, 107, 102, 101, 
120, 115, 114 }, // base cell 112 + { 113, 99, 116, 105, 117, 106, 121 }, // base cell 113 + { 114, 112, 100, 102, 118, 120, 108 }, // base cell 114 + { 115, 110, 107, 104, 120, 119, 112 }, // base cell 115 + { 116, 103, 119, 111, 113, 105, 121 }, // base cell 116 + { 117, INVALID_BASE_CELL, 109, 118, 113, 121, 106 }, // base cell 117 (pentagon) + { 118, 120, 108, 114, 117, 121, 109 }, // base cell 118 + { 119, 111, 115, 110, 121, 116, 120 }, // base cell 119 + { 120, 115, 114, 112, 121, 119, 118 }, // base cell 120 + { 121, 116, 120, 119, 117, 113, 118 }, // base cell 121 + }; + + /** @brief Neighboring base cell rotations in each IJK direction. + * + * For each base cell, for each direction, the number of 60 degree + * CCW rotations to the coordinate system of the neighbor is given. + * -1 indicates there is no neighbor in that direction. + */ + private static final int[][] baseCellNeighbor60CCWRots = new int[][] { + { 0, 5, 0, 0, 1, 5, 1 }, // base cell 0 + { 0, 0, 1, 0, 1, 0, 1 }, // base cell 1 + { 0, 0, 0, 0, 0, 5, 0 }, // base cell 2 + { 0, 5, 0, 0, 2, 5, 1 }, // base cell 3 + { 0, -1, 1, 0, 3, 4, 2 }, // base cell 4 (pentagon) + { 0, 0, 1, 0, 1, 0, 1 }, // base cell 5 + { 0, 0, 0, 3, 5, 5, 0 }, // base cell 6 + { 0, 0, 0, 0, 0, 5, 0 }, // base cell 7 + { 0, 5, 0, 0, 0, 5, 1 }, // base cell 8 + { 0, 0, 1, 3, 0, 0, 1 }, // base cell 9 + { 0, 0, 1, 3, 0, 0, 1 }, // base cell 10 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 11 + { 0, 5, 0, 0, 3, 5, 1 }, // base cell 12 + { 0, 0, 1, 0, 1, 0, 1 }, // base cell 13 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 14 (pentagon) + { 0, 5, 0, 0, 4, 5, 1 }, // base cell 15 + { 0, 0, 0, 0, 0, 5, 0 }, // base cell 16 + { 0, 3, 3, 3, 3, 0, 3 }, // base cell 17 + { 0, 0, 0, 3, 5, 5, 0 }, // base cell 18 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 19 + { 0, 3, 3, 3, 0, 3, 0 }, // base cell 20 + { 0, 0, 0, 3, 5, 5, 0 }, // base cell 21 + { 0, 0, 1, 0, 1, 0, 1 }, // base cell 22 + { 0, 3, 3, 3, 0, 3, 0 }, // base cell 23 + { 0, -1, 3, 0, 5, 2, 
0 }, // base cell 24 (pentagon) + { 0, 0, 0, 3, 0, 0, 3 }, // base cell 25 + { 0, 0, 0, 0, 0, 5, 0 }, // base cell 26 + { 0, 3, 0, 0, 0, 3, 3 }, // base cell 27 + { 0, 0, 1, 0, 1, 0, 1 }, // base cell 28 + { 0, 0, 1, 3, 0, 0, 1 }, // base cell 29 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 30 + { 0, 0, 0, 0, 0, 5, 0 }, // base cell 31 + { 0, 3, 3, 3, 3, 0, 3 }, // base cell 32 + { 0, 0, 1, 3, 0, 0, 1 }, // base cell 33 + { 0, 3, 3, 3, 3, 0, 3 }, // base cell 34 + { 0, 0, 3, 0, 3, 0, 3 }, // base cell 35 + { 0, 0, 0, 3, 0, 0, 3 }, // base cell 36 + { 0, 3, 0, 0, 0, 3, 3 }, // base cell 37 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 38 (pentagon) + { 0, 3, 0, 0, 3, 3, 0 }, // base cell 39 + { 0, 3, 0, 0, 3, 3, 0 }, // base cell 40 + { 0, 0, 0, 3, 5, 5, 0 }, // base cell 41 + { 0, 0, 0, 3, 5, 5, 0 }, // base cell 42 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 43 + { 0, 0, 1, 3, 0, 0, 1 }, // base cell 44 + { 0, 0, 3, 0, 0, 3, 3 }, // base cell 45 + { 0, 0, 0, 3, 0, 3, 0 }, // base cell 46 + { 0, 3, 3, 3, 0, 3, 0 }, // base cell 47 + { 0, 3, 3, 3, 0, 3, 0 }, // base cell 48 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 49 (pentagon) + { 0, 0, 0, 3, 0, 0, 3 }, // base cell 50 + { 0, 3, 0, 0, 0, 3, 3 }, // base cell 51 + { 0, 0, 3, 0, 3, 0, 3 }, // base cell 52 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 53 + { 0, 0, 3, 0, 3, 0, 3 }, // base cell 54 + { 0, 0, 3, 0, 0, 3, 3 }, // base cell 55 + { 0, 3, 3, 3, 0, 0, 3 }, // base cell 56 + { 0, 0, 0, 3, 0, 3, 0 }, // base cell 57 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 58 (pentagon) + { 0, 3, 3, 3, 3, 3, 0 }, // base cell 59 + { 0, 3, 3, 3, 3, 3, 0 }, // base cell 60 + { 0, 3, 3, 3, 3, 0, 3 }, // base cell 61 + { 0, 3, 3, 3, 3, 0, 3 }, // base cell 62 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 63 (pentagon) + { 0, 0, 0, 3, 0, 0, 3 }, // base cell 64 + { 0, 3, 3, 3, 0, 3, 0 }, // base cell 65 + { 0, 3, 0, 0, 0, 3, 3 }, // base cell 66 + { 0, 3, 0, 0, 3, 3, 0 }, // base cell 67 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 68 + { 0, 3, 0, 0, 3, 3, 0 }, 
// base cell 69 + { 0, 0, 3, 0, 0, 3, 3 }, // base cell 70 + { 0, 0, 0, 3, 0, 3, 0 }, // base cell 71 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 72 (pentagon) + { 0, 3, 3, 3, 0, 0, 3 }, // base cell 73 + { 0, 3, 3, 3, 0, 0, 3 }, // base cell 74 + { 0, 0, 0, 3, 0, 0, 3 }, // base cell 75 + { 0, 3, 0, 0, 0, 3, 3 }, // base cell 76 + { 0, 0, 0, 3, 0, 5, 0 }, // base cell 77 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 78 + { 0, 0, 1, 3, 1, 0, 1 }, // base cell 79 + { 0, 0, 1, 3, 1, 0, 1 }, // base cell 80 + { 0, 0, 3, 0, 3, 0, 3 }, // base cell 81 + { 0, 0, 3, 0, 3, 0, 3 }, // base cell 82 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 83 (pentagon) + { 0, 0, 3, 0, 0, 3, 3 }, // base cell 84 + { 0, 0, 0, 3, 0, 3, 0 }, // base cell 85 + { 0, 3, 0, 0, 3, 3, 0 }, // base cell 86 + { 0, 3, 3, 3, 3, 3, 0 }, // base cell 87 + { 0, 0, 0, 3, 0, 5, 0 }, // base cell 88 + { 0, 3, 3, 3, 3, 3, 0 }, // base cell 89 + { 0, 0, 0, 0, 0, 0, 1 }, // base cell 90 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 91 + { 0, 0, 0, 3, 0, 5, 0 }, // base cell 92 + { 0, 5, 0, 0, 5, 5, 0 }, // base cell 93 + { 0, 0, 3, 0, 0, 3, 3 }, // base cell 94 + { 0, 0, 0, 0, 0, 0, 1 }, // base cell 95 + { 0, 0, 0, 3, 0, 3, 0 }, // base cell 96 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 97 (pentagon) + { 0, 3, 3, 3, 0, 0, 3 }, // base cell 98 + { 0, 5, 0, 0, 5, 5, 0 }, // base cell 99 + { 0, 0, 1, 3, 1, 0, 1 }, // base cell 100 + { 0, 3, 3, 3, 0, 0, 3 }, // base cell 101 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 102 + { 0, 0, 1, 3, 1, 0, 1 }, // base cell 103 + { 0, 3, 3, 3, 3, 3, 0 }, // base cell 104 + { 0, 0, 0, 0, 0, 0, 1 }, // base cell 105 + { 0, 0, 1, 0, 3, 5, 1 }, // base cell 106 + { 0, -1, 3, 0, 5, 2, 0 }, // base cell 107 (pentagon) + { 0, 5, 0, 0, 5, 5, 0 }, // base cell 108 + { 0, 0, 1, 0, 4, 5, 1 }, // base cell 109 + { 0, 3, 3, 3, 0, 0, 0 }, // base cell 110 + { 0, 0, 0, 3, 0, 5, 0 }, // base cell 111 + { 0, 0, 0, 3, 0, 5, 0 }, // base cell 112 + { 0, 0, 1, 0, 2, 5, 1 }, // base cell 113 + { 0, 0, 0, 0, 0, 0, 1 }, // 
base cell 114 + { 0, 0, 1, 3, 1, 0, 1 }, // base cell 115 + { 0, 5, 0, 0, 5, 5, 0 }, // base cell 116 + { 0, -1, 1, 0, 3, 4, 2 }, // base cell 117 (pentagon) + { 0, 0, 1, 0, 0, 5, 1 }, // base cell 118 + { 0, 0, 0, 0, 0, 0, 1 }, // base cell 119 + { 0, 5, 0, 0, 5, 5, 0 }, // base cell 120 + { 0, 0, 1, 0, 1, 5, 1 }, // base cell 121 + }; + + private static final int E_SUCCESS = 0; // Success (no error) + private static final int E_PENTAGON = 9; // Pentagon distortion was encountered which the algorithm + private static final int E_CELL_INVALID = 5; // `H3Index` cell argument was not valid + private static final int E_FAILED = 1; // The operation failed but a more specific error is not available + + /** + * Directions used for traversing a hexagonal ring counterclockwise around + * {1, 0, 0} + * + *

+     *      _
+     *    _/ \\_
+     *   / \\5/ \\
+     *   \\0/ \\4/
+     *   / \\_/ \\
+     *   \\1/ \\3/
+     *     \\2/
+     * 
+ */ + private static final CoordIJK.Direction[] DIRECTIONS = new CoordIJK.Direction[] { + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT }; + + /** + * New digit when traversing along class II grids. + * + * Current digit -> direction -> new digit. + */ + private static final CoordIJK.Direction[][] NEW_DIGIT_II = new CoordIJK.Direction[][] { + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT }, + { + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT }, + { + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT }, + { + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT }, + { + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + 
CoordIJK.Direction.I_AXES_DIGIT }, + { + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT } }; + + /** + * New traversal direction when traversing along class II grids. + * + * Current digit -> direction -> new ap7 move (at coarser level). + */ + private static final CoordIJK.Direction[][] NEW_ADJUSTMENT_II = new CoordIJK.Direction[][] { + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + 
CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT } }; + + /** + * New traversal direction when traversing along class III grids. + * + * Current digit -> direction -> new ap7 move (at coarser level). + */ + private static final CoordIJK.Direction[][] NEW_DIGIT_III = new CoordIJK.Direction[][] { + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT }, + { + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT }, + { + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT }, + { + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT }, + { + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + 
CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT }, + { + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT } }; + + /** + * New traversal direction when traversing along class III grids. + * + * Current digit -> direction -> new ap7 move (at coarser level). + */ + private static final CoordIJK.Direction[][] NEW_ADJUSTMENT_III = new CoordIJK.Direction[][] { + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.J_AXES_DIGIT, + CoordIJK.Direction.JK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.K_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + 
CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.IK_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT }, + { + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.I_AXES_DIGIT, + CoordIJK.Direction.CENTER_DIGIT, + CoordIJK.Direction.IJ_AXES_DIGIT } }; + + /** + * Produce all neighboring cells. For Hexagons there will be 6 neighbors while + * for pentagon just 5. + * Output is placed in the provided array in no particular order. + * + * @param origin origin cell + */ + public static long[] hexRing(long origin) { + final long[] out = H3Index.H3_is_pentagon(origin) ? new long[5] : new long[6]; + int idx = 0; + long previous = -1; + for (int i = 0; i < 6; i++) { + int[] rotations = new int[] { 0 }; + long[] nextNeighbor = new long[] { 0 }; + int neighborResult = h3NeighborRotations(origin, DIRECTIONS[i].digit(), rotations, nextNeighbor); + if (neighborResult != E_PENTAGON) { + // E_PENTAGON is an expected case when trying to traverse off of + // pentagons. + if (neighborResult != E_SUCCESS) { + throw new IllegalArgumentException(); + } + if (previous != nextNeighbor[0]) { + out[idx++] = nextNeighbor[0]; + previous = nextNeighbor[0]; + } + } + } + assert idx == out.length; + return out; + } + + /** + * Returns the hexagon index neighboring the origin, in the direction dir. + * + * Implementation note: The only reachable case where this returns 0 is if the + * origin is a pentagon and the translation is in the k direction. Thus, + * 0 can only be returned if origin is a pentagon. + * + * @param origin Origin index + * @param dir Direction to move in + * @param rotations Number of ccw rotations to perform to reorient the + * translation vector. Will be modified to the new number of + * rotations to perform (such as when crossing a face edge.) 
+ * @param out H3Index of the specified neighbor if succesful + * @return E_SUCCESS on success + */ + private static int h3NeighborRotations(long origin, int dir, int[] rotations, long[] out) { + long current = origin; + + for (int i = 0; i < rotations[0]; i++) { + dir = CoordIJK.rotate60ccw(dir); + } + + int newRotations = 0; + int oldBaseCell = H3Index.H3_get_base_cell(current); + if (oldBaseCell < 0 || oldBaseCell >= Constants.NUM_BASE_CELLS) { // LCOV_EXCL_BR_LINE + // Base cells less than zero can not be represented in an index + return E_CELL_INVALID; + } + int oldLeadingDigit = H3Index.h3LeadingNonZeroDigit(current); + + // Adjust the indexing digits and, if needed, the base cell. + int r = H3Index.H3_get_resolution(current) - 1; + while (true) { + if (r == -1) { + current = H3Index.H3_set_base_cell(current, baseCellNeighbors[oldBaseCell][dir]); + newRotations = baseCellNeighbor60CCWRots[oldBaseCell][dir]; + + if (H3Index.H3_get_base_cell(current) == INVALID_BASE_CELL) { + // Adjust for the deleted k vertex at the base cell level. + // This edge actually borders a different neighbor. + current = H3Index.H3_set_base_cell(current, baseCellNeighbors[oldBaseCell][CoordIJK.Direction.IK_AXES_DIGIT.digit()]); + newRotations = baseCellNeighbor60CCWRots[oldBaseCell][CoordIJK.Direction.IK_AXES_DIGIT.digit()]; + + // perform the adjustment for the k-subsequence we're skipping + // over. 
+ current = H3Index.h3Rotate60ccw(current); + rotations[0] = rotations[0] + 1; + } + + break; + } else { + int oldDigit = H3Index.H3_get_index_digit(current, r + 1); + int nextDir; + if (oldDigit == CoordIJK.Direction.INVALID_DIGIT.digit()) { + // Only possible on invalid input + return E_CELL_INVALID; + } else if (H3Index.isResolutionClassIII(r + 1)) { + current = H3Index.H3_set_index_digit(current, r + 1, NEW_DIGIT_II[oldDigit][dir].digit()); + nextDir = NEW_ADJUSTMENT_II[oldDigit][dir].digit(); + } else { + current = H3Index.H3_set_index_digit(current, r + 1, NEW_DIGIT_III[oldDigit][dir].digit()); + nextDir = NEW_ADJUSTMENT_III[oldDigit][dir].digit(); + } + + if (nextDir != CoordIJK.Direction.CENTER_DIGIT.digit()) { + dir = nextDir; + r--; + } else { + // No more adjustment to perform + break; + } + } + } + + int newBaseCell = H3Index.H3_get_base_cell(current); + if (BaseCells.isBaseCellPentagon(newBaseCell)) { + boolean alreadyAdjustedKSubsequence = false; + + // force rotation out of missing k-axes sub-sequence + if (H3Index.h3LeadingNonZeroDigit(current) == CoordIJK.Direction.K_AXES_DIGIT.digit()) { + if (oldBaseCell != newBaseCell) { + // in this case, we traversed into the deleted + // k subsequence of a pentagon base cell. + // We need to rotate out of that case depending + // on how we got here. + // check for a cw/ccw offset face; default is ccw + + if (BaseCells.baseCellIsCwOffset(newBaseCell, BaseCells.getBaseFaceIJK(oldBaseCell).face)) { + current = H3Index.h3Rotate60cw(current); + } else { + // See cwOffsetPent in testGridDisk.c for why this is + // unreachable. + current = H3Index.h3Rotate60ccw(current); // LCOV_EXCL_LINE + } + alreadyAdjustedKSubsequence = true; + } else { + // In this case, we traversed into the deleted + // k subsequence from within the same pentagon + // base cell. 
+ if (oldLeadingDigit == CoordIJK.Direction.CENTER_DIGIT.digit()) { + // Undefined: the k direction is deleted from here + return E_PENTAGON; + } else if (oldLeadingDigit == CoordIJK.Direction.JK_AXES_DIGIT.digit()) { + // Rotate out of the deleted k subsequence + // We also need an additional change to the direction we're + // moving in + current = H3Index.h3Rotate60ccw(current); + rotations[0] = rotations[0] + 1; + } else if (oldLeadingDigit == CoordIJK.Direction.IK_AXES_DIGIT.digit()) { + // Rotate out of the deleted k subsequence + // We also need an additional change to the direction we're + // moving in + current = H3Index.h3Rotate60cw(current); + rotations[0] = rotations[0] + 5; + } else { + // Should never occur + return E_FAILED; // LCOV_EXCL_LINE + } + } + } + + for (int i = 0; i < newRotations; i++) + current = H3Index.h3RotatePent60ccw(current); + + // Account for differing orientation of the base cells (this edge + // might not follow properties of some other edges.) + if (oldBaseCell != newBaseCell) { + if (BaseCells.isBaseCellPolarPentagon(newBaseCell)) { + // 'polar' base cells behave differently because they have all + // i neighbors. + if (oldBaseCell != 118 + && oldBaseCell != 8 + && H3Index.h3LeadingNonZeroDigit(current) != CoordIJK.Direction.JK_AXES_DIGIT.digit()) { + rotations[0] = rotations[0] + 1; + } + } else if (H3Index.h3LeadingNonZeroDigit(current) == CoordIJK.Direction.IK_AXES_DIGIT.digit() + && alreadyAdjustedKSubsequence == false) { + // account for distortion introduced to the 5 neighbor by the + // deleted k subsequence. 
+ rotations[0] = rotations[0] + 1; + } + } + } else { + for (int i = 0; i < newRotations; i++) + current = H3Index.h3Rotate60ccw(current); + } + + rotations[0] = (rotations[0] + newRotations) % 6; + out[0] = current; + + return E_SUCCESS; + } + +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/Iterator.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Iterator.java new file mode 100644 index 00000000..ad21842d --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Iterator.java @@ -0,0 +1,310 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** + * Iterator structures and functions for the children of a cell. + */ +final class Iterator { + /** + * Invalid index used to indicate an error from latLngToCell and related + * functions or missing data in arrays of H3 indices. Analogous to NaN in + * floating point. 
+ */ + public static final long H3_NULL = 0; + + /** + * The number of bits in a single H3 resolution digit. + */ + private static final int H3_PER_DIGIT_OFFSET = 3; + + /** + * IterCellsChildren: struct for iterating through the descendants of + * a given cell. + *

+ * Constructors: + *

+ * Initialize with either `iterInitParent` or `iterInitBaseCellNum`. + * `iterInitParent` sets up an iterator for all the children of a given + * parent cell at a given resolution. + *

+ * `iterInitBaseCellNum` sets up an iterator for children cells, given + * a base cell number (0--121). + *

+ * Iteration: + *

+ * Step iterator with `iterStepChild`. + * During the lifetime of the `IterCellsChildren`, the current iterate + * is accessed via the `IterCellsChildren.h` member. + * When the iterator is exhausted or if there was an error in initialization, + * `IterCellsChildren.h` will be `H3_NULL` even after calling `iterStepChild`. + */ + static class IterCellsChildren { + long h; + int _parentRes; // parent resolution + int _skipDigit; // this digit skips `1` for pentagons + + IterCellsChildren(long h, int _parentRes, int _skipDigit) { + this.h = h; + this._parentRes = _parentRes; + this._skipDigit = _skipDigit; + } + } + + /** + * Create a fully nulled-out child iterator for when an iterator is exhausted. + * This helps minimize the chance that a user will depend on the iterator + * internal state after it's exhausted, like the child resolution, for + * example. + */ + private static IterCellsChildren nullIter() { + return new IterCellsChildren(H3_NULL, -1, -1); + } + + /** + ## Logic for iterating through the children of a cell + We'll describe the logic for .... + - normal (non pentagon iteration) + - pentagon iteration. define "pentagon digit" + ### Cell Index Component Diagrams + The lower 56 bits of an H3 Cell Index describe the following index components: + - the cell resolution (4 bits) + - the base cell number (7 bits) + - the child cell digit for each resolution from 1 to 15 (3*15 = 45 bits) + These are the bits we'll be focused on when iterating through child cells. + To help describe the iteration logic, we'll use diagrams displaying the + (decimal) values for each component like: + child digit for resolution 2 + / + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | ... | + |-----|-------------|---|---|---|---|---|---|-----| + | 9 | 17 | 5 | 3 | 0 | 6 | 2 | 1 | ... 
| + ### Iteration through children of a hexagon (but not a pentagon) + Iteration through the children of a *hexagon* (but not a pentagon) + simply involves iterating through all the children values (0--6) + for each child digit (up to the child's resolution). + For example, suppose a resolution 3 hexagon index has the following + components: + parent resolution + / + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | ... | + |-----|-------------|---|---|---|---|---|---|-----| + | 3 | 17 | 3 | 5 | 1 | 7 | 7 | 7 | ... | + The iteration through all children of resolution 6 would look like: + parent res child res + / / + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | ... | + |-----|-------------|---|---|---|---|---|---|---|---|-----| + | 6 | 17 | 3 | 5 | 1 | 0 | 0 | 0 | 7 | 7 | ... | + | 6 | 17 | 3 | 5 | 1 | 0 | 0 | 1 | 7 | 7 | ... | + | ... | | | | | | | | | | | + | 6 | 17 | 3 | 5 | 1 | 0 | 0 | 6 | 7 | 7 | ... | + | 6 | 17 | 3 | 5 | 1 | 0 | 1 | 0 | 7 | 7 | ... | + | 6 | 17 | 3 | 5 | 1 | 0 | 1 | 1 | 7 | 7 | ... | + | ... | | | | | | | | | | | + | 6 | 17 | 3 | 5 | 1 | 6 | 6 | 6 | 7 | 7 | ... | + ### Step sequence on a *pentagon* cell + Pentagon cells have a base cell number (e.g., 97) corresponding to a + resolution 0 pentagon, and have all zeros from digit 1 to the digit + corresponding to the cell's resolution. + (We'll drop the ellipses from now on, knowing that digits should contain + 7's beyond the cell resolution.) + parent res child res + / / + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 0 | 0 | + Iteration through children of a *pentagon* is almost the same + as *hexagon* iteration, except that we skip the *first* 1 value + that appears in the "skip digit". This corresponds to the fact + that a pentagon only has 6 children, which are denoted with + the numbers {0,2,3,4,5,6}. + The skip digit starts at the child resolution position. 
+ When iterating through children more than one resolution below + the parent, we move the skip digit to the left + (up to the next coarser resolution) each time we skip the 1 value + in that digit. + Iteration would start like: + parent res child res + / / + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 0 | 0 | + \ + skip digit + Noticing we skip the 1 value and move the skip digit, + the next iterate would be: + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 0 | 2 | + \ + skip digit + Iteration continues normally until we get to: + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 0 | 6 | + \ + skip digit + which is followed by (skipping the 1): + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 2 | 0 | + \ + skip digit + For the next iterate, we won't skip the `1` in the previous digit + because it is no longer the skip digit: + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 2 | 1 | + \ + skip digit + Iteration continues normally until we're right before the next skip + digit: + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 0 | 6 | 6 | + \ + skip digit + Which is followed by + | res | base cell # | 1 | 2 | 3 | 4 | 5 | 6 | + |-----|-------------|---|---|---|---|---|---| + | 6 | 97 | 0 | 0 | 0 | 2 | 0 | 0 | + \ + skip digit + and so on. + */ + + /** + * Initialize a IterCellsChildren struct representing the sequence giving + * the children of cell `h` at resolution `childRes`. + *

+ * At any point in the iteration, starting once + * the struct is initialized, IterCellsChildren.h gives the current child. + *

+ * Also, IterCellsChildren.h == H3_NULL when all the children have been iterated + * through, or if the input to `iterInitParent` was invalid. + */ + public static IterCellsChildren iterInitParent(long h, int childRes) { + + int parentRes = H3Index.H3_get_resolution(h); + + if (childRes < parentRes || childRes > Constants.MAX_H3_RES || h == H3_NULL) { + return nullIter(); + } + + long newH = zeroIndexDigits(h, parentRes + 1, childRes); + newH = H3Index.H3_set_resolution(newH, childRes); + + int _skipDigit; + if (H3Index.H3_is_pentagon(newH)) { + // The skip digit skips `1` for pentagons. + // The "_skipDigit" moves to the left as we count up from the + // child resolution to the parent resolution. + _skipDigit = childRes; + } else { + // if not a pentagon, we can ignore "skip digit" logic + _skipDigit = -1; + } + + return new IterCellsChildren(newH, parentRes, _skipDigit); + } + + /** + * Step a IterCellsChildren to the next child cell. + * When the iteration is over, IterCellsChildren.h will be H3_NULL. + * Handles iterating through hexagon and pentagon cells. + */ + public static void iterStepChild(IterCellsChildren it) { + // once h == H3_NULL, the iterator returns an infinite sequence of H3_NULL + if (it.h == H3_NULL) return; + + int childRes = H3Index.H3_get_resolution(it.h); + + incrementResDigit(it, childRes); + + for (int i = childRes; i >= it._parentRes; i--) { + if (i == it._parentRes) { + // if we're modifying the parent resolution digit, then we're done + // *it = _null_iter(); + it.h = H3_NULL; + return; + } + + // PENTAGON_SKIPPED_DIGIT == 1 + if (i == it._skipDigit && getResDigit(it, i) == CoordIJK.Direction.PENTAGON_SKIPPED_DIGIT.digit()) { + // Then we are iterating through the children of a pentagon cell. + // All children of a pentagon have the property that the first + // nonzero digit between the parent and child resolutions is + // not 1. + // I.e., we never see a sequence like 00001. + // Thus, we skip the `1` in this digit. 
+ incrementResDigit(it, i); + it._skipDigit -= 1; + return; + } + + // INVALID_DIGIT == 7 + if (getResDigit(it, i) == CoordIJK.Direction.INVALID_DIGIT.digit()) { + incrementResDigit(it, i); // zeros out it[i] and increments it[i-1] by 1 + } else { + break; + } + } + } + + // extract the `res` digit (0--7) of the current cell + private static int getResDigit(IterCellsChildren it, int res) { + return H3Index.H3_get_index_digit(it.h, res); + } + + /** + * Zero out index digits from start to end, inclusive. + * No-op if start > end. + */ + private static long zeroIndexDigits(long h, int start, int end) { + if (start > end) { + return h; + } + + long m = 0; + + m = ~m; + m <<= H3_PER_DIGIT_OFFSET * (end - start + 1); + m = ~m; + m <<= H3_PER_DIGIT_OFFSET * (Constants.MAX_H3_RES - end); + m = ~m; + + return h & m; + } + + // increment the digit (0--7) at location `res` + private static void incrementResDigit(IterCellsChildren it, int res) { + long val = 1; + val <<= H3_PER_DIGIT_OFFSET * (Constants.MAX_H3_RES - res); + it.h += val; + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/LatLng.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/LatLng.java new file mode 100644 index 00000000..427e1e3d --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/LatLng.java @@ -0,0 +1,125 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2021 Uber Technologies, Inc. + */ +package org.opensearch.geospatial.h3; + +/** pair of latitude/longitude */ +public final class LatLng { + + // lat / lon in radians + private final double lon; + private final double lat; + + LatLng(double lat, double lon) { + this.lon = lon; + this.lat = lat; + } + + /** Returns latitude in radians */ + public double getLatRad() { + return lat; + } + + /** Returns longitude in radians */ + public double getLonRad() { + return lon; + } + + /** Returns latitude in degrees */ + public double getLatDeg() { + return Math.toDegrees(getLatRad()); + } + + /** Returns longitude in degrees */ + public double getLonDeg() { + return Math.toDegrees(getLonRad()); + } + + /** + * Encodes a coordinate on the sphere to the corresponding icosahedral face and + * containing 2D hex coordinates relative to that face center. + * + * @param res The desired H3 resolution for the encoding. 
+ */ + FaceIJK geoToFaceIJK(int res) { + Vec3d v3d = new Vec3d(this); + + // determine the icosahedron face + int face = 0; + double sqd = v3d.pointSquareDist(Vec3d.faceCenterPoint[0]); + for (int i = 1; i < Vec3d.faceCenterPoint.length; i++) { + double sqdT = v3d.pointSquareDist(Vec3d.faceCenterPoint[i]); + if (sqdT < sqd) { + face = i; + sqd = sqdT; + } + } + // cos(r) = 1 - 2 * sin^2(r/2) = 1 - 2 * (sqd / 4) = 1 - sqd/2 + double r = Math.acos(1 - sqd / 2); + + if (r < Constants.EPSILON) { + return new FaceIJK(face, new Vec2d(0.0, 0.0).hex2dToCoordIJK()); + } + + // now have face and r, now find CCW theta from CII i-axis + double theta = Vec2d.posAngleRads( + Vec2d.faceAxesAzRadsCII[face][0] - Vec2d.posAngleRads(Vec2d.faceCenterGeo[face].geoAzimuthRads(this)) + ); + + // adjust theta for Class III (odd resolutions) + if (H3Index.isResolutionClassIII(res)) { + theta = Vec2d.posAngleRads(theta - Constants.M_AP7_ROT_RADS); + } + + // perform gnomonic scaling of r + r = Math.tan(r); + + // scale for current resolution length u + r /= Constants.RES0_U_GNOMONIC; + for (int i = 0; i < res; i++) { + r *= Constants.M_SQRT7; + } + + // we now have (r, theta) in hex2d with theta ccw from x-axes + + // convert to local x,y + Vec2d vec2d = new Vec2d(r * Math.cos(theta), r * Math.sin(theta)); + return new FaceIJK(face, vec2d.hex2dToCoordIJK()); + } + + /** + * Determines the azimuth to the provided LatLng in radians. + * + * @param p The spherical coordinates. + * @return The azimuth in radians. 
+ */ + private double geoAzimuthRads(LatLng p) { + return Math.atan2( + Math.cos(p.lat) * Math.sin(p.lon - lon), + Math.cos(lat) * Math.sin(p.lat) - Math.sin(lat) * Math.cos(p.lat) * Math.cos(p.lon - lon) + ); + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec2d.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec2d.java new file mode 100644 index 00000000..aefd3ce8 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec2d.java @@ -0,0 +1,408 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2016-2017 Uber Technologies, Inc. 
+ */ +package org.opensearch.geospatial.h3; + +import java.util.Objects; + +/** + * 2D floating-point vector + */ +final class Vec2d { + + /** sin(60') */ + private static final double M_SIN60 = Constants.M_SQRT3_2; + + /** + * icosahedron face centers in lat/lng radians + */ + public static final LatLng[] faceCenterGeo = new LatLng[] { + new LatLng(0.803582649718989942, 1.248397419617396099), // face 0 + new LatLng(1.307747883455638156, 2.536945009877921159), // face 1 + new LatLng(1.054751253523952054, -1.347517358900396623), // face 2 + new LatLng(0.600191595538186799, -0.450603909469755746), // face 3 + new LatLng(0.491715428198773866, 0.401988202911306943), // face 4 + new LatLng(0.172745327415618701, 1.678146885280433686), // face 5 + new LatLng(0.605929321571350690, 2.953923329812411617), // face 6 + new LatLng(0.427370518328979641, -1.888876200336285401), // face 7 + new LatLng(-0.079066118549212831, -0.733429513380867741), // face 8 + new LatLng(-0.230961644455383637, 0.506495587332349035), // face 9 + new LatLng(0.079066118549212831, 2.408163140208925497), // face 10 + new LatLng(0.230961644455383637, -2.635097066257444203), // face 11 + new LatLng(-0.172745327415618701, -1.463445768309359553), // face 12 + new LatLng(-0.605929321571350690, -0.187669323777381622), // face 13 + new LatLng(-0.427370518328979641, 1.252716453253507838), // face 14 + new LatLng(-0.600191595538186799, 2.690988744120037492), // face 15 + new LatLng(-0.491715428198773866, -2.739604450678486295), // face 16 + new LatLng(-0.803582649718989942, -1.893195233972397139), // face 17 + new LatLng(-1.307747883455638156, -0.604647643711872080), // face 18 + new LatLng(-1.054751253523952054, 1.794075294689396615), // face 19 + }; + + /** + * icosahedron face ijk axes as azimuth in radians from face center to + * vertex 0/1/2 respectively + */ + public static final double[][] faceAxesAzRadsCII = new double[][] { + { 5.619958268523939882, 3.525563166130744542, 1.431168063737548730 }, // face 
0 + { 5.760339081714187279, 3.665943979320991689, 1.571548876927796127 }, // face 1 + { 0.780213654393430055, 4.969003859179821079, 2.874608756786625655 }, // face 2 + { 0.430469363979999913, 4.619259568766391033, 2.524864466373195467 }, // face 3 + { 6.130269123335111400, 4.035874020941915804, 1.941478918548720291 }, // face 4 + { 2.692877706530642877, 0.598482604137447119, 4.787272808923838195 }, // face 5 + { 2.982963003477243874, 0.888567901084048369, 5.077358105870439581 }, // face 6 + { 3.532912002790141181, 1.438516900396945656, 5.627307105183336758 }, // face 7 + { 3.494305004259568154, 1.399909901866372864, 5.588700106652763840 }, // face 8 + { 3.003214169499538391, 0.908819067106342928, 5.097609271892733906 }, // face 9 + { 5.930472956509811562, 3.836077854116615875, 1.741682751723420374 }, // face 10 + { 0.138378484090254847, 4.327168688876645809, 2.232773586483450311 }, // face 11 + { 0.448714947059150361, 4.637505151845541521, 2.543110049452346120 }, // face 12 + { 0.158629650112549365, 4.347419854898940135, 2.253024752505744869 }, // face 13 + { 5.891865957979238535, 3.797470855586042958, 1.703075753192847583 }, // face 14 + { 2.711123289609793325, 0.616728187216597771, 4.805518392002988683 }, // face 15 + { 3.294508837434268316, 1.200113735041072948, 5.388903939827463911 }, // face 16 + { 3.804819692245439833, 1.710424589852244509, 5.899214794638635174 }, // face 17 + { 3.664438879055192436, 1.570043776661997111, 5.758833981448388027 }, // face 18 + { 2.361378999196363184, 0.266983896803167583, 4.455774101589558636 }, // face 19 + }; + + /** + * pi + */ + private static double M_PI = 3.14159265358979323846; + /** + * pi / 2.0 + */ + private static double M_PI_2 = 1.5707963267948966; + /** + * 2.0 * PI + */ + public static double M_2PI = 6.28318530717958647692528676655900576839433; + + private final double x; /// < x component + private final double y; /// < y component + + Vec2d(double x, double y) { + this.x = x; + this.y = y; + } + + /** + * 
Determines the center point in spherical coordinates of a cell given by 2D + * hex coordinates on a particular icosahedral face. + * + * @param face The icosahedral face upon which the 2D hex coordinate system is + * centered. + * @param res The H3 resolution of the cell. + * @param substrate Indicates whether or not this grid is actually a substrate + * grid relative to the specified resolution. + */ + public LatLng hex2dToGeo(int face, int res, boolean substrate) { + // calculate (r, theta) in hex2d + double r = v2dMag(); + + if (r < Constants.EPSILON) { + return faceCenterGeo[face]; + } + + double theta = Math.atan2(y, x); + + // scale for current resolution length u + for (int i = 0; i < res; i++) { + r /= Constants.M_SQRT7; + } + + // scale accordingly if this is a substrate grid + if (substrate) { + r /= 3.0; + if (H3Index.isResolutionClassIII(res)) { + r /= Constants.M_SQRT7; + } + } + + r *= Constants.RES0_U_GNOMONIC; + + // perform inverse gnomonic scaling of r + r = Math.atan(r); + + // adjust theta for Class III + // if a substrate grid, then it's already been adjusted for Class III + if (substrate == false && H3Index.isResolutionClassIII(res)) theta = posAngleRads(theta + Constants.M_AP7_ROT_RADS); + + // find theta as an azimuth + theta = posAngleRads(faceAxesAzRadsCII[face][0] - theta); + + // now find the point at (r,theta) from the face center + return geoAzDistanceRads(faceCenterGeo[face], theta, r); + } + + /** + * Determine the containing hex in ijk+ coordinates for a 2D cartesian + * coordinate vector (from DGGRID). 
+ * + */ + public CoordIJK hex2dToCoordIJK() { + double a1, a2; + double x1, x2; + int m1, m2; + double r1, r2; + + // quantize into the ij system and then normalize + int k = 0; + int i; + int j; + + a1 = Math.abs(x); + a2 = Math.abs(y); + + // first do a reverse conversion + x2 = a2 / M_SIN60; + x1 = a1 + x2 / 2.0; + + // check if we have the center of a hex + m1 = (int) x1; + m2 = (int) x2; + + // otherwise round correctly + r1 = x1 - m1; + r2 = x2 - m2; + + if (r1 < 0.5) { + if (r1 < 1.0 / 3.0) { + if (r2 < (1.0 + r1) / 2.0) { + i = m1; + j = m2; + } else { + i = m1; + j = m2 + 1; + } + } else { + if (r2 < (1.0 - r1)) { + j = m2; + } else { + j = m2 + 1; + } + + if ((1.0 - r1) <= r2 && r2 < (2.0 * r1)) { + i = m1 + 1; + } else { + i = m1; + } + } + } else { + if (r1 < 2.0 / 3.0) { + if (r2 < (1.0 - r1)) { + j = m2; + } else { + j = m2 + 1; + } + + if ((2.0 * r1 - 1.0) < r2 && r2 < (1.0 - r1)) { + i = m1; + } else { + i = m1 + 1; + } + } else { + if (r2 < (r1 / 2.0)) { + i = m1 + 1; + j = m2; + } else { + i = m1 + 1; + j = m2 + 1; + } + } + } + + // now fold across the axes if necessary + + if (x < 0.0) { + if ((j % 2) == 0) // even + { + int axisi = j / 2; + int diff = i - axisi; + i = i - 2 * diff; + } else { + int axisi = (j + 1) / 2; + int diff = i - axisi; + i = i - (2 * diff + 1); + } + } + + if (y < 0.0) { + i = i - (2 * j + 1) / 2; + j = -1 * j; + } + CoordIJK coordIJK = new CoordIJK(i, j, k); + coordIJK.ijkNormalize(); + return coordIJK; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Vec2d vec2d = (Vec2d) o; + return Double.compare(vec2d.x, x) == 0 && Double.compare(vec2d.y, y) == 0; + } + + @Override + public int hashCode() { + return Objects.hash(x, y); + } + + /** + * Finds the intersection between two lines. Assumes that the lines intersect + * and that the intersection is not at an endpoint of either line. 
+ * + * @param p0 The first endpoint of the first line. + * @param p1 The second endpoint of the first line. + * @param p2 The first endpoint of the second line. + * @param p3 The second endpoint of the second line. + */ + public static Vec2d v2dIntersect(Vec2d p0, Vec2d p1, Vec2d p2, Vec2d p3) { + double[] s1 = new double[2], s2 = new double[2]; + s1[0] = p1.x - p0.x; + s1[1] = p1.y - p0.y; + s2[0] = p3.x - p2.x; + s2[1] = p3.y - p2.y; + + float t; + t = (float) ((s2[0] * (p0.y - p2.y) - s2[1] * (p0.x - p2.x)) / (-s2[0] * s1[1] + s1[0] * s2[1])); + + return new Vec2d(p0.x + (t * s1[0]), p0.y + (t * s1[1])); + } + + /** + * Calculates the magnitude of a 2D cartesian vector. + * + * @return The magnitude of the vector. + */ + private double v2dMag() { + return Math.sqrt(x * x + y * y); + } + + /** + * Normalizes radians to a value between 0.0 and two PI. + * + * @param rads The input radians value. + * @return The normalized radians value. + */ + static double posAngleRads(double rads) { + double tmp = ((rads < 0.0) ? rads + M_2PI : rads); + if (rads >= M_2PI) tmp -= M_2PI; + return tmp; + } + + /** + * Computes the point on the sphere a specified azimuth and distance from + * another point. + * + * @param p1 The first spherical coordinates. + * @param az The desired azimuth from p1. + * @param distance The desired distance from p1, must be non-negative. + * p1. 
+ */ + private static LatLng geoAzDistanceRads(LatLng p1, double az, double distance) { + if (distance < Constants.EPSILON) { + return p1; + } + + double sinlat, sinlng, coslng; + + az = posAngleRads(az); + + double lat, lon; + + // check for due north/south azimuth + if (az < Constants.EPSILON || Math.abs(az - M_PI) < Constants.EPSILON) { + if (az < Constants.EPSILON) {// due north + lat = p1.getLatRad() + distance; + } else { // due south + lat = p1.getLatRad() - distance; + } + if (Math.abs(lat - M_PI_2) < Constants.EPSILON) { // north pole + lat = M_PI_2; + lon = 0.0; + } else if (Math.abs(lat + M_PI_2) < Constants.EPSILON) { // south pole + lat = -M_PI_2; + lon = 0.0; + } else { + lon = constrainLng(p1.getLonRad()); + } + } else { // not due north or south + sinlat = Math.sin(p1.getLatRad()) * Math.cos(distance) + Math.cos(p1.getLatRad()) * Math.sin(distance) * Math.cos(az); + if (sinlat > 1.0) { + sinlat = 1.0; + } + if (sinlat < -1.0) { + sinlat = -1.0; + } + lat = Math.asin(sinlat); + if (Math.abs(lat - M_PI_2) < Constants.EPSILON) // north pole + { + lat = M_PI_2; + lon = 0.0; + } else if (Math.abs(lat + M_PI_2) < Constants.EPSILON) // south pole + { + lat = -M_PI_2; + lon = 0.0; + } else { + sinlng = Math.sin(az) * Math.sin(distance) / Math.cos(lat); + coslng = (Math.cos(distance) - Math.sin(p1.getLatRad()) * Math.sin(lat)) / Math.cos(p1.getLatRad()) / Math.cos(lat); + if (sinlng > 1.0) { + sinlng = 1.0; + } + if (sinlng < -1.0) { + sinlng = -1.0; + } + if (coslng > 1.0) { + coslng = 1.0; + } + if (coslng < -1.0) { + coslng = -1.0; + } + lon = constrainLng(p1.getLonRad() + Math.atan2(sinlng, coslng)); + } + } + return new LatLng(lat, lon); + } + + /** + * constrainLng makes sure longitudes are in the proper bounds + * + * @param lng The origin lng value + * @return The corrected lng value + */ + private static double constrainLng(double lng) { + while (lng > M_PI) { + lng = lng - (2 * M_PI); + } + while (lng < -M_PI) { + lng = lng + (2 * M_PI); + } + 
return lng; + } +} diff --git a/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec3d.java b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec3d.java new file mode 100644 index 00000000..f4b75ef8 --- /dev/null +++ b/libs/h3/src/main/java/org/opensearch/geospatial/h3/Vec3d.java @@ -0,0 +1,87 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + * This project is based on a modification of https://github.com/uber/h3 which is licensed under the Apache 2.0 License. + * + * Copyright 2018, 2020-2021 Uber Technologies, Inc. 
+ */ + +package org.opensearch.geospatial.h3; + +final class Vec3d { + + /** icosahedron face centers in x/y/z on the unit sphere */ + public static final double[][] faceCenterPoint = new double[][] { + { 0.2199307791404606, 0.6583691780274996, 0.7198475378926182 }, // face 0 + { -0.2139234834501421, 0.1478171829550703, 0.9656017935214205 }, // face 1 + { 0.1092625278784797, -0.4811951572873210, 0.8697775121287253 }, // face 2 + { 0.7428567301586791, -0.3593941678278028, 0.5648005936517033 }, // face 3 + { 0.8112534709140969, 0.3448953237639384, 0.4721387736413930 }, // face 4 + { -0.1055498149613921, 0.9794457296411413, 0.1718874610009365 }, // face 5 + { -0.8075407579970092, 0.1533552485898818, 0.5695261994882688 }, // face 6 + { -0.2846148069787907, -0.8644080972654206, 0.4144792552473539 }, // face 7 + { 0.7405621473854482, -0.6673299564565524, -0.0789837646326737 }, // face 8 + { 0.8512303986474293, 0.4722343788582681, -0.2289137388687808 }, // face 9 + { -0.7405621473854481, 0.6673299564565524, 0.0789837646326737 }, // face 10 + { -0.8512303986474292, -0.4722343788582682, 0.2289137388687808 }, // face 11 + { 0.1055498149613919, -0.9794457296411413, -0.1718874610009365 }, // face 12 + { 0.8075407579970092, -0.1533552485898819, -0.5695261994882688 }, // face 13 + { 0.2846148069787908, 0.8644080972654204, -0.4144792552473539 }, // face 14 + { -0.7428567301586791, 0.3593941678278027, -0.5648005936517033 }, // face 15 + { -0.8112534709140971, -0.3448953237639382, -0.4721387736413930 }, // face 16 + { -0.2199307791404607, -0.6583691780274996, -0.7198475378926182 }, // face 17 + { 0.2139234834501420, -0.1478171829550704, -0.9656017935214205 }, // face 18 + { -0.1092625278784796, 0.4811951572873210, -0.8697775121287253 }, // face 19 + }; + + private final double x; + private final double y; + private final double z; + + Vec3d(LatLng latLng) { + double r = Math.cos(latLng.getLatRad()); + this.z = Math.sin(latLng.getLatRad()); + this.x = Math.cos(latLng.getLonRad()) * 
r; + this.y = Math.sin(latLng.getLonRad()) * r; + } + + /** + * Calculate the square of the distance between two 3D coordinates. + * + * @param v The first 3D coordinate. + * @return The square of the distance between the given points. + */ + public double pointSquareDist(double[] v) { + return square(x - v[0]) + square(y - v[1]) + square(z - v[2]); + } + + /** + * Square of a number + * + * @param x The input number. + * @return The square of the input number. + */ + private double square(double x) { + return x * x; + } + +} diff --git a/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellBoundaryTests.java b/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellBoundaryTests.java new file mode 100644 index 00000000..82c22635 --- /dev/null +++ b/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellBoundaryTests.java @@ -0,0 +1,182 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.opensearch.geospatial.h3; + +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.StringTokenizer; + +public class CellBoundaryTests extends OpenSearchTestCase { + + public void testRes0() throws Exception { + processFile("res00cells.txt"); + } + + public void testRes1() throws Exception { + processFile("res01cells.txt"); + } + + public void testRes2() throws Exception { + processFile("res02cells.txt"); + } + + public void testRes3() throws Exception { + processFile("res03cells.txt"); + } + + public void testBc05r08cells() throws Exception { + processFile("bc05r08cells.txt"); + } + + public void testBc05r09cells() throws Exception { + processFile("bc05r09cells.txt"); + } + + public void testBc05r10cells() throws Exception { + processFile("bc05r10cells.txt"); + } + + public void testBc05r11cells() throws Exception { + processFile("bc05r11cells.txt"); + } + + public void testBc05r12cells() throws Exception { + processFile("bc05r12cells.txt"); + } + + public void testBc05r13cells() throws Exception { + processFile("bc05r13cells.txt"); + } + + public void testBc05r05cells() throws Exception { + processFile("bc05r14cells.txt"); + } + + public void testBc05r15cells() throws Exception { + processFile("bc05r15cells.txt"); + } + + public void testBc14r08cells() throws Exception { + processFile("bc14r08cells.txt"); + } + + public void testBc14r09cells() throws Exception { + processFile("bc14r09cells.txt"); + } + + public void testBc14r10cells() throws Exception { + processFile("bc14r10cells.txt"); + } + + public void testBc14r11cells() throws Exception { + processFile("bc14r11cells.txt"); + } + + public void testBc14r12cells() throws Exception { + 
processFile("bc14r12cells.txt"); + } + + public void testBc14r13cells() throws Exception { + processFile("bc14r13cells.txt"); + } + + public void testBc14r14cells() throws Exception { + processFile("bc14r14cells.txt"); + } + + public void testBc14r15cells() throws Exception { + processFile("bc14r15cells.txt"); + } + + public void testBc19r08cells() throws Exception { + processFile("bc19r08cells.txt"); + } + + public void testBc19r09cells() throws Exception { + processFile("bc19r09cells.txt"); + } + + public void testBc19r10cells() throws Exception { + processFile("bc19r10cells.txt"); + } + + public void testBc19r11cells() throws Exception { + processFile("bc19r11cells.txt"); + } + + public void testBc19r12cells() throws Exception { + processFile("bc19r12cells.txt"); + } + + public void testBc19r13cells() throws Exception { + processFile("bc19r13cells.txt"); + } + + public void testBc19r14cells() throws Exception { + processFile("bc19r14cells.txt"); + } + + private void processFile(String file) throws IOException { + InputStream fis = getClass().getResourceAsStream(file + ".gz"); + BufferedReader reader = new BufferedReader(new InputStreamReader(new GzipCompressorInputStream(fis), StandardCharsets.UTF_8)); + String h3Address = reader.readLine(); + while (h3Address != null) { + assertEquals(true, H3.h3IsValid(h3Address)); + long h3 = H3.stringToH3(h3Address); + assertEquals(true, H3.h3IsValid(h3)); + processOne(h3Address, reader); + h3Address = reader.readLine(); + } + } + + private void processOne(String h3Address, BufferedReader reader) throws IOException { + String line = reader.readLine(); + if ("{".equals(line) == false) { + throw new IllegalArgumentException(); + } + line = reader.readLine(); + List points = new ArrayList<>(); + while ("}".equals(line) == false) { + StringTokenizer tokens = new StringTokenizer(line, " "); + assertEquals(2, tokens.countTokens()); + double lat = Double.parseDouble(tokens.nextToken()); + double lon = 
Double.parseDouble(tokens.nextToken()); + points.add(new double[] { lat, lon }); + line = reader.readLine(); + } + CellBoundary boundary = H3.h3ToGeoBoundary(h3Address); + assert boundary.numPoints() == points.size(); + for (int i = 0; i < boundary.numPoints(); i++) { + assertEquals(h3Address, points.get(i)[0], boundary.getLatLon(i).getLatDeg(), 1e-8); + assertEquals(h3Address, points.get(i)[1], boundary.getLatLon(i).getLonDeg(), 1e-8); + } + } +} diff --git a/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellCenterTests.java b/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellCenterTests.java new file mode 100644 index 00000000..1d7b026d --- /dev/null +++ b/libs/h3/src/test/java/org/opensearch/geospatial/h3/CellCenterTests.java @@ -0,0 +1,187 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.opensearch.geospatial.h3; + +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.StringTokenizer; + +public class CellCenterTests extends OpenSearchTestCase { + + public void testRes0() throws Exception { + processFile("res00ic.txt"); + } + + public void testRes1() throws Exception { + processFile("res01ic.txt"); + } + + public void testRes2() throws Exception { + processFile("res02ic.txt"); + } + + public void testRes3() throws Exception { + processFile("res03ic.txt"); + } + + public void testBc05r08centers() throws Exception { + processFile("bc05r08centers.txt"); + } + + public void testBc05r09centers() throws Exception { + processFile("bc05r09centers.txt"); + } + + public void testBc05r10centers() throws Exception { + processFile("bc05r10centers.txt"); + } + + public void testBc05r11centers() throws Exception { + processFile("bc05r11centers.txt"); + } + + public void testBc05r12centers() throws Exception { + processFile("bc05r12centers.txt"); + } + + public void testBc05r13centers() throws Exception { + processFile("bc05r13centers.txt"); + } + + public void testBc05r05centers() throws Exception { + processFile("bc05r14centers.txt"); + } + + public void testBc05r15centers() throws Exception { + processFile("bc05r15centers.txt"); + } + + public void testBc14r08centers() throws Exception { + processFile("bc14r08centers.txt"); + } + + public void testBc14r09centers() throws Exception { + processFile("bc14r09centers.txt"); + } + + public void testBc14r10centers() throws Exception { + processFile("bc14r10centers.txt"); + } + + public void testBc14r11centers() throws Exception { + processFile("bc14r11centers.txt"); + } + + public void testBc14r12centers() throws Exception { + 
processFile("bc14r12centers.txt"); + } + + public void testBc14r13centers() throws Exception { + processFile("bc14r13centers.txt"); + } + + public void testBc14r14centers() throws Exception { + processFile("bc14r14centers.txt"); + } + + public void testBc14r15centers() throws Exception { + processFile("bc14r15centers.txt"); + } + + public void testBc19r08centers() throws Exception { + processFile("bc19r08centers.txt"); + } + + public void testBc19r09centers() throws Exception { + processFile("bc19r09centers.txt"); + } + + public void testBc19r10centers() throws Exception { + processFile("bc19r10centers.txt"); + } + + public void testBc19r11centers() throws Exception { + processFile("bc19r11centers.txt"); + } + + public void testBc19r12centers() throws Exception { + processFile("bc19r12centers.txt"); + } + + public void testBc19r13centers() throws Exception { + processFile("bc19r13centers.txt"); + } + + public void testBc19r14centers() throws Exception { + processFile("bc19r14centers.txt"); + } + + public void testBc19r15centers() throws Exception { + processFile("bc19r15centers.txt"); + } + + private void processFile(String file) throws IOException { + InputStream fis = getClass().getResourceAsStream(file + ".gz"); + BufferedReader reader = new BufferedReader(new InputStreamReader(new GzipCompressorInputStream(fis), StandardCharsets.UTF_8)); + String line = reader.readLine(); + while (line != null) { + StringTokenizer tokenizer = new StringTokenizer(line, " "); + assertEquals(3, tokenizer.countTokens()); + String h3Address = tokenizer.nextToken(); + assertEquals(h3Address, true, H3.h3IsValid(h3Address)); + double lat = Double.parseDouble(tokenizer.nextToken()); + double lon = Double.parseDouble(tokenizer.nextToken()); + assertH3ToLatLng(h3Address, lat, lon); + assertGeoToH3(h3Address, lat, lon); + assertHexRing(h3Address); + line = reader.readLine(); + } + } + + private void assertH3ToLatLng(String h3Address, double lat, double lon) { + LatLng latLng = 
H3.h3ToLatLng(h3Address); + assertEquals(h3Address, lat, latLng.getLatDeg(), 1e-6); + assertEquals(h3Address, lon, latLng.getLonDeg(), 1e-6); + } + + private void assertGeoToH3(String h3Address, double lat, double lon) { + String computedH3Address = H3.geoToH3Address(lat, lon, H3Index.H3_get_resolution(H3.stringToH3(h3Address))); + assertEquals(h3Address, computedH3Address); + assertEquals(h3Address, computedH3Address); + } + + private void assertHexRing(String h3Address) { + String[] neighbors = H3.hexRing(h3Address); + long center = H3.stringToH3(h3Address); + for (String neighbor : neighbors) { + long l = H3.stringToH3(neighbor); + assertEquals(H3Index.H3_get_resolution(center), H3Index.H3_get_resolution(l)); + } + } +} diff --git a/libs/h3/src/test/java/org/opensearch/geospatial/h3/GeoToH3Tests.java b/libs/h3/src/test/java/org/opensearch/geospatial/h3/GeoToH3Tests.java new file mode 100644 index 00000000..94347a98 --- /dev/null +++ b/libs/h3/src/test/java/org/opensearch/geospatial/h3/GeoToH3Tests.java @@ -0,0 +1,64 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.opensearch.geospatial.h3; + +import org.apache.lucene.spatial3d.geom.GeoPoint; +import org.apache.lucene.spatial3d.geom.GeoPolygon; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.PlanetModel; +import org.apache.lucene.tests.geo.GeoTestUtil; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.List; + +public class GeoToH3Tests extends OpenSearchTestCase { + + public void testRandomPoints() { + for (int i = 0; i < 50; i++) { + // avoid points close to the poles + double lat = randomValueOtherThanMany(d -> d > 60 || d < -60, GeoTestUtil::nextLatitude); + // avoid points close to the dateline + double lon = randomValueOtherThanMany(d -> d > 150 || d < -150, GeoTestUtil::nextLongitude); + testPoint(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude()); + } + } + + private void testPoint(double lat, double lon) { + GeoPoint point = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat), Math.toRadians(lon)); + for (int i = 0; i < Constants.MAX_H3_RES; i++) { + String h3Address = H3.geoToH3Address(lat, lon, i); + GeoPolygon polygon = buildGeoPolygon(h3Address); + assertTrue(polygon.isWithin(point)); + } + } + private GeoPolygon buildGeoPolygon(String h3Address) { + CellBoundary cellBoundary = H3.h3ToGeoBoundary(h3Address); + List points = new ArrayList<>(cellBoundary.numPoints()); + for (int i = 0; i < cellBoundary.numPoints(); i++) { + LatLng latLng = cellBoundary.getLatLon(i); + points.add(new GeoPoint(PlanetModel.SPHERE, latLng.getLatRad(), latLng.getLonRad())); + } + return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + } +} diff --git a/libs/h3/src/test/java/org/opensearch/geospatial/h3/ParentChildNavigationTests.java b/libs/h3/src/test/java/org/opensearch/geospatial/h3/ParentChildNavigationTests.java new file mode 100644 index 00000000..a0a1a3a2 --- /dev/null +++ 
b/libs/h3/src/test/java/org/opensearch/geospatial/h3/ParentChildNavigationTests.java @@ -0,0 +1,72 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.opensearch.geospatial.h3; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; + +import org.opensearch.test.OpenSearchTestCase; + +public class ParentChildNavigationTests extends OpenSearchTestCase { + + public void testParentChild() { + String[] h3Addresses = H3.getStringRes0Cells(); + String h3Address = RandomPicks.randomFrom(random(), h3Addresses); + String[] values = new String[H3.MAX_H3_RES]; + values[0] = h3Address; + for (int i = 1; i < H3.MAX_H3_RES; i++) { + h3Addresses = H3.h3ToChildren(h3Address); + h3Address = RandomPicks.randomFrom(random(), h3Addresses); + values[i] = h3Address; + } + h3Addresses = H3.h3ToChildren(h3Address); + h3Address = RandomPicks.randomFrom(random(), h3Addresses); + for (int i = H3.MAX_H3_RES - 1; i >= H3.MIN_H3_RES; i--) { + h3Address = H3.h3ToParent(h3Address); + assertEquals(values[i], h3Address); + } + } + + public void testHexRing() { + String[] h3Addresses = H3.getStringRes0Cells(); + String h3Address = 
RandomPicks.randomFrom(random(), h3Addresses); + for (int i = 1; i < H3.MAX_H3_RES; i++) { + h3Addresses = H3.h3ToChildren(h3Address); + assertHexRing(i, h3Address, h3Addresses); + h3Address = RandomPicks.randomFrom(random(), h3Addresses); + } + } + + private static final int[] HEX_RING_POSITIONS = new int[] { 2, 0, 1, 4, 3, 5 }; + private static final int[] PENT_RING_POSITIONS = new int[] { 0, 1, 3, 2, 4 }; + + private void assertHexRing(int res, String h3Address, String[] children) { + LatLng latLng = H3.h3ToLatLng(h3Address); + String centerChild = H3.geoToH3Address(latLng.getLatDeg(), latLng.getLonDeg(), res); + assertEquals(children[0], centerChild); + String[] ring = H3.hexRing(centerChild); + int[] positions = H3.isPentagon(centerChild) ? PENT_RING_POSITIONS : HEX_RING_POSITIONS; + for (int i = 1; i < children.length; i++) { + assertEquals(children[i], ring[positions[i - 1]]); + } + } +} diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/NOTICE.txt b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/NOTICE.txt new file mode 100644 index 00000000..50caeb4a --- /dev/null +++ b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/NOTICE.txt @@ -0,0 +1,4 @@ +The files under this directory come from the input test files from Uber's h3 repository +(https://github.com/uber/h3/tree/master/tests/inputfiles) and are made available here +under the same Apache 2 license. 
+ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08cells.txt.gz new file mode 100644 index 00000000..e8904b41 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08centers.txt.gz new file mode 100644 index 00000000..9efeadc8 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r08centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09cells.txt.gz new file mode 100644 index 00000000..dbb2e593 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09centers.txt.gz new file mode 100644 index 00000000..9cbeae27 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r09centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10cells.txt.gz new file mode 100644 index 00000000..f91ba387 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10centers.txt.gz new file mode 100644 index 00000000..405a1e36 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r10centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11cells.txt.gz new file mode 100644 index 00000000..ebffe357 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11centers.txt.gz new file mode 100644 index 00000000..04c8ac2f Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r11centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12cells.txt.gz new file mode 100644 index 00000000..84620767 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12centers.txt.gz new file mode 100644 index 00000000..ae9cc569 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r12centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13cells.txt.gz new file mode 100644 index 00000000..d8da966a Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13centers.txt.gz new file mode 100644 index 00000000..e3e98279 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r13centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14cells.txt.gz new file mode 100644 index 00000000..27be76a9 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14centers.txt.gz new file mode 100644 index 00000000..0695444f Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r14centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15cells.txt.gz new file mode 100644 index 00000000..73e12447 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15centers.txt.gz new file mode 100644 index 00000000..1ac79869 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc05r15centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08cells.txt.gz new file mode 100644 index 00000000..5b0dc433 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08centers.txt.gz new file mode 100644 index 00000000..911f7e4c Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r08centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09cells.txt.gz new file mode 100644 index 00000000..f4a614bf Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09centers.txt.gz new file mode 100644 index 00000000..28be0951 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r09centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10cells.txt.gz new file mode 100644 index 00000000..95941dc1 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10centers.txt.gz new file mode 100644 index 00000000..da6b7d29 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r10centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11cells.txt.gz new file mode 100644 index 00000000..e05bb28b Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11centers.txt.gz new file mode 100644 index 00000000..636d4d14 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r11centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12cells.txt.gz new file mode 100644 index 00000000..5fc28a11 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12centers.txt.gz new file mode 100644 index 00000000..e0dd3f43 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r12centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13cells.txt.gz new file mode 100644 index 00000000..8c49ce4f Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13centers.txt.gz new file mode 100644 index 00000000..2ea6219f Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r13centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14cells.txt.gz new file mode 100644 index 00000000..caf54eb6 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14centers.txt.gz new file mode 100644 index 00000000..2a96ded4 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r14centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15cells.txt.gz new file mode 100644 index 00000000..2a64b905 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15centers.txt.gz new file mode 100644 index 00000000..963b44b9 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc14r15centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08cells.txt.gz new file mode 100644 index 00000000..201c6d2c Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08centers.txt.gz new file mode 100644 index 00000000..f1011e07 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r08centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09cells.txt.gz new file mode 100644 index 00000000..51bf7405 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09centers.txt.gz new file mode 100644 index 00000000..6e8dba12 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r09centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10cells.txt.gz new file mode 100644 index 00000000..1bb9cd27 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10centers.txt.gz new file mode 100644 index 00000000..c20c3029 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r10centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11cells.txt.gz new file mode 100644 index 00000000..23d923d8 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11centers.txt.gz new file mode 100644 index 00000000..5a0a57a9 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r11centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12cells.txt.gz new file mode 100644 index 00000000..16631958 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12centers.txt.gz new file mode 100644 index 00000000..77d2cb9d Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r12centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13cells.txt.gz new file mode 100644 index 00000000..74d5d33b Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13centers.txt.gz new file mode 100644 index 00000000..961b5a4c Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r13centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14cells.txt.gz new file mode 100644 index 00000000..8c3c6ead Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14centers.txt.gz new file mode 100644 index 00000000..114ff526 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r14centers.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15cells.txt.gz new file mode 100644 index 00000000..bb1394d1 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15centers.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15centers.txt.gz new file mode 100644 index 00000000..0b3a1ac5 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/bc19r15centers.txt.gz differ diff --git 
a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00cells.txt.gz new file mode 100644 index 00000000..5b45234c Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00ic.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00ic.txt.gz new file mode 100644 index 00000000..498d8de3 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res00ic.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01cells.txt.gz new file mode 100644 index 00000000..f6437ad7 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01ic.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01ic.txt.gz new file mode 100644 index 00000000..3d8f4569 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res01ic.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02cells.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02cells.txt.gz new file mode 100644 index 00000000..0c63e3bb Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02ic.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02ic.txt.gz new file mode 100644 index 00000000..9cfdb2b1 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res02ic.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03cells.txt.gz 
b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03cells.txt.gz new file mode 100644 index 00000000..13cdb005 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03cells.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03ic.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03ic.txt.gz new file mode 100644 index 00000000..d72ae7a2 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res03ic.txt.gz differ diff --git a/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res04ic.txt.gz b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res04ic.txt.gz new file mode 100644 index 00000000..be6b73d1 Binary files /dev/null and b/libs/h3/src/test/resources/org/opensearch/geospatial/h3/res04ic.txt.gz differ diff --git a/lombok.config b/lombok.config new file mode 100644 index 00000000..9745d1ed --- /dev/null +++ b/lombok.config @@ -0,0 +1,5 @@ +# tell lombok this is your root directory +config.stopBubbling = true +# add @lombok.Generated annotations to all generated nodes where possible +# to skip code coverage for auto generated code +lombok.addLombokGeneratedAnnotation = true diff --git a/release-notes/opensearch-geospatial.release-notes-2.10.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.10.0.0.md new file mode 100644 index 00000000..9746028f --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.10.0.0.md @@ -0,0 +1,22 @@ +## Version 2.10.0.0 Release Notes + +Compatible with OpenSearch 2.10.0 + +### Features +* IP2Geo processor implementation ([#362](https://github.com/opensearch-project/geospatial/pull/362)) + +### Bug Fixes +* Revert datasource state when delete fails([#382](https://github.com/opensearch-project/geospatial/pull/382)) +* Update ip2geo test data url([#389](https://github.com/opensearch-project/geospatial/pull/389)) + +### Infrastructure +* Make jacoco report 
to be generated faster in local ([#267](https://github.com/opensearch-project/geospatial/pull/267)) +* Exclude lombok generated code from jacoco coverage report ([#268](https://github.com/opensearch-project/geospatial/pull/268)) + +### Maintenance +* Change package for Strings.hasText ([#314](https://github.com/opensearch-project/geospatial/pull/314)) +* Fixed compilation errors after refactoring in core foundation classes ([#380](https://github.com/opensearch-project/geospatial/pull/380)) +* Version bump for spotlss and apache commons([#400](https://github.com/opensearch-project/geospatial/pull/400)) +### Refactoring +* Refactor LifecycleComponent package path ([#377](https://github.com/opensearch-project/geospatial/pull/377)) +* Refactor Strings utility methods to core library ([#379](https://github.com/opensearch-project/geospatial/pull/379)) diff --git a/release-notes/opensearch-geospatial.release-notes-2.11.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.11.0.0.md new file mode 100644 index 00000000..2ea69e3d --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.11.0.0.md @@ -0,0 +1,9 @@ +## Version 2.11.0.0 Release Notes + +Compatible with OpenSearch 2.11.0 + +### Bug Fixes +* Fix flaky test, testIndexingMultiPolygon ([#483](https://github.com/opensearch-project/geospatial/pull/483)) + +### Infrastructure +* Add integration test against security enabled cluster ([#513](https://github.com/opensearch-project/geospatial/pull/513)) diff --git a/release-notes/opensearch-geospatial.release-notes-2.11.1.0.md b/release-notes/opensearch-geospatial.release-notes-2.11.1.0.md new file mode 100644 index 00000000..e266eaee --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.11.1.0.md @@ -0,0 +1,7 @@ +## Version 2.11.1.0 Release Notes + +Compatible with OpenSearch 2.11.1 + +### Bug Fixes +* Add default value in denylist 
([#583](https://github.com/opensearch-project/geospatial/pull/583)) +* Add denylist ip config for datasource endpoint ([#573](https://github.com/opensearch-project/geospatial/pull/573)) diff --git a/release-notes/opensearch-geospatial.release-notes-2.2.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.2.0.0.md new file mode 100644 index 00000000..6216abaa --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.2.0.0.md @@ -0,0 +1,27 @@ +## Version 2.2.0.0 Release Notes + +Compatible with OpenSearch 2.2.0 + +### Features + +* Add feature processor to convert geo-json feature to geo-shape field ([#15](https://github.com/opensearch-project/geospatial/pull/15)) +* Add rest handler for geo-json upload ([#25](https://github.com/opensearch-project/geospatial/pull/25)) +* Create UploadGeoJSONRequest content as an object ([#32](https://github.com/opensearch-project/geospatial/pull/32)) +* Add GeoJSON object of type FeatureCollection ([#33](https://github.com/opensearch-project/geospatial/pull/33)) +* Include new route to support update index while upload ([#34](https://github.com/opensearch-project/geospatial/pull/34)) +* Add uploader to upload user input ([#35](https://github.com/opensearch-project/geospatial/pull/35)) +* Make field name as optional ([#37](https://github.com/opensearch-project/geospatial/pull/37)) +* Use BulkResponse build error message ([#46](https://github.com/opensearch-project/geospatial/pull/46)) +* Update upload API response structure ([#51](https://github.com/opensearch-project/geospatial/pull/51)) +* Add metric and stat entity ([#54](https://github.com/opensearch-project/geospatial/pull/54)) +* Create Upload Stats Service to build response for stats API ([#62](https://github.com/opensearch-project/geospatial/pull/62)) +* Include stats api to provide upload metrics 
([#64](https://github.com/opensearch-project/geospatial/pull/64)) + +### Infrastructure +* Create plugin using plugin template ([#3](https://github.com/opensearch-project/geospatial/pull/3)) +* Add formatter config from OpenSearch ([#21](https://github.com/opensearch-project/geospatial/pull/21)) +* Adding JDK 11 to CI matrix ([#31](https://github.com/opensearch-project/geospatial/pull/31)) +* Add support to run integration tests with multiple nodes ([#57](https://github.com/opensearch-project/geospatial/pull/57)) + +### Maintenance +* Update OpenSearch upstream version to 2.2.0([#87](https://github.com/opensearch-project/geospatial/pull/87)) \ No newline at end of file diff --git a/release-notes/opensearch-geospatial.release-notes-2.3.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.3.0.0.md new file mode 100644 index 00000000..5b8bbfc2 --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.3.0.0.md @@ -0,0 +1,6 @@ +## Version 2.3.0.0 Release Notes + +Compatible with OpenSearch 2.3.0 + +### Maintenance +* Increment version to 2.3.0-SNAPSHOT ([#137](https://github.com/opensearch-project/geospatial/pull/137)) diff --git a/release-notes/opensearch-geospatial.release-notes-2.4.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.4.0.0.md new file mode 100644 index 00000000..3836d285 --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.4.0.0.md @@ -0,0 +1,30 @@ +## Version 2.4.0.0 Release Notes + +Compatible with OpenSearch 2.4.0 + +### Features +* Support Uber's H3 geospatial indexing system as geohex_grid ([#179](https://github.com/opensearch-project/geospatial/pull/179)) +* Add geojson support for XYPoint ([#162](https://github.com/opensearch-project/geospatial/pull/162)) +* Add XYPoint Field Type to index and query documents that contains cartesian points ([#130](https://github.com/opensearch-project/geospatial/pull/130)) +* Add 
XYShapeQueryBuilder ([#82](https://github.com/opensearch-project/geospatial/pull/82)) +* Add parameter to randomly include z coordinates to geometry ([#79](https://github.com/opensearch-project/geospatial/pull/79)) +* Add shape processor ([#74](https://github.com/opensearch-project/geospatial/pull/74)) +* Add shape field mapper ([#70](https://github.com/opensearch-project/geospatial/pull/70)) +* Add ShapeIndexer to create indexable fields ([#68](https://github.com/opensearch-project/geospatial/pull/68)) + +### Enhancements +* add groupId to pluginzip publication ([#167](https://github.com/opensearch-project/geospatial/pull/167)) +* Flip X and Y coordinates for WKT and array formats in XYPoint ([#156](https://github.com/opensearch-project/geospatial/pull/156)) + +### Infrastructure +* Add window and mac platform in CI ([#173](https://github.com/opensearch-project/geospatial/pull/173)) +* Fix integration test failure with security enabled cluster ([#138](https://github.com/opensearch-project/geospatial/pull/138)) +* Remove explicit dco check ([#126](https://github.com/opensearch-project/geospatial/pull/126)) +* Include feature branch in workflow to trigger CI ([#102](https://github.com/opensearch-project/geospatial/pull/102)) + +### Maintenance +* Increment version to 2.4.0-SNAPSHOT ([#139](https://github.com/opensearch-project/geospatial/pull/139)) +* Update to Gradle 7.5.1 ([#134](https://github.com/opensearch-project/geospatial/pull/134)) + +### Refactoring +* Remove optional to get features ([#177](https://github.com/opensearch-project/geospatial/pull/177)) \ No newline at end of file diff --git a/release-notes/opensearch-geospatial.release-notes-2.5.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.5.0.0.md new file mode 100644 index 00000000..2cabf290 --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.5.0.0.md @@ -0,0 
+1,6 @@ +## Version 2.5.0.0 Release Notes + +Compatible with OpenSearch 2.5.0 + +### Maintenance +* Increment version to 2.5.0-SNAPSHOT ([#184](https://github.com/opensearch-project/geospatial/pull/184)) diff --git a/release-notes/opensearch-geospatial.release-notes-2.6.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.6.0.0.md new file mode 100644 index 00000000..bef6970d --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.6.0.0.md @@ -0,0 +1,14 @@ +## Version 2.6.0.0 Release Notes + +Compatible with OpenSearch 2.6.0 + +### Maintenance +* Upgrade snapshot version to 2.6 for 2.x ([#208](https://github.com/opensearch-project/geospatial/pull/208)) + +### Features +* Add limit to geojson upload API ([#218](https://github.com/opensearch-project/geospatial/pull/218)) +* Allow API to accept any index name without suffix ([#182](https://github.com/opensearch-project/geospatial/pull/182)) + +### Refactoring +* Fix compilation error and test failure ([#210](https://github.com/opensearch-project/geospatial/pull/210)) +* Replace Locale.getDefault() with Local.ROOT ([#214](https://github.com/opensearch-project/geospatial/pull/214)) \ No newline at end of file diff --git a/release-notes/opensearch-geospatial.release-notes-2.7.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.7.0.0.md new file mode 100644 index 00000000..57439e9a --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.7.0.0.md @@ -0,0 +1,8 @@ +## Version 2.7.0.0 Release Notes + +Compatible with OpenSearch 2.7.0 + +### Infrastructure +* Publish snapshots to maven via GHA ([#233](https://github.com/opensearch-project/geospatial/pull/233)) +* Update snapshot version and fix compilation issues ([#237](https://github.com/opensearch-project/geospatial/pull/237)) +* Add CHANGELOG ([#238](https://github.com/opensearch-project/geospatial/pull/238)) diff --git 
a/release-notes/opensearch-geospatial.release-notes-2.8.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.8.0.0.md new file mode 100644 index 00000000..00281437 --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.8.0.0.md @@ -0,0 +1,10 @@ +## Version 2.8.0.0 Release Notes + +Compatible with OpenSearch 2.8.0 + +### Infrastructure +* Make jacoco report to be generated faster in local ([#267](https://github.com/opensearch-project/geospatial/pull/267)) +* Exclude lombok generated code from jacoco coverage report ([#268](https://github.com/opensearch-project/geospatial/pull/268)) + +### Maintenance +* Change package for Strings.hasText ([#314](https://github.com/opensearch-project/geospatial/pull/314)) \ No newline at end of file diff --git a/release-notes/opensearch-geospatial.release-notes-2.9.0.0.md b/release-notes/opensearch-geospatial.release-notes-2.9.0.0.md new file mode 100644 index 00000000..baa64e3c --- /dev/null +++ b/release-notes/opensearch-geospatial.release-notes-2.9.0.0.md @@ -0,0 +1,9 @@ +## Version 2.9.0.0 Release Notes + +Compatible with OpenSearch 2.9.0 + +### Maintenance +Increment version to 2.9.0-SNAPSHOT ([#329](https://github.com/opensearch-project/geospatial/pull/329)) + +### Refactoring +Change package for Strings.hasText ([#314](https://github.com/opensearch-project/geospatial/pull/314)) diff --git a/settings.gradle b/settings.gradle index be8ff3ba..72251801 100644 --- a/settings.gradle +++ b/settings.gradle @@ -8,3 +8,6 @@ */ rootProject.name = 'geospatial' + +include ":libs" +include ":libs:h3" \ No newline at end of file diff --git a/src/main/java/org/opensearch/geospatial/GeospatialParser.java b/src/main/java/org/opensearch/geospatial/GeospatialParser.java index 7119e096..04c81efd 100644 --- a/src/main/java/org/opensearch/geospatial/GeospatialParser.java +++ b/src/main/java/org/opensearch/geospatial/GeospatialParser.java @@ -12,11 +12,10 @@ import java.util.List; 
import java.util.Map; import java.util.Objects; -import java.util.Optional; -import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.geospatial.geojson.Feature; import org.opensearch.geospatial.geojson.FeatureCollection; @@ -77,18 +76,18 @@ public static Map convertToMap(BytesReference content) { * getFeatures will return features from given map input. This function abstracts the logic to parse given input and returns * list of Features if exists in Map format. * @param geoJSON given input which may contain GeoJSON Object - * @return Returns an Optional with the List of Features as map if input is GeoJSON, - * else, returns an empty Optional instance. + * @return Returns List of Features as map if input is GeoJSON, + * else, returns an empty list. */ - public static Optional>> getFeatures(final Map geoJSON) { + public static List> getFeatures(final Map geoJSON) { final String type = extractValueAsString(geoJSON, TYPE_KEY); Objects.requireNonNull(type, TYPE_KEY + " cannot be null"); if (Feature.TYPE.equalsIgnoreCase(type)) { - return Optional.of(List.of(geoJSON)); + return List.of(geoJSON); } if (FeatureCollection.TYPE.equalsIgnoreCase(type)) { - return Optional.of(Collections.unmodifiableList(FeatureCollection.create(geoJSON).getFeatures())); + return Collections.unmodifiableList(FeatureCollection.create(geoJSON).getFeatures()); } - return Optional.empty(); + return List.of(); } } diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/ContentBuilder.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/ContentBuilder.java index 687fa009..66e708ed 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/ContentBuilder.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/ContentBuilder.java @@ -14,7 +14,7 @@ import 
org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.support.WriteRequest; import org.opensearch.client.Client; -import org.opensearch.common.Strings; +import org.opensearch.core.common.Strings; import org.opensearch.geospatial.GeospatialParser; /** @@ -45,8 +45,6 @@ private Optional prepareContentRequest(UploadGeoJSONRequestC .stream() .map(GeospatialParser::toStringObjectMap) .map(GeospatialParser::getFeatures) - .filter(Optional::isPresent) - .map(Optional::get) .flatMap(List::stream) .map(this::createIndexRequestBuilder) .map(indexRequestBuilder -> indexRequestBuilder.setIndex(content.getIndexName())) diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/IndexManager.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/IndexManager.java index 96bc0252..b8bba9db 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/IndexManager.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/IndexManager.java @@ -8,17 +8,17 @@ import java.io.IOException; import java.util.Map; -import lombok.AllArgsConstructor; -import lombok.NonNull; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.client.IndicesAdminClient; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.xcontent.XContentBuilder; + +import lombok.AllArgsConstructor; +import lombok.NonNull; /** * IndexManager is responsible for managing index operations like create, delete, etc... 
diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/PipelineManager.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/PipelineManager.java index d0df6c89..e5ff71bf 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/PipelineManager.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/PipelineManager.java @@ -8,23 +8,23 @@ import java.io.IOException; import java.util.function.Supplier; -import lombok.AllArgsConstructor; -import lombok.NonNull; -import lombok.extern.log4j.Log4j2; - -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.ingest.DeletePipelineRequest; import org.opensearch.action.ingest.PutPipelineRequest; import org.opensearch.client.ClusterAdminClient; import org.opensearch.common.UUIDs; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geospatial.processor.FeatureProcessor; import org.opensearch.ingest.Pipeline; +import lombok.AllArgsConstructor; +import lombok.NonNull; +import lombok.extern.log4j.Log4j2; + /** * PipelineManager is responsible for managing pipeline operations like create and delete */ diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequest.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequest.java index 8bc8df5c..db928136 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequest.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequest.java @@ -14,17 +14,17 @@ import java.io.IOException; import 
java.util.Objects; -import lombok.AllArgsConstructor; -import lombok.Getter; -import lombok.NonNull; - import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.common.bytes.BytesReference; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; import org.opensearch.rest.RestRequest; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NonNull; + @AllArgsConstructor @Getter public class UploadGeoJSONRequest extends ActionRequest { diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContent.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContent.java index 23be5288..adddc4bf 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContent.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContent.java @@ -12,12 +12,13 @@ import java.util.Map; import java.util.Objects; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; +import org.opensearch.geospatial.GeospatialParser; + import lombok.AccessLevel; import lombok.AllArgsConstructor; -import org.opensearch.common.ParseField; -import org.opensearch.common.Strings; - /** * UploadGeoJSONRequestContent is the Data model for UploadGeoJSONRequest's body */ @@ -29,7 +30,10 @@ public final class UploadGeoJSONRequestContent { public static final ParseField FIELD_GEOSPATIAL = new ParseField("field"); public static final ParseField FIELD_GEOSPATIAL_TYPE = new ParseField("type"); public static final ParseField FIELD_DATA = new ParseField("data"); - public static final String ACCEPTED_INDEX_SUFFIX_PATH = "-map"; + + // Custom 
Vector Map can support fetching up to 10K Features. Hence, we chose same value as limit + // for upload as well. + public static final int MAX_SUPPORTED_GEOJSON_FEATURE_COUNT = 10_000; private final String indexName; private final String fieldName; private final String fieldType; @@ -62,27 +66,40 @@ public static UploadGeoJSONRequestContent create(Map input) { geoJSONData + " is not an instance of List, but of type [ " + geoJSONData.getClass().getName() + " ]" ); } + validateFeatureCount(geoJSONData); return new UploadGeoJSONRequestContent(index, fieldName, fieldType, (List) geoJSONData); } - private static String validateIndexName(Map input) { - String index = extractValueAsString(input, FIELD_INDEX.getPreferredName()); - if (!Strings.hasText(index)) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "field [ %s ] cannot be empty", FIELD_INDEX.getPreferredName()) - ); - } - if (!index.endsWith(ACCEPTED_INDEX_SUFFIX_PATH)) { + private static void validateFeatureCount(Object geoJSONData) { + final long featureCount = getFeatureCount(geoJSONData); + if (featureCount > MAX_SUPPORTED_GEOJSON_FEATURE_COUNT) { throw new IllegalArgumentException( String.format( - Locale.getDefault(), - "field [ %s ] should end with suffix %s", - FIELD_INDEX.getPreferredName(), - ACCEPTED_INDEX_SUFFIX_PATH + Locale.ROOT, + "Received %d features, but, cannot upload more than %d features", + featureCount, + MAX_SUPPORTED_GEOJSON_FEATURE_COUNT ) ); } - return index; + } + + private static long getFeatureCount(Object geoJSONData) { + return ((List) geoJSONData).stream() + .map(GeospatialParser::toStringObjectMap) + .map(GeospatialParser::getFeatures) + .flatMap(List::stream) + .count(); + } + + private static String validateIndexName(Map input) { + String index = extractValueAsString(input, FIELD_INDEX.getPreferredName()); + if (Strings.hasText(index)) { + return index; + } + throw new IllegalArgumentException( + String.format(Locale.getDefault(), "field [ %s ] cannot 
be empty", FIELD_INDEX.getPreferredName()) + ); } public String getIndexName() { diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponse.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponse.java index 868312eb..1ddf0359 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponse.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponse.java @@ -10,13 +10,13 @@ import java.util.List; import java.util.stream.Collectors; -import org.opensearch.action.ActionResponse; import org.opensearch.action.bulk.BulkItemResponse; import org.opensearch.action.bulk.BulkResponse; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; //UploadGeoJSONResponse represents UploadGeoJSONRequest's Response public class UploadGeoJSONResponse extends ActionResponse implements ToXContentObject { diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONTransportAction.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONTransportAction.java index 5d190870..4e0f0e1d 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONTransportAction.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONTransportAction.java @@ -14,12 +14,12 @@ import java.util.Map; import org.opensearch.ResourceAlreadyExistsException; -import org.opensearch.action.ActionListener; import 
org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; import org.opensearch.geospatial.GeospatialParser; import org.opensearch.geospatial.stats.upload.UploadStats; import org.opensearch.rest.RestRequest; diff --git a/src/main/java/org/opensearch/geospatial/action/upload/geojson/Uploader.java b/src/main/java/org/opensearch/geospatial/action/upload/geojson/Uploader.java index 1fcbfb41..f6cf02cf 100644 --- a/src/main/java/org/opensearch/geospatial/action/upload/geojson/Uploader.java +++ b/src/main/java/org/opensearch/geospatial/action/upload/geojson/Uploader.java @@ -11,12 +11,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.bulk.BulkItemResponse; import org.opensearch.action.bulk.BulkRequestBuilder; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.common.collect.MapBuilder; +import org.opensearch.core.action.ActionListener; import org.opensearch.geospatial.stats.upload.UploadMetric; import org.opensearch.geospatial.stats.upload.UploadStats; diff --git a/src/main/java/org/opensearch/geospatial/annotation/VisibleForTesting.java b/src/main/java/org/opensearch/geospatial/annotation/VisibleForTesting.java new file mode 100644 index 00000000..d48c6dc2 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/annotation/VisibleForTesting.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.geospatial.annotation; + +public @interface VisibleForTesting { +} diff --git a/src/main/java/org/opensearch/geospatial/constants/IndexSetting.java b/src/main/java/org/opensearch/geospatial/constants/IndexSetting.java new file mode 100644 index 00000000..0978c411 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/constants/IndexSetting.java @@ -0,0 +1,18 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.constants; + +/** + * Collection of keys for index setting + */ +public class IndexSetting { + public static final String NUMBER_OF_SHARDS = "index.number_of_shards"; + public static final String NUMBER_OF_REPLICAS = "index.number_of_replicas"; + public static final String REFRESH_INTERVAL = "index.refresh_interval"; + public static final String AUTO_EXPAND_REPLICAS = "index.auto_expand_replicas"; + public static final String HIDDEN = "index.hidden"; + public static final String BLOCKS_WRITE = "index.blocks.write"; +} diff --git a/src/main/java/org/opensearch/geospatial/exceptions/ConcurrentModificationException.java b/src/main/java/org/opensearch/geospatial/exceptions/ConcurrentModificationException.java new file mode 100644 index 00000000..f3a2ae11 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/exceptions/ConcurrentModificationException.java @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import java.io.IOException; + +import org.opensearch.OpenSearchException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.rest.RestStatus; + +/** + * General ConcurrentModificationException corresponding to the {@link RestStatus#BAD_REQUEST} status code + * + * The exception is thrown when multiple mutation API is called for a same resource at the same time + */ +public class ConcurrentModificationException 
extends OpenSearchException { + + public ConcurrentModificationException(String msg, Object... args) { + super(msg, args); + } + + public ConcurrentModificationException(String msg, Throwable cause, Object... args) { + super(msg, cause, args); + } + + public ConcurrentModificationException(StreamInput in) throws IOException { + super(in); + } + + @Override + public final RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} diff --git a/src/main/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceException.java b/src/main/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceException.java new file mode 100644 index 00000000..ce30d5c0 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceException.java @@ -0,0 +1,38 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import java.io.IOException; + +import org.opensearch.OpenSearchException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.rest.RestStatus; + +/** + * IncompatibleDatasourceException corresponding to the {@link RestStatus#BAD_REQUEST} status code + * + * The exception is thrown when a user tries to update datasource with new endpoint which is not compatible + * with current datasource + */ +public class IncompatibleDatasourceException extends OpenSearchException { + + public IncompatibleDatasourceException(String msg, Object... args) { + super(msg, args); + } + + public IncompatibleDatasourceException(String msg, Throwable cause, Object... 
args) { + super(msg, cause, args); + } + + public IncompatibleDatasourceException(StreamInput in) throws IOException { + super(in); + } + + @Override + public final RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} diff --git a/src/main/java/org/opensearch/geospatial/exceptions/ResourceInUseException.java b/src/main/java/org/opensearch/geospatial/exceptions/ResourceInUseException.java new file mode 100644 index 00000000..d102bb9d --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/exceptions/ResourceInUseException.java @@ -0,0 +1,35 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import java.io.IOException; + +import org.opensearch.OpenSearchException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.rest.RestStatus; + +/** + * Generic ResourceInUseException corresponding to the {@link RestStatus#BAD_REQUEST} status code + */ +public class ResourceInUseException extends OpenSearchException { + + public ResourceInUseException(String msg, Object... args) { + super(msg, args); + } + + public ResourceInUseException(String msg, Throwable cause, Object... 
args) { + super(msg, cause, args); + } + + public ResourceInUseException(StreamInput in) throws IOException { + super(in); + } + + @Override + public final RestStatus status() { + return RestStatus.BAD_REQUEST; + } +} diff --git a/src/main/java/org/opensearch/geospatial/geojson/FeatureCollection.java b/src/main/java/org/opensearch/geospatial/geojson/FeatureCollection.java index 1e7cf426..7afcfdcb 100644 --- a/src/main/java/org/opensearch/geospatial/geojson/FeatureCollection.java +++ b/src/main/java/org/opensearch/geospatial/geojson/FeatureCollection.java @@ -9,11 +9,11 @@ import java.util.List; import java.util.Map; +import org.opensearch.geospatial.GeospatialParser; + import lombok.NonNull; import lombok.Value; -import org.opensearch.geospatial.GeospatialParser; - /** * FeatureCollection represents GEOJSON of type FeatureCollection. A FeatureCollection object has a member * with the name "features". The value of "features" is a List. diff --git a/src/main/java/org/opensearch/geospatial/geojson/FeatureFactory.java b/src/main/java/org/opensearch/geospatial/geojson/FeatureFactory.java index a7eddcb0..4da2d5d7 100644 --- a/src/main/java/org/opensearch/geospatial/geojson/FeatureFactory.java +++ b/src/main/java/org/opensearch/geospatial/geojson/FeatureFactory.java @@ -10,10 +10,10 @@ import java.util.Map; -import lombok.NonNull; - import org.opensearch.geospatial.geojson.Feature.FeatureBuilder; +import lombok.NonNull; + /** * FeatureFactory helps to create {@link Feature} instance based on user input */ diff --git a/src/main/java/org/opensearch/geospatial/index/common/xyshape/XYShapeConverter.java b/src/main/java/org/opensearch/geospatial/index/common/xyshape/XYShapeConverter.java index 10bff8cf..b38b088e 100644 --- a/src/main/java/org/opensearch/geospatial/index/common/xyshape/XYShapeConverter.java +++ b/src/main/java/org/opensearch/geospatial/index/common/xyshape/XYShapeConverter.java @@ -11,10 +11,6 @@ import java.util.stream.DoubleStream; import 
java.util.stream.IntStream; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.NonNull; - import org.apache.lucene.geo.XYCircle; import org.apache.lucene.geo.XYLine; import org.apache.lucene.geo.XYPoint; @@ -26,6 +22,10 @@ import org.opensearch.geometry.Polygon; import org.opensearch.geometry.Rectangle; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.NonNull; + /** * Utility class to convert compatible shapes from opensearch to Lucene */ diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPoint.java b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPoint.java new file mode 100644 index 00000000..262849f7 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPoint.java @@ -0,0 +1,239 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import static org.opensearch.index.mapper.AbstractGeometryFieldMapper.Names.IGNORE_Z_VALUE; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +import org.opensearch.OpenSearchParseException; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.ShapeType; +import org.opensearch.geometry.utils.StandardValidator; +import org.opensearch.geometry.utils.WellKnownText; +import org.opensearch.index.mapper.AbstractPointGeometryFieldMapper; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.NoArgsConstructor; + +/** + * Represents a point in a 2-dimensional planar coordinate system with no range limitations. 
+ */ +@AllArgsConstructor +@Getter +@NoArgsConstructor +public class XYPoint implements AbstractPointGeometryFieldMapper.ParsedPoint, ToXContentFragment { + private double x; + private double y; + private static final String POINT_PRIMITIVE = "point"; + private static final String X_PARAMETER = "x"; + private static final String Y_PARAMETER = "y"; + private static final String XY_POINT = "XY_POINT"; + + /** + * To set x and y values + * + * @param x x coordinate value + * @param y y coordinate value + * @return initialized XYPoint + */ + public XYPoint reset(double x, double y) { + this.x = x; + this.y = y; + return this; + } + + @Override + public void validate(String fieldName) { + // validation is not required for xy_point + } + + @Override + public void normalize(String fieldName) { + // normalization is not required for xy_point + } + + /** + * To set x and y values + * + * @param x x coordinate + * @param y y coordinate + */ + @Override + public void resetCoords(double x, double y) { + this.reset(x, y); + } + + /** + * @return returns geometry of type point + */ + @Override + public Point asGeometry() { + return new Point(getX(), getY()); + } + + /** + * Set x and y coordinates of XYPoint if input contains WKT or coordinates. + * + * @param value input String which needs to be parsed and validated + * @param ignoreZValue boolean parameter which decides if z coordinate needs to be ignored or not + * @return XYPoint after setting the x and y coordinates + */ + public XYPoint resetFromString(String value, final boolean ignoreZValue) { + Objects.requireNonNull(value, "input string which needs to be parsed should not be null"); + + if (value.toLowerCase(Locale.ROOT).contains(POINT_PRIMITIVE)) { + return resetFromWKT(value, ignoreZValue); + } + return resetFromCoordinates(value, ignoreZValue); + } + + /** + * Set x and y coordinates of XYPoint if input contains coordinates. 
+ * + * @param value input String which contains coordinates that needs to be parsed and validated + * @param ignoreZValue boolean parameter which decides if z coordinate needs to be ignored or not + * @return XYPoint after setting the x and y coordinates + * throws OpenSearchParseException + */ + public XYPoint resetFromCoordinates(String value, final boolean ignoreZValue) { + Objects.requireNonNull(value, "input string which needs to be parsed should not be null"); + + String[] values = value.split(","); + int numOfValues = values.length; + + if (numOfValues > 3) { + throw new OpenSearchParseException("expected 2 or 3 coordinates, but found: [{}]", values.length); + } + + if (numOfValues > 2) { + assertZValue(ignoreZValue, Double.parseDouble(values[2].trim())); + } + + double x = parseCoordinate(values[0], X_PARAMETER); + double y = parseCoordinate(values[1], Y_PARAMETER); + + return reset(x, y); + } + + /** + * Parse and extract coordinate value from given String. + * + * @param value input String which contains coordinate that needs to be parsed and validated + * @param parameter x or y parameter + * @return parsed coordinate value + * throws OpenSearchParseException + */ + private double parseCoordinate(String value, String parameter) { + try { + return Double.parseDouble(value.trim()); + } catch (NumberFormatException ex) { + throw new OpenSearchParseException("[{}] must be a number", parameter, ex); + } + } + + /** + * Set x and y coordinates of XYPoint if input contains WKT POINT. 
+ * + * @param value input String which contains WKT POINT that needs to be parsed and validated + * @param ignoreZValue boolean parameter which decides if z coordinate needs to be ignored or not + * @return XYPoint after setting the x and y coordinates + * throws OpenSearchParseException + */ + private XYPoint resetFromWKT(String value, boolean ignoreZValue) { + Geometry geometry; + try { + geometry = new WellKnownText(false, new StandardValidator(ignoreZValue)).fromWKT(value); + } catch (Exception e) { + throw new OpenSearchParseException("Invalid WKT format, [{}]", value, e); + } + if (geometry.type() != ShapeType.POINT) { + throw new OpenSearchParseException("[xy_point] supports only POINT among WKT primitives, but found [{}]", geometry.type()); + } + Point point = (Point) geometry; + return reset(point.getX(), point.getY()); + } + + /** + * Validates if z coordinate value needs to be ignored or not. + * + * @param ignoreZValue boolean parameter which decides if z coordinate needs to be ignored or not + * @param zValue z coordinate value + * throws OpenSearchParseException + */ + public static void assertZValue(boolean ignoreZValue, double zValue) { + if (!ignoreZValue) { + throw new OpenSearchParseException( + "Exception parsing coordinates: found Z value [{}] but [{}] parameter is [{}]", + zValue, + IGNORE_Z_VALUE, + ignoreZValue + ); + } + } + + /** + * Deep Comparison to compare object of XYPoint class w.r.t state of the object + * + * @param obj Object + * @return true if the parameter obj is of type XYPoint and its data members (x and y) are same, else false + */ + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (!(obj instanceof XYPoint)) return false; + XYPoint point = (XYPoint) obj; + return point.x == x && point.y == y; + } + + /** + * This method returns the hash code value for the object on which this method is invoked. 
+ * + * @return hashcode value as an Integer + */ + @Override + public int hashCode() { + int result = Double.hashCode(x); + result = 31 * result + Double.hashCode(y); + return result; + } + + /** + * String representation of XYPoint object. + * + * @return XYPoint object as "Point(x,y)" where x and y are coordinate values + */ + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(XY_POINT); + sb.append('('); + sb.append(x); + sb.append(","); + sb.append(y); + sb.append(')'); + return sb.toString(); + } + + /** + * Return x and y coordinates in the object form. + * + * @param builder XContentBuilder object + * @param params Params object + * @return x and y coordinates in the object form. For example: {"x" : 100.35, "y" : -200.54} + * @throws IOException + */ + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject().field(X_PARAMETER, x).field(Y_PARAMETER, y).endObject(); + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapper.java b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapper.java new file mode 100644 index 00000000..6d6b3468 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapper.java @@ -0,0 +1,190 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.document.XYDocValuesField; +import org.apache.lucene.geo.XYPoint; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.Query; +import org.opensearch.common.Explicit; +import 
org.opensearch.common.geo.ShapeRelation; +import org.opensearch.geometry.Geometry; +import org.opensearch.geospatial.index.mapper.xyshape.XYShapeQueryable; +import org.opensearch.geospatial.index.query.xypoint.XYPointQueryProcessor; +import org.opensearch.index.mapper.AbstractPointGeometryFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.ParseContext; +import org.opensearch.index.query.QueryShardContext; + +/** + * FieldMapper for indexing {@link XYPoint} points + */ +public class XYPointFieldMapper extends AbstractPointGeometryFieldMapper< + List, + List> { + public static final String CONTENT_TYPE = "xy_point"; + private static final FieldType FIELD_TYPE = new FieldType(); + + static { + FIELD_TYPE.setStored(false); + FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE.freeze(); + } + + private XYPointFieldMapper( + String simpleName, + FieldType fieldType, + MappedFieldType mappedFieldType, + MultiFields multiFields, + Explicit ignoreMalformed, + Explicit ignoreZValue, + ParsedPoint nullValue, + CopyTo copyTo + ) { + super(simpleName, fieldType, mappedFieldType, multiFields, ignoreMalformed, ignoreZValue, nullValue, copyTo); + } + + @Override + protected void addStoredFields(ParseContext context, List points) { + for (XYPoint point : points) { + context.doc().add(new StoredField(fieldType().name(), point.toString())); + } + } + + @Override + protected void addDocValuesFields(String name, List points, List fields, ParseContext context) { + for (XYPoint point : points) { + context.doc().add(new XYDocValuesField(fieldType().name(), point.getX(), point.getY())); + } + } + + @Override + protected void addMultiFields(ParseContext context, List points) { + // Any other fields will not be added + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + public XYPointFieldType fieldType() { + return (XYPointFieldType) mappedFieldType; + } + + /** + * Builder class to create 
an instance of {@link XYPointFieldMapper} + */ + public static class XYPointFieldMapperBuilder extends AbstractPointGeometryFieldMapper.Builder< + XYPointFieldMapperBuilder, + XYPointFieldType> { + + public XYPointFieldMapperBuilder(String fieldName) { + super(fieldName, FIELD_TYPE); + this.hasDocValues = true; + } + + /** + * Set the GeometryParser and GeometryIndexer for XYPointFieldType and create an instance of XYPointFieldMapper + * + * The point {@link org.opensearch.geospatial.index.mapper.xypoint.XYPoint} sent as a parameter by + * {@link AbstractPointGeometryFieldMapper} to PointParser is of no use and can be ignored. + * + * @param context BuilderContext + * @param simpleName field name + * @param fieldType indicates the kind of data the field contains + * @param multiFields used to index same field in different ways for different purposes + * @param ignoreMalformed if true, malformed points are ignored else. If false(default) malformed points throw an exception + * @param ignoreZValue if true (default), third dimension is ignored. 
If false, points containing more than two dimension throw an exception + * @param nullValue used as a substitute for any explicit null values + * @param copyTo CopyTo instance + * @return instance of XYPointFieldMapper + */ + @Override + public XYPointFieldMapper build( + BuilderContext context, + String simpleName, + FieldType fieldType, + MultiFields multiFields, + Explicit ignoreMalformed, + Explicit ignoreZValue, + ParsedPoint nullValue, + CopyTo copyTo + ) { + var processor = new XYPointQueryProcessor(); + var xyPointFieldType = new XYPointFieldType( + buildFullName(context), + indexed, + this.fieldType.stored(), + hasDocValues, + meta, + processor + ); + + xyPointFieldType.setGeometryParser( + new PointParser<>( + name, + org.opensearch.geospatial.index.mapper.xypoint.XYPoint::new, + (parser, point) -> XYPointParser.parseXYPoint(parser, ignoreZValue().value()), + (org.opensearch.geospatial.index.mapper.xypoint.XYPoint) nullValue, + ignoreZValue.value(), + ignoreMalformed.value() + ) + ); + xyPointFieldType.setGeometryIndexer(new XYPointIndexer(xyPointFieldType.name())); + return new XYPointFieldMapper(name, fieldType, xyPointFieldType, multiFields, ignoreMalformed, ignoreZValue, nullValue, copyTo); + } + } + + /** + * Concrete field type for xy_point + */ + public static class XYPointFieldType extends AbstractPointGeometryFieldType< + List, + List> implements XYShapeQueryable { + private final XYPointQueryProcessor queryProcessor; + + public XYPointFieldType( + String name, + boolean indexed, + boolean stored, + boolean hasDocValues, + Map meta, + XYPointQueryProcessor processor + ) { + super(name, indexed, stored, hasDocValues, meta); + this.queryProcessor = Objects.requireNonNull(processor, "query processor cannot be null"); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + /** + * Finds all previously indexed shapes that comply the given {@link ShapeRelation} with + * the specified {@link Geometry}. 
+ * + * @param geometry query parameter to search indexed points + * @param fieldName field name that contains indexed points + * @param relation relation between search shape and indexed points + * @param context instance of {@link QueryShardContext} + * @return Lucene {@link Query} to find indexed points based on given geometry + */ + @Override + public Query shapeQuery(Geometry geometry, String fieldName, ShapeRelation relation, QueryShardContext context) { + return queryProcessor.shapeQuery(geometry, fieldName, relation, context); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldTypeParser.java b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldTypeParser.java new file mode 100644 index 00000000..521fff68 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldTypeParser.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import java.util.Map; + +import org.opensearch.index.mapper.AbstractPointGeometryFieldMapper; + +/** + * XYPointFieldTypeParser is used to parse and validate mapping parameters + */ +public class XYPointFieldTypeParser extends AbstractPointGeometryFieldMapper.TypeParser { + /** + * Invoke XYPointFieldMapperBuilder constructor and return object. + * + * @param name field name + * @param params parameters + * @return invoked XYPointFieldMapperBuilder object + */ + @Override + protected AbstractPointGeometryFieldMapper.Builder newBuilder(String name, Map params) { + return new XYPointFieldMapper.XYPointFieldMapperBuilder(name); + } + + /** + * Parse nullValue and reset XYPoint. + * + * @param nullValue null_value parameter value used as a substitute for any explicit null values + * @param ignoreZValue if true (default), third dimension is ignored. 
If false, points containing more than two dimension throw an exception + * @param ignoreMalformed if true, malformed points are ignored else. If false(default) malformed points throw an exception + * @return XYPoint after parsing null_value and resetting coordinates + */ + @Override + protected XYPoint parseNullValue(Object nullValue, boolean ignoreZValue, boolean ignoreMalformed) { + return XYPointParser.parseXYPoint(nullValue, ignoreZValue); + } + +} diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexer.java b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexer.java new file mode 100644 index 00000000..08ef7f02 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexer.java @@ -0,0 +1,70 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import org.apache.lucene.document.XYPointField; +import org.apache.lucene.geo.XYPoint; +import org.apache.lucene.index.IndexableField; +import org.opensearch.geometry.Point; +import org.opensearch.geospatial.index.common.xyshape.XYShapeConverter; +import org.opensearch.index.mapper.AbstractGeometryFieldMapper; +import org.opensearch.index.mapper.ParseContext; + +import lombok.AllArgsConstructor; + +/** + * Converts points into Lucene-compatible form{@link XYPoint} for indexing in a xy_point field. 
+ */ +@AllArgsConstructor +public class XYPointIndexer + implements + AbstractGeometryFieldMapper.Indexer, List> { + private final String fieldName; + + /** + * Converts the list of {@link org.opensearch.geospatial.index.mapper.xypoint.XYPoint} to list of {@link XYPoint} + * @param points list of {@link org.opensearch.geospatial.index.mapper.xypoint.XYPoint} + * @return list of {@link XYPoint} that are converted from opensearch to lucene type + */ + @Override + public List prepareForIndexing(List points) { + Objects.requireNonNull(points, "XYPoint cannot be null"); + + if (points.isEmpty()) { + throw new IllegalArgumentException("XYPoint cannot be empty"); + } + + return points.stream() + .map(parsedXYPoint -> new Point(parsedXYPoint.getX(), parsedXYPoint.getY())) + .map(XYShapeConverter::toXYPoint) + .collect(Collectors.toList()); + } + + /** + * @return processed class type + */ + @Override + public Class> processedClass() { + Object listToObjectClass = List.class; + return (Class>) listToObjectClass.getClass(); + } + + /** + * converts the List of {@link XYPoint} to list of {@link IndexableField}. 
+ * The {@link IndexableField} returned are of type {@link XYPointField} + * @param context {@link ParseContext} + * @param xyPoints {@link XYPoint} list + * @return List of {@link IndexableField} + */ + @Override + public List indexShape(ParseContext context, List xyPoints) { + return xyPoints.stream().map(xyPoint -> new XYPointField(fieldName, xyPoint.getX(), xyPoint.getY())).collect(Collectors.toList()); + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParser.java b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParser.java new file mode 100644 index 00000000..19653e0e --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParser.java @@ -0,0 +1,282 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import org.opensearch.OpenSearchParseException; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.core.xcontent.MapXContentParser; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentSubParser; +import org.opensearch.geometry.ShapeType; + +/** + * Parse the value and set XYPoint represented as a String, Object, WKT, array. 
+ */ +public class XYPointParser { + private static final String ERR_MSG_INVALID_TOKEN = "token [{}] not allowed"; + private static final String ERR_MSG_INVALID_FIELDS = "field must be either [x|y], or [type|coordinates]"; + private static final String X_PARAMETER = "x"; + private static final String Y_PARAMETER = "y"; + public static final String GEOJSON_TYPE = "type"; + public static final String GEOJSON_COORDS = "coordinates"; + private static final String NULL_VALUE_PARAMETER = "null_value"; + private static final Boolean TRUE = true; + + /** + * Parses the value and set XYPoint which was represented as an object. + * + * @param value input which needs to be parsed which contains x and y coordinates in object form + * @param ignoreZValue boolean parameter which decides if third coordinate needs to be ignored or not + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws OpenSearchParseException + */ + public static XYPoint parseXYPoint(final Object value, final boolean ignoreZValue) throws OpenSearchParseException { + Objects.requireNonNull(value, "input value which needs to be parsed should not be null"); + + try ( + XContentParser parser = new MapXContentParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + Collections.singletonMap(NULL_VALUE_PARAMETER, value), + null + ) + ) { + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return parseXYPoint(parser, ignoreZValue); + } catch (IOException ex) { + throw new OpenSearchParseException("error parsing xy_point", ex); + } + } + + /** + * Parse the values to set the XYPoint which was represented as a String, Object, WKT, Array, or GeoJson. + *
    + *
  • Object:
    {@code {"x": , "y": 
  • + *
  • String:
    {@code ","}
  • + *
  • WKT:
    {@code "POINT ( )"}
  • + *
  • Array:
    {@code [, ]}
  • + *
  • GeoJson:
    {@code {"type": "Point", "coordinates": [, ]}}
  • + *
+ * + * @param parser {@link XContentParser} to parse the value from + * @param ignoreZValue boolean parameter which decides if third coordinate needs to be ignored or not + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws IOException + * @throws OpenSearchParseException + */ + public static XYPoint parseXYPoint(final XContentParser parser, final boolean ignoreZValue) throws IOException, + OpenSearchParseException { + Objects.requireNonNull(parser, "parser should not be null"); + XYPoint point = new XYPoint(); + switch (parser.currentToken()) { + case START_OBJECT: + parseXYPointObject(parser, point, ignoreZValue); + break; + case START_ARRAY: + parseXYPointArray(parser, point, ignoreZValue); + break; + case VALUE_STRING: + String val = parser.text(); + point.resetFromString(val, ignoreZValue); + break; + default: + throw new OpenSearchParseException("expecting xy_point as an array, a string, or an object format"); + } + return point; + } + + /** + * Parse point in either basic object format or GeoJson format + * + * Parser is expected to be pointing the start of the object. 
+ * ex) Parser is pointing { in {"x": 12.3, "y": 45.6} + * + * @param parser {@link XContentParser} to parse the value from + * @param point {@link XYPoint} to be returned after setting the x and y coordinates parsed from the parse + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws IOException + */ + private static XYPoint parseXYPointObject(final XContentParser parser, final XYPoint point, final boolean ignoreZValue) + throws IOException { + try (XContentSubParser subParser = new XContentSubParser(parser)) { + if (subParser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new OpenSearchParseException(ERR_MSG_INVALID_TOKEN, subParser.currentToken()); + } + + String field = subParser.currentName(); + if (X_PARAMETER.equals(field) || Y_PARAMETER.equals(field)) { + parseXYPointObjectBasicFields(subParser, point); + } else if (GEOJSON_TYPE.equals(field) || GEOJSON_COORDS.equals(field)) { + parseGeoJsonFields(subParser, point, ignoreZValue); + } else { + throw new OpenSearchParseException(ERR_MSG_INVALID_FIELDS); + } + + if (subParser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new OpenSearchParseException(ERR_MSG_INVALID_FIELDS); + } + + return point; + } + } + + /** + * Parse point in basic object format + * + * Parser is expected to be pointing the first field of the object. 
+ * ex) Parser is pointing x in {"x": 12.3, "y": 45.6} + * + * @param parser {@link XContentParser} to parse the value from + * @param point {@link XYPoint} to be returned after setting the x and y coordinates parsed from the parse + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws IOException + */ + private static XYPoint parseXYPointObjectBasicFields(final XContentParser parser, final XYPoint point) throws IOException { + final int numberOfFields = 2; + Map data = new HashMap<>(); + for (int i = 0; i < numberOfFields; i++) { + if (i != 0) { + parser.nextToken(); + } + + if (parser.currentToken() != XContentParser.Token.FIELD_NAME) { + break; + } + + String field = parser.currentName(); + if (X_PARAMETER.equals(field) == false && Y_PARAMETER.equals(field) == false) { + throw new OpenSearchParseException(ERR_MSG_INVALID_FIELDS); + } + switch (parser.nextToken()) { + case VALUE_NUMBER: + case VALUE_STRING: + try { + data.put(field, parser.doubleValue(true)); + } catch (NumberFormatException e) { + throw new OpenSearchParseException("[{}] and [{}] must be valid double values", e, X_PARAMETER, Y_PARAMETER); + } + break; + default: + throw new OpenSearchParseException("{} must be a number", field); + } + } + + if (data.get(X_PARAMETER) == null) { + throw new OpenSearchParseException("field [{}] missing", X_PARAMETER); + } + if (data.get(Y_PARAMETER) == null) { + throw new OpenSearchParseException("field [{}] missing", Y_PARAMETER); + } + + return point.reset(data.get(X_PARAMETER), data.get(Y_PARAMETER)); + } + + /** + * Parse point in GeoJson format + * + * Parser is expected to be pointing the first field of the object. 
+ * ex) Parser is pointing type in {"type": "Point", "coordinates": [12.3, 45.6]} + * + * @param parser {@link XContentParser} to parse the value from + * @param point {@link XYPoint} to be returned after setting the x and y coordinates parsed from the parse + * @param ignoreZValue boolean parameter which decides if third coordinate needs to be ignored or not + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws IOException + */ + private static XYPoint parseGeoJsonFields(final XContentParser parser, final XYPoint point, final boolean ignoreZValue) + throws IOException { + final int numberOfFields = 2; + boolean hasTypePoint = false; + boolean hasCoordinates = false; + for (int i = 0; i < numberOfFields; i++) { + if (i != 0) { + parser.nextToken(); + } + + if (parser.currentToken() != XContentParser.Token.FIELD_NAME) { + if (hasTypePoint == false) { + throw new OpenSearchParseException("field [{}] missing", GEOJSON_TYPE); + } + if (hasCoordinates == false) { + throw new OpenSearchParseException("field [{}] missing", GEOJSON_COORDS); + } + } + + if (GEOJSON_TYPE.equals(parser.currentName())) { + if (parser.nextToken() != XContentParser.Token.VALUE_STRING) { + throw new OpenSearchParseException("{} must be a string", GEOJSON_TYPE); + } + + // To be consistent with geo_shape parsing, ignore case here as well. 
+ if (ShapeType.POINT.name().equalsIgnoreCase(parser.text()) == false) { + throw new OpenSearchParseException("{} must be Point", GEOJSON_TYPE); + } + hasTypePoint = true; + } else if (GEOJSON_COORDS.equals(parser.currentName())) { + if (parser.nextToken() != XContentParser.Token.START_ARRAY) { + throw new OpenSearchParseException("{} must be an array", GEOJSON_COORDS); + } + parseXYPointArray(parser, point, ignoreZValue); + hasCoordinates = true; + } else { + throw new OpenSearchParseException(ERR_MSG_INVALID_FIELDS); + } + } + + return point; + } + + /** + * Parse point in an array format + * + * Parser is expected to be pointing the start of the array. + * ex) Parser is pointing [ in [12.3, 45.6] + * + * @param parser {@link XContentParser} to parse the value from + * @param point {@link XYPoint} to be returned after setting the x and y coordinates parsed from the parse + * @param ignoreZValue boolean parameter which decides if third coordinate needs to be ignored or not + * @return {@link XYPoint} after setting the x and y coordinates parsed from the parse + * @throws IOException + */ + private static XYPoint parseXYPointArray(final XContentParser parser, final XYPoint point, final boolean ignoreZValue) + throws IOException { + try (XContentSubParser subParser = new XContentSubParser(parser)) { + double x = Double.NaN; + double y = Double.NaN; + + int element = 0; + while (subParser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { + throw new OpenSearchParseException("numeric value expected"); + } + element++; + if (element == 1) { + x = parser.doubleValue(); + } else if (element == 2) { + y = parser.doubleValue(); + } else if (element == 3) { + XYPoint.assertZValue(ignoreZValue, parser.doubleValue()); + } else { + throw new OpenSearchParseException("[xy_point] field type does not accept more than 3 values"); + } + } + + if (element < 2) { + throw new OpenSearchParseException("[xy_point] field 
type should have at least two dimensions"); + } + return point.reset(x, y); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeIndexableFieldsVisitor.java b/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeIndexableFieldsVisitor.java index c4bc2e8a..3a0dd35a 100644 --- a/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeIndexableFieldsVisitor.java +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeIndexableFieldsVisitor.java @@ -46,14 +46,12 @@ public XYShapeIndexableFieldsVisitor(String fieldName) { @Override public IndexableField[] visit(Circle circle) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "invalid shape type found [ %s ] while indexing shape", CIRCLE) - ); + throw new IllegalArgumentException(String.format(Locale.ROOT, "invalid shape type found [ %s ] while indexing shape", CIRCLE)); } @Override public IndexableField[] visit(GeometryCollection collection) { - Objects.requireNonNull(collection, String.format(Locale.getDefault(), "%s cannot be null", ShapeType.GEOMETRYCOLLECTION)); + Objects.requireNonNull(collection, String.format(Locale.ROOT, "%s cannot be null", ShapeType.GEOMETRYCOLLECTION)); return visitCollection(collection); } @@ -65,32 +63,30 @@ public IndexableField[] visit(Line line) { @Override public IndexableField[] visit(LinearRing ring) { - throw new IllegalArgumentException( - String.format(Locale.getDefault(), "invalid shape type found [ %s ] while indexing shape", LINEARRING) - ); + throw new IllegalArgumentException(String.format(Locale.ROOT, "invalid shape type found [ %s ] while indexing shape", LINEARRING)); } @Override public IndexableField[] visit(MultiLine multiLine) { - Objects.requireNonNull(multiLine, String.format(Locale.getDefault(), "%s cannot be null", ShapeType.MULTILINESTRING)); + Objects.requireNonNull(multiLine, String.format(Locale.ROOT, "%s cannot be null", ShapeType.MULTILINESTRING)); 
return visitCollection(multiLine); } @Override public IndexableField[] visit(MultiPoint multiPoint) { - Objects.requireNonNull(multiPoint, String.format(Locale.getDefault(), "%s cannot be null", ShapeType.MULTIPOINT)); + Objects.requireNonNull(multiPoint, String.format(Locale.ROOT, "%s cannot be null", ShapeType.MULTIPOINT)); return visitCollection(multiPoint); } @Override public IndexableField[] visit(MultiPolygon multiPolygon) { - Objects.requireNonNull(multiPolygon, String.format(Locale.getDefault(), "%s cannot be null", ShapeType.MULTIPOLYGON)); + Objects.requireNonNull(multiPolygon, String.format(Locale.ROOT, "%s cannot be null", ShapeType.MULTIPOLYGON)); return visitCollection(multiPolygon); } @Override public IndexableField[] visit(Point point) { - Objects.requireNonNull(point, String.format(Locale.getDefault(), "%s cannot be null", ShapeType.POINT)); + Objects.requireNonNull(point, String.format(Locale.ROOT, "%s cannot be null", ShapeType.POINT)); XYPoint xyPoint = toXYPoint(point); return XYShape.createIndexableFields(fieldName, xyPoint.getX(), xyPoint.getY()); } diff --git a/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeSupportVisitor.java b/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeSupportVisitor.java index d26dcd50..26c56d8d 100644 --- a/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeSupportVisitor.java +++ b/src/main/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeSupportVisitor.java @@ -30,7 +30,7 @@ public final class XYShapeSupportVisitor implements GeometryVisitor { @Override public Geometry visit(Circle circle) { - throw new UnsupportedOperationException(String.format(Locale.getDefault(), "%s is not supported", CIRCLE)); + throw new UnsupportedOperationException(String.format(Locale.ROOT, "%s is not supported", CIRCLE)); } @Override @@ -45,7 +45,7 @@ public Geometry visit(Line line) { @Override public Geometry visit(LinearRing ring) { - throw new 
UnsupportedOperationException(String.format(Locale.getDefault(), "cannot index %s [ %s ] directly", LINEARRING, ring)); + throw new UnsupportedOperationException(String.format(Locale.ROOT, "cannot index %s [ %s ] directly", LINEARRING, ring)); } @Override diff --git a/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessor.java b/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessor.java new file mode 100644 index 00000000..66b377c7 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessor.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query.xypoint; + +import java.util.Locale; + +import org.apache.lucene.search.Query; +import org.opensearch.common.geo.ShapeRelation; +import org.opensearch.geometry.Geometry; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.QueryShardException; + +/** + * Query Processor to convert given Geometry into Lucene query + */ +public class XYPointQueryProcessor { + /** + * Creates a {@link Query} that matches all indexed shapes to the provided {@link Geometry} based on {@link ShapeRelation} + * + * @param shape OpenSearch {@link Geometry} as an input + * @param fieldName field name that contains indexed points + * @param relation Relation to be used to get all points from given Geometry + * @param context QueryShardContext instance + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + */ + public Query shapeQuery(Geometry shape, String fieldName, ShapeRelation relation, QueryShardContext context) { + validateIsXYPointFieldType(fieldName, context); + // XYPoint only support "intersects" spatial relation which returns points that are on the edge and inside the given geometry + if (relation != 
ShapeRelation.INTERSECTS) { + throw new QueryShardException( + context, + String.format(Locale.ROOT, "[%s] query relation not supported for Field [%s]", relation, fieldName) + ); + } + + return getVectorQueryFromShape(shape, fieldName, context); + } + + private void validateIsXYPointFieldType(String fieldName, QueryShardContext context) { + var fieldType = context.fieldMapper(fieldName); + if (fieldType instanceof XYPointFieldMapper.XYPointFieldType) { + return; + } + + throw new QueryShardException( + context, + String.format( + Locale.ROOT, + "Expected [%s] field type for Field [%s] but found [%s]", + XYPointFieldMapper.CONTENT_TYPE, + fieldName, + fieldType.typeName() + ) + ); + } + + protected Query getVectorQueryFromShape(Geometry queryShape, String fieldName, QueryShardContext context) { + var xyPointQueryVisitor = new XYPointQueryVisitor(fieldName, context.fieldMapper(fieldName), context); + return queryShape.visit(xyPointQueryVisitor); + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitor.java b/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitor.java new file mode 100644 index 00000000..df9d9eae --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitor.java @@ -0,0 +1,206 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query.xypoint; + +import static org.opensearch.geospatial.index.common.xyshape.XYShapeConverter.toXYCircle; +import static org.opensearch.geospatial.index.common.xyshape.XYShapeConverter.toXYPolygon; +import static org.opensearch.geospatial.index.common.xyshape.XYShapeConverter.toXYRectangle; + +import java.util.Collections; +import java.util.Locale; +import java.util.Objects; + +import org.apache.lucene.document.XYDocValuesField; +import org.apache.lucene.document.XYPointField; +import org.apache.lucene.geo.XYCircle; +import 
org.apache.lucene.geo.XYPolygon; +import org.apache.lucene.geo.XYRectangle; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.GeometryVisitor; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.QueryShardException; + +import lombok.AllArgsConstructor; + +/** + * Geometry Visitor to create a query to find all cartesian XYPoints + * that comply ShapeRelation with all other XYShapes objects + */ +@AllArgsConstructor +public class XYPointQueryVisitor implements GeometryVisitor { + private final String fieldName; + private final MappedFieldType fieldType; + private final QueryShardContext context; + + /** + * @param line input geometry {@link Line} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(Line line) { + return geometryNotSupported(ShapeType.LINESTRING); + } + + /** + * @param ring input geometry {@link LinearRing} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(LinearRing ring) { + return geometryNotSupported(ShapeType.LINEARRING); + } + + /** + * @param 
multiLine input geometry {@link MultiLine} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(MultiLine multiLine) { + return geometryNotSupported(ShapeType.MULTILINESTRING); + } + + /** + * @param multiPoint input geometry {@link MultiPoint} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(MultiPoint multiPoint) { + return geometryNotSupported(ShapeType.MULTIPOINT); + } + + /** + * @param point input geometry {@link Point} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(Point point) { + return geometryNotSupported(ShapeType.POINT); + } + + /** + * @param circle input geometry {@link Circle} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + * throws QueryShardException + */ + @Override + public Query visit(Circle circle) throws RuntimeException { + Objects.requireNonNull(circle, "Circle cannot be null"); + XYCircle xyCircle = toXYCircle(circle); + var query = XYPointField.newDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); + if (!fieldType.hasDocValues()) { + return query; + } + + var dvQuery = XYDocValuesField.newSlowDistanceQuery(fieldName, xyCircle.getX(), xyCircle.getY(), xyCircle.getRadius()); + return new IndexOrDocValuesQuery(query, dvQuery); + } + + /** + * @param rectangle input geometry {@link Rectangle} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + */ + @Override + public Query visit(Rectangle rectangle) { + Objects.requireNonNull(rectangle, "Rectangle cannot be null"); + XYRectangle xyRectangle = toXYRectangle(rectangle); + var query = XYPointField.newBoxQuery(fieldName, xyRectangle.minX, xyRectangle.maxX, xyRectangle.minY, xyRectangle.maxY); + if (!fieldType.hasDocValues()) { + 
return query; + } + + var dvQuery = XYDocValuesField.newSlowBoxQuery(fieldName, xyRectangle.minX, xyRectangle.maxX, xyRectangle.minY, xyRectangle.maxY); + return new IndexOrDocValuesQuery(query, dvQuery); + } + + /** + * @param multiPolygon input geometry {@link MultiPolygon} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + */ + @Override + public Query visit(MultiPolygon multiPolygon) { + Objects.requireNonNull(multiPolygon, "Multi Polygon cannot be null"); + return visitCollection(multiPolygon); + } + + /** + * @param polygon input geometry {@link Polygon} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + */ + @Override + public Query visit(Polygon polygon) { + Objects.requireNonNull(polygon, "Polygon cannot be null"); + return visitCollection(new GeometryCollection(Collections.singletonList(polygon))); + } + + /** + * @param collection input geometry {@link GeometryCollection} + * @return {@link Query} instance from XYPointField.XYPointInGeometryQuery + */ + @Override + public Query visit(GeometryCollection collection) { + if (collection.isEmpty()) { + return new MatchNoDocsQuery(); + } + var booleanQueryBuilder = new BooleanQuery.Builder(); + visit(booleanQueryBuilder, collection); + return booleanQueryBuilder.build(); + } + + private void visit(BooleanQuery.Builder booleanQueryBuilder, GeometryCollection collection) { + var occur = BooleanClause.Occur.FILTER; + for (Geometry shape : collection) { + booleanQueryBuilder.add(shape.visit(this), occur); + } + } + + private Query visitCollection(GeometryCollection collection) { + if (collection.isEmpty()) { + return new MatchNoDocsQuery(); + } + + XYPolygon[] xyPolygons = new XYPolygon[collection.size()]; + for (int i = 0; i < collection.size(); i++) { + xyPolygons[i] = toXYPolygon(collection.get(i)); + } + + var query = XYPointField.newPolygonQuery(fieldName, xyPolygons); + if (!fieldType.hasDocValues()) { + return query; + } + + var dvQuery = 
XYDocValuesField.newSlowPolygonQuery(fieldName, xyPolygons); + return new IndexOrDocValuesQuery(query, dvQuery); + } + + private Query geometryNotSupported(ShapeType shapeType) { + throw new QueryShardException( + context, + String.format(Locale.ROOT, "Field [%s] found an unsupported shape [%s]", fieldName, shapeType.name()) + ); + } +} diff --git a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilder.java b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilder.java index 5e564e5f..f1221a4a 100644 --- a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilder.java +++ b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilder.java @@ -13,9 +13,9 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; import org.opensearch.common.geo.parsers.ShapeParser; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.geometry.Geometry; import org.opensearch.geospatial.index.mapper.xyshape.XYShapeQueryable; import org.opensearch.index.mapper.MappedFieldType; @@ -66,13 +66,7 @@ protected Query buildShapeQuery(QueryShardContext context, MappedFieldType field } throw new QueryShardException( context, - String.format( - Locale.getDefault(), - "Field [%s] is of unsupported type [%s] for [%s] query", - fieldName, - fieldType.typeName(), - NAME - ) + String.format(Locale.ROOT, "Field [%s] is of unsupported type [%s] for [%s] query", fieldName, fieldType.typeName(), NAME) ); } diff --git a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryProcessor.java 
b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryProcessor.java index 6d91984f..cc95b59f 100644 --- a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryProcessor.java +++ b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryProcessor.java @@ -8,8 +8,6 @@ import java.util.List; import java.util.Locale; -import lombok.NonNull; - import org.apache.lucene.document.ShapeField; import org.apache.lucene.document.XYShape; import org.apache.lucene.geo.XYGeometry; @@ -23,6 +21,8 @@ import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.QueryShardException; +import lombok.NonNull; + /** * Query Processor to convert given Geometry into Lucene query */ @@ -75,7 +75,7 @@ private void validateIsXYShapeFieldType(String fieldName, QueryShardContext cont throw new QueryShardException( context, String.format( - Locale.getDefault(), + Locale.ROOT, "Expected %s field type for Field [ %s ] but found %s", XYShapeFieldMapper.CONTENT_TYPE, fieldName, diff --git a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitor.java b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitor.java index e198b84a..d82d61fc 100644 --- a/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitor.java +++ b/src/main/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitor.java @@ -71,7 +71,7 @@ public List visit(Line line) throws RuntimeException { public List visit(LinearRing linearRing) throws RuntimeException { throw new QueryShardException( this.context, - String.format(Locale.getDefault(), "Field [%s] found an unsupported shape LinearRing", this.name) + String.format(Locale.ROOT, "Field [%s] found an unsupported shape LinearRing", this.name) ); } diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceAction.java 
new file mode 100644 index 00000000..b08e0861 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceAction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Ip2Geo datasource delete action + */ +public class DeleteDatasourceAction extends ActionType { + /** + * Delete datasource action instance + */ + public static final DeleteDatasourceAction INSTANCE = new DeleteDatasourceAction(); + /** + * Delete datasource action name + */ + public static final String NAME = "cluster:admin/geospatial/datasource/delete"; + + private DeleteDatasourceAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequest.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequest.java new file mode 100644 index 00000000..35a106de --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequest.java @@ -0,0 +1,60 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.geospatial.ip2geo.common.ParameterValidator; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; + +/** + * GeoIP datasource delete request + */ +@Getter +@Setter +@AllArgsConstructor +public class DeleteDatasourceRequest extends ActionRequest { + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + 
/** + * @param name the datasource name + * @return the datasource name + */ + private String name; + + /** + * Constructor + * + * @param in the stream input + * @throws IOException IOException + */ + public DeleteDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = null; + if (VALIDATOR.validateDatasourceName(name).isEmpty() == false) { + errors = new ActionRequestValidationException(); + errors.addValidationError("no such datasource exist"); + } + return errors; + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportAction.java new file mode 100644 index 00000000..d183b264 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportAction.java @@ -0,0 +1,148 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; + +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.exceptions.ConcurrentModificationException; +import org.opensearch.geospatial.exceptions.ResourceInUseException; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; 
+import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.dao.Ip2GeoProcessorDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.ingest.IngestService; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import lombok.extern.log4j.Log4j2; + +/** + * Transport action to delete datasource + */ +@Log4j2 +public class DeleteDatasourceTransportAction extends HandledTransportAction { + private static final long LOCK_DURATION_IN_SECONDS = 300l; + private final Ip2GeoLockService lockService; + private final IngestService ingestService; + private final DatasourceDao datasourceDao; + private final GeoIpDataDao geoIpDataDao; + private final Ip2GeoProcessorDao ip2GeoProcessorDao; + private final ThreadPool threadPool; + + /** + * Constructor + * @param transportService the transport service + * @param actionFilters the action filters + * @param lockService the lock service + * @param ingestService the ingest service + * @param datasourceDao the datasource facade + */ + @Inject + public DeleteDatasourceTransportAction( + final TransportService transportService, + final ActionFilters actionFilters, + final Ip2GeoLockService lockService, + final IngestService ingestService, + final DatasourceDao datasourceDao, + final GeoIpDataDao geoIpDataDao, + final Ip2GeoProcessorDao ip2GeoProcessorDao, + final ThreadPool threadPool + ) { + super(DeleteDatasourceAction.NAME, transportService, actionFilters, DeleteDatasourceRequest::new); + this.lockService = lockService; + this.ingestService = ingestService; + this.datasourceDao = datasourceDao; + this.geoIpDataDao = geoIpDataDao; + this.ip2GeoProcessorDao = ip2GeoProcessorDao; + this.threadPool = threadPool; + } + + /** + * We delete datasource regardless of its state as long as we can acquire a lock + * + * @param task 
the task + * @param request the request + * @param listener the listener + */ + @Override + protected void doExecute(final Task task, final DeleteDatasourceRequest request, final ActionListener listener) { + lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { + if (lock == null) { + listener.onFailure( + new ConcurrentModificationException("another processor is holding a lock on the resource. Try again later") + ); + return; + } + try { + // TODO: makes every sub-methods as async call to avoid using a thread in generic pool + threadPool.generic().submit(() -> { + try { + deleteDatasource(request.getName()); + lockService.releaseLock(lock); + listener.onResponse(new AcknowledgedResponse(true)); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }, exception -> { listener.onFailure(exception); })); + } + + @VisibleForTesting + protected void deleteDatasource(final String datasourceName) throws IOException { + Datasource datasource = datasourceDao.getDatasource(datasourceName); + if (datasource == null) { + throw new ResourceNotFoundException("no such datasource exist"); + } + DatasourceState previousState = datasource.getState(); + setDatasourceStateAsDeleting(datasource); + + try { + geoIpDataDao.deleteIp2GeoDataIndex(datasource.getIndices()); + } catch (Exception e) { + if (previousState.equals(datasource.getState()) == false) { + datasource.setState(previousState); + datasourceDao.updateDatasource(datasource); + } + throw e; + } + datasourceDao.deleteDatasource(datasource); + } + + private void setDatasourceStateAsDeleting(final Datasource datasource) { + if (ip2GeoProcessorDao.getProcessors(datasource.getName()).isEmpty() == false) { + throw new ResourceInUseException("datasource is being used by one of processors"); + } + + DatasourceState previousState = datasource.getState(); + 
datasource.setState(DatasourceState.DELETING); + datasourceDao.updateDatasource(datasource); + + // Check again as processor might just have been created. + // If it fails to update the state back to the previous state, the new processor + // will fail to convert an ip to a geo data. + // In such case, user have to delete the processor and delete this datasource again. + if (ip2GeoProcessorDao.getProcessors(datasource.getName()).isEmpty() == false) { + datasource.setState(previousState); + datasourceDao.updateDatasource(datasource); + throw new ResourceInUseException("datasource is being used by one of processors"); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceAction.java new file mode 100644 index 00000000..039ab35b --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceAction.java @@ -0,0 +1,26 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import org.opensearch.action.ActionType; + +/** + * Ip2Geo datasource get action + */ +public class GetDatasourceAction extends ActionType { + /** + * Get datasource action instance + */ + public static final GetDatasourceAction INSTANCE = new GetDatasourceAction(); + /** + * Get datasource action name + */ + public static final String NAME = "cluster:admin/geospatial/datasource/get"; + + private GetDatasourceAction() { + super(NAME, GetDatasourceResponse::new); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequest.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequest.java new file mode 100644 index 00000000..34d72c58 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequest.java @@ -0,0 +1,67 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: 
Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import lombok.Getter; +import lombok.Setter; + +/** + * Ip2Geo datasource get request + */ +@Getter +@Setter +public class GetDatasourceRequest extends ActionRequest { + /** + * @param names the datasource names + * @return the datasource names + */ + private String[] names; + + /** + * Constructs a new get datasource request with a list of datasources. + * + * If the list of datasources is empty or it contains a single element "_all", all registered datasources + * are returned. + * + * @param names list of datasource names + */ + public GetDatasourceRequest(final String[] names) { + this.names = names; + } + + /** + * Constructor with stream input + * @param in the stream input + * @throws IOException IOException + */ + public GetDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.names = in.readStringArray(); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = null; + if (names == null) { + errors = new ActionRequestValidationException(); + errors.addValidationError("names should not be null"); + } + return errors; + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(names); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponse.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponse.java new file mode 100644 index 00000000..cab60553 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponse.java @@ -0,0 +1,88 @@ +/* + * Copyright OpenSearch Contributors + * 
SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; +import java.time.Instant; +import java.util.List; + +import org.opensearch.core.ParseField; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; + +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.Setter; + +/** + * Ip2Geo datasource get request + */ +@Getter +@Setter +@EqualsAndHashCode(callSuper = false) +public class GetDatasourceResponse extends ActionResponse implements ToXContentObject { + private static final ParseField FIELD_NAME_DATASOURCES = new ParseField("datasources"); + private static final ParseField FIELD_NAME_NAME = new ParseField("name"); + private static final ParseField FIELD_NAME_STATE = new ParseField("state"); + private static final ParseField FIELD_NAME_ENDPOINT = new ParseField("endpoint"); + private static final ParseField FIELD_NAME_UPDATE_INTERVAL = new ParseField("update_interval_in_days"); + private static final ParseField FIELD_NAME_NEXT_UPDATE_AT = new ParseField("next_update_at_in_epoch_millis"); + private static final ParseField FIELD_NAME_NEXT_UPDATE_AT_READABLE = new ParseField("next_update_at"); + private static final ParseField FIELD_NAME_DATABASE = new ParseField("database"); + private static final ParseField FIELD_NAME_UPDATE_STATS = new ParseField("update_stats"); + private List datasources; + + /** + * Default constructor + * + * @param datasources List of datasources + */ + public GetDatasourceResponse(final List datasources) { + this.datasources = datasources; + } + + /** + * Constructor with StreamInput + * + * @param in the stream input + */ + public GetDatasourceResponse(final StreamInput 
in) throws IOException { + datasources = in.readList(Datasource::new); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeList(datasources); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.startArray(FIELD_NAME_DATASOURCES.getPreferredName()); + for (Datasource datasource : datasources) { + builder.startObject(); + builder.field(FIELD_NAME_NAME.getPreferredName(), datasource.getName()); + builder.field(FIELD_NAME_STATE.getPreferredName(), datasource.getState()); + builder.field(FIELD_NAME_ENDPOINT.getPreferredName(), datasource.getEndpoint()); + builder.field(FIELD_NAME_UPDATE_INTERVAL.getPreferredName(), datasource.getUserSchedule().getInterval()); + builder.timeField( + FIELD_NAME_NEXT_UPDATE_AT.getPreferredName(), + FIELD_NAME_NEXT_UPDATE_AT_READABLE.getPreferredName(), + datasource.getUserSchedule().getNextExecutionTime(Instant.now()).toEpochMilli() + ); + builder.field(FIELD_NAME_DATABASE.getPreferredName(), datasource.getDatabase()); + builder.field(FIELD_NAME_UPDATE_STATS.getPreferredName(), datasource.getUpdateStats()); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportAction.java new file mode 100644 index 00000000..522e1c39 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportAction.java @@ -0,0 +1,81 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.util.Collections; +import java.util.List; + +import org.opensearch.OpenSearchException; +import org.opensearch.action.support.ActionFilters; +import 
org.opensearch.action.support.HandledTransportAction; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.tasks.Task; +import org.opensearch.transport.TransportService; + +/** + * Transport action to get datasource + */ +public class GetDatasourceTransportAction extends HandledTransportAction { + private final DatasourceDao datasourceDao; + + /** + * Default constructor + * @param transportService the transport service + * @param actionFilters the action filters + * @param datasourceDao the datasource facade + */ + @Inject + public GetDatasourceTransportAction( + final TransportService transportService, + final ActionFilters actionFilters, + final DatasourceDao datasourceDao + ) { + super(GetDatasourceAction.NAME, transportService, actionFilters, GetDatasourceRequest::new); + this.datasourceDao = datasourceDao; + } + + @Override + protected void doExecute(final Task task, final GetDatasourceRequest request, final ActionListener listener) { + if (shouldGetAllDatasource(request)) { + // We don't expect too many data sources. Therefore, querying all data sources without pagination should be fine. 
+ datasourceDao.getAllDatasources(newActionListener(listener)); + } else { + datasourceDao.getDatasources(request.getNames(), newActionListener(listener)); + } + } + + private boolean shouldGetAllDatasource(final GetDatasourceRequest request) { + if (request.getNames() == null) { + throw new OpenSearchException("names in a request should not be null"); + } + + return request.getNames().length == 0 || (request.getNames().length == 1 && "_all".equals(request.getNames()[0])); + } + + @VisibleForTesting + protected ActionListener> newActionListener(final ActionListener listener) { + return new ActionListener<>() { + @Override + public void onResponse(final List datasources) { + listener.onResponse(new GetDatasourceResponse(datasources)); + } + + @Override + public void onFailure(final Exception e) { + if (e instanceof IndexNotFoundException) { + listener.onResponse(new GetDatasourceResponse(Collections.emptyList())); + return; + } + listener.onFailure(e); + } + }; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceAction.java new file mode 100644 index 00000000..2554b4f5 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceAction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Ip2Geo datasource creation action + */ +public class PutDatasourceAction extends ActionType { + /** + * Put datasource action instance + */ + public static final PutDatasourceAction INSTANCE = new PutDatasourceAction(); + /** + * Put datasource action name + */ + public static final String NAME = "cluster:admin/geospatial/datasource/put"; + + private PutDatasourceAction() { + super(NAME, AcknowledgedResponse::new); + 
} +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequest.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequest.java new file mode 100644 index 00000000..42072d6c --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequest.java @@ -0,0 +1,173 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.List; +import java.util.Locale; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.ParameterValidator; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * Ip2Geo datasource creation request + */ +@Getter +@Setter +@Log4j2 +public class PutDatasourceRequest extends ActionRequest { + public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); + public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + + /** + * @param name the datasource name + * @return the datasource name + */ + private String name; + /** + * @param endpoint url to a manifest file for a datasource + * @return url to a manifest file for a datasource + */ + private String endpoint; + /** + * @param updateInterval update interval of a datasource + * @return update interval of 
a datasource + */ + private TimeValue updateInterval; + + /** + * Parser of a datasource + */ + public static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("put_datasource"); + PARSER.declareString((request, val) -> request.setEndpoint(val), ENDPOINT_FIELD); + PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); + } + + /** + * Default constructor + * @param name name of a datasource + */ + public PutDatasourceRequest(final String name) { + this.name = name; + } + + /** + * Constructor with stream input + * @param in the stream input + * @throws IOException IOException + */ + public PutDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + this.endpoint = in.readString(); + this.updateInterval = in.readTimeValue(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeString(endpoint); + out.writeTimeValue(updateInterval); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = new ActionRequestValidationException(); + List errorMsgs = VALIDATOR.validateDatasourceName(name); + if (errorMsgs.isEmpty() == false) { + errorMsgs.stream().forEach(msg -> errors.addValidationError(msg)); + } + validateEndpoint(errors); + validateUpdateInterval(errors); + return errors.validationErrors().isEmpty() ? null : errors; + } + + /** + * Conduct following validation on endpoint + * 1. endpoint format complies with RFC-2396 + * 2. 
validate manifest file from the endpoint + * + * @param errors the errors to add error messages + */ + private void validateEndpoint(final ActionRequestValidationException errors) { + try { + URL url = new URL(endpoint); + url.toURI(); // Validate URL complies with RFC-2396 + validateManifestFile(url, errors); + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided", endpoint, e); + errors.addValidationError("Invalid URL format is provided"); + } + } + + /** + * Conduct following validation on url + * 1. can read manifest file from the endpoint + * 2. the url in the manifest file complies with RFC-2396 + * 3. updateInterval is less than validForInDays value in the manifest file + * + * @param url the url to validate + * @param errors the errors to add error messages + */ + private void validateManifestFile(final URL url, final ActionRequestValidationException errors) { + DatasourceManifest manifest; + try { + manifest = DatasourceManifest.Builder.build(url); + } catch (Exception e) { + log.info("Error occurred while reading a file from {}", url, e); + errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage())); + return; + } + + try { + new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396 + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e); + errors.addValidationError("Invalid URL format is provided for url field in the manifest file"); + return; + } + + if (manifest.getValidForInDays() != null && updateInterval.days() >= manifest.getValidForInDays()) { + errors.addValidationError( + String.format( + Locale.ROOT, + "updateInterval %d should be smaller than %d", + updateInterval.days(), + manifest.getValidForInDays() + ) + ); + } + } + + /** + * Validate updateInterval is equal or larger than 1 + * + * @param errors the errors to add 
error messages + */ + private void validateUpdateInterval(final ActionRequestValidationException errors) { + if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { + errors.addValidationError("Update interval should be equal to or larger than 1 day"); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportAction.java new file mode 100644 index 00000000..bb55d411 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportAction.java @@ -0,0 +1,173 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService.LOCK_DURATION_IN_SECONDS; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReference; + +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.action.StepListener; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.exceptions.ConcurrentModificationException; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceUpdateService; +import org.opensearch.index.engine.VersionConflictEngineException; +import org.opensearch.jobscheduler.spi.LockModel; +import 
org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import lombok.extern.log4j.Log4j2; + +/** + * Transport action to create datasource + */ +@Log4j2 +public class PutDatasourceTransportAction extends HandledTransportAction { + private final ThreadPool threadPool; + private final DatasourceDao datasourceDao; + private final DatasourceUpdateService datasourceUpdateService; + private final Ip2GeoLockService lockService; + + /** + * Default constructor + * @param transportService the transport service + * @param actionFilters the action filters + * @param threadPool the thread pool + * @param datasourceDao the datasource facade + * @param datasourceUpdateService the datasource update service + * @param lockService the lock service + */ + @Inject + public PutDatasourceTransportAction( + final TransportService transportService, + final ActionFilters actionFilters, + final ThreadPool threadPool, + final DatasourceDao datasourceDao, + final DatasourceUpdateService datasourceUpdateService, + final Ip2GeoLockService lockService + ) { + super(PutDatasourceAction.NAME, transportService, actionFilters, PutDatasourceRequest::new); + this.threadPool = threadPool; + this.datasourceDao = datasourceDao; + this.datasourceUpdateService = datasourceUpdateService; + this.lockService = lockService; + } + + @Override + protected void doExecute(final Task task, final PutDatasourceRequest request, final ActionListener listener) { + lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { + if (lock == null) { + listener.onFailure( + new ConcurrentModificationException("another processor is holding a lock on the resource. 
Try again later") + ); + return; + } + try { + internalDoExecute(request, lock, listener); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }, exception -> { listener.onFailure(exception); })); + } + + /** + * This method takes lock as a parameter and is responsible for releasing lock + * unless exception is thrown + */ + @VisibleForTesting + protected void internalDoExecute( + final PutDatasourceRequest request, + final LockModel lock, + final ActionListener listener + ) { + StepListener createIndexStep = new StepListener<>(); + datasourceDao.createIndexIfNotExists(createIndexStep); + createIndexStep.whenComplete(v -> { + Datasource datasource = Datasource.Builder.build(request); + datasourceDao.putDatasource(datasource, getIndexResponseListener(datasource, lock, listener)); + }, exception -> { + lockService.releaseLock(lock); + listener.onFailure(exception); + }); + } + + /** + * This method takes lock as a parameter and is responsible for releasing lock + * unless exception is thrown + */ + @VisibleForTesting + protected ActionListener getIndexResponseListener( + final Datasource datasource, + final LockModel lock, + final ActionListener listener + ) { + return new ActionListener<>() { + @Override + public void onResponse(final IndexResponse indexResponse) { + // This is user initiated request. Therefore, we want to handle the first datasource update task in a generic thread + // pool. 
+ threadPool.generic().submit(() -> { + AtomicReference lockReference = new AtomicReference<>(lock); + try { + createDatasource(datasource, lockService.getRenewLockRunnable(lockReference)); + } finally { + lockService.releaseLock(lockReference.get()); + } + }); + listener.onResponse(new AcknowledgedResponse(true)); + } + + @Override + public void onFailure(final Exception e) { + lockService.releaseLock(lock); + if (e instanceof VersionConflictEngineException) { + listener.onFailure(new ResourceAlreadyExistsException("datasource [{}] already exists", datasource.getName())); + } else { + listener.onFailure(e); + } + } + }; + } + + @VisibleForTesting + protected void createDatasource(final Datasource datasource, final Runnable renewLock) { + if (DatasourceState.CREATING.equals(datasource.getState()) == false) { + log.error("Invalid datasource state. Expecting {} but received {}", DatasourceState.CREATING, datasource.getState()); + markDatasourceAsCreateFailed(datasource); + return; + } + + try { + datasourceUpdateService.updateOrCreateGeoIpData(datasource, renewLock); + } catch (Exception e) { + log.error("Failed to create datasource for {}", datasource.getName(), e); + markDatasourceAsCreateFailed(datasource); + } + } + + private void markDatasourceAsCreateFailed(final Datasource datasource) { + datasource.getUpdateStats().setLastFailedAt(Instant.now()); + datasource.setState(DatasourceState.CREATE_FAILED); + try { + datasourceDao.updateDatasource(datasource); + } catch (Exception e) { + log.error("Failed to mark datasource state as CREATE_FAILED for {}", datasource.getName(), e); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandler.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandler.java new file mode 100644 index 00000000..dc2dd117 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandler.java @@ -0,0 +1,49 @@ +/* + * Copyright 
OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; +import static org.opensearch.rest.RestRequest.Method.DELETE; + +import java.util.List; +import java.util.Locale; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +/** + * Rest handler for Ip2Geo datasource delete request + */ +public class RestDeleteDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "ip2geo_datasource_delete"; + private static final String PARAMS_NAME = "name"; + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { + final String name = request.param(PARAMS_NAME); + final DeleteDatasourceRequest deleteDatasourceRequest = new DeleteDatasourceRequest(name); + + return channel -> client.executeLocally( + DeleteDatasourceAction.INSTANCE, + deleteDatasourceRequest, + new RestToXContentListener<>(channel) + ); + } + + @Override + public List routes() { + String path = String.join(URL_DELIMITER, getPluginURLPrefix(), String.format(Locale.ROOT, "ip2geo/datasource/{%s}", PARAMS_NAME)); + return List.of(new Route(DELETE, path)); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandler.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandler.java new file mode 100644 index 00000000..0a51660d --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandler.java @@ -0,0 +1,46 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; +import static org.opensearch.rest.RestRequest.Method.GET; + +import java.util.List; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.common.Strings; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +/** + * Rest handler for Ip2Geo datasource get request + */ +public class RestGetDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "ip2geo_datasource_get"; + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { + final String[] names = request.paramAsStringArray("name", Strings.EMPTY_ARRAY); + final GetDatasourceRequest getDatasourceRequest = new GetDatasourceRequest(names); + + return channel -> client.executeLocally(GetDatasourceAction.INSTANCE, getDatasourceRequest, new RestToXContentListener<>(channel)); + } + + @Override + public List routes() { + return List.of( + new Route(GET, String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource")), + new Route(GET, String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/{name}")) + ); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandler.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandler.java new file mode 100644 index 00000000..8645d46b --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandler.java @@ -0,0 +1,79 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static 
org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; +import static org.opensearch.rest.RestRequest.Method.PUT; + +import java.io.IOException; +import java.util.List; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +/** + * Rest handler for Ip2Geo datasource creation + * + * This handler handles a request of + * PUT /_plugins/geospatial/ip2geo/datasource/{id} + * { + * "endpoint": {endpoint}, + * "update_interval_in_days": 3 + * } + * + * When request is received, it will create a datasource by downloading GeoIp data from the endpoint. + * After the creation of datasource is completed, it will schedule the next update task after update_interval_in_days. 
+ * + */ +public class RestPutDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "ip2geo_datasource_put"; + private final ClusterSettings clusterSettings; + private final URLDenyListChecker urlDenyListChecker; + + public RestPutDatasourceHandler(final ClusterSettings clusterSettings, final URLDenyListChecker urlDenyListChecker) { + this.clusterSettings = clusterSettings; + this.urlDenyListChecker = urlDenyListChecker; + } + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final PutDatasourceRequest putDatasourceRequest = new PutDatasourceRequest(request.param("name")); + if (request.hasContentOrSourceParam()) { + try (XContentParser parser = request.contentOrSourceParamParser()) { + PutDatasourceRequest.PARSER.parse(parser, putDatasourceRequest, null); + } + } + if (putDatasourceRequest.getEndpoint() == null) { + putDatasourceRequest.setEndpoint(clusterSettings.get(Ip2GeoSettings.DATASOURCE_ENDPOINT)); + } + if (putDatasourceRequest.getUpdateInterval() == null) { + putDatasourceRequest.setUpdateInterval(TimeValue.timeValueDays(clusterSettings.get(Ip2GeoSettings.DATASOURCE_UPDATE_INTERVAL))); + } + + // Call to validate if URL is in a deny-list or not. 
+ urlDenyListChecker.toUrlIfNotInDenyList(putDatasourceRequest.getEndpoint()); + return channel -> client.executeLocally(PutDatasourceAction.INSTANCE, putDatasourceRequest, new RestToXContentListener<>(channel)); + } + + @Override + public List routes() { + String path = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/{name}"); + return List.of(new Route(PUT, path)); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandler.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandler.java new file mode 100644 index 00000000..8ecc2cfc --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandler.java @@ -0,0 +1,63 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; +import static org.opensearch.rest.RestRequest.Method.PUT; + +import java.io.IOException; +import java.util.List; + +import org.opensearch.client.node.NodeClient; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.rest.BaseRestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.rest.action.RestToXContentListener; + +/** + * Rest handler for Ip2Geo datasource update request + */ +public class RestUpdateDatasourceHandler extends BaseRestHandler { + private static final String ACTION_NAME = "ip2geo_datasource_update"; + + private final URLDenyListChecker urlDenyListChecker; + + public RestUpdateDatasourceHandler(final URLDenyListChecker urlDenyListChecker) { + this.urlDenyListChecker = urlDenyListChecker; + } + + @Override + public String getName() { + return ACTION_NAME; + } + + @Override + protected RestChannelConsumer 
prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final UpdateDatasourceRequest updateDatasourceRequest = new UpdateDatasourceRequest(request.param("name")); + if (request.hasContentOrSourceParam()) { + try (XContentParser parser = request.contentOrSourceParamParser()) { + UpdateDatasourceRequest.PARSER.parse(parser, updateDatasourceRequest, null); + } + } + if (updateDatasourceRequest.getEndpoint() != null) { + // Call to validate if URL is in a deny-list or not. + urlDenyListChecker.toUrlIfNotInDenyList(updateDatasourceRequest.getEndpoint()); + } + return channel -> client.executeLocally( + UpdateDatasourceAction.INSTANCE, + updateDatasourceRequest, + new RestToXContentListener<>(channel) + ); + } + + @Override + public List routes() { + String path = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/{name}/_settings"); + return List.of(new Route(PUT, path)); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceAction.java new file mode 100644 index 00000000..96cd00df --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceAction.java @@ -0,0 +1,27 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import org.opensearch.action.ActionType; +import org.opensearch.action.support.master.AcknowledgedResponse; + +/** + * Ip2Geo datasource update action + */ +public class UpdateDatasourceAction extends ActionType { + /** + * Update datasource action instance + */ + public static final UpdateDatasourceAction INSTANCE = new UpdateDatasourceAction(); + /** + * Update datasource action name + */ + public static final String NAME = "cluster:admin/geospatial/datasource/update"; + + private UpdateDatasourceAction() { + super(NAME, AcknowledgedResponse::new); + } +} diff --git 
a/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequest.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequest.java new file mode 100644 index 00000000..64d3deb0 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequest.java @@ -0,0 +1,174 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Locale; + +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.ParameterValidator; + +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j2; + +/** + * Ip2Geo datasource update request + */ +@Getter +@Setter +@Log4j2 +@EqualsAndHashCode(callSuper = false) +public class UpdateDatasourceRequest extends ActionRequest { + public static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); + public static final ParseField UPDATE_INTERVAL_IN_DAYS_FIELD = new ParseField("update_interval_in_days"); + private static final int MAX_DATASOURCE_NAME_BYTES = 255; + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + + /** + * @param name the datasource name + * @return the datasource name + */ + private String name; + /** + * @param endpoint url to a manifest file for a datasource + * @return url to a manifest file for a datasource + */ + private String 
endpoint; + /** + * @param updateInterval update interval of a datasource + * @return update interval of a datasource + */ + private TimeValue updateInterval; + + /** + * Parser of a datasource + */ + public static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>("update_datasource"); + PARSER.declareString((request, val) -> request.setEndpoint(val), ENDPOINT_FIELD); + PARSER.declareLong((request, val) -> request.setUpdateInterval(TimeValue.timeValueDays(val)), UPDATE_INTERVAL_IN_DAYS_FIELD); + } + + /** + * Constructor + * @param name name of a datasource + */ + public UpdateDatasourceRequest(final String name) { + this.name = name; + } + + /** + * Constructor + * @param in the stream input + * @throws IOException IOException + */ + public UpdateDatasourceRequest(final StreamInput in) throws IOException { + super(in); + this.name = in.readString(); + this.endpoint = in.readOptionalString(); + this.updateInterval = in.readOptionalTimeValue(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(name); + out.writeOptionalString(endpoint); + out.writeOptionalTimeValue(updateInterval); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException errors = new ActionRequestValidationException(); + if (VALIDATOR.validateDatasourceName(name).isEmpty() == false) { + errors.addValidationError("no such datasource exist"); + } + if (endpoint == null && updateInterval == null) { + errors.addValidationError("no values to update"); + } + + validateEndpoint(errors); + validateUpdateInterval(errors); + + return errors.validationErrors().isEmpty() ? null : errors; + } + + /** + * Conduct following validation on endpoint + * 1. endpoint format complies with RFC-2396 + * 2. 
validate manifest file from the endpoint + * + * @param errors the errors to add error messages + */ + private void validateEndpoint(final ActionRequestValidationException errors) { + if (endpoint == null) { + return; + } + + try { + URL url = new URL(endpoint); + url.toURI(); // Validate URL complies with RFC-2396 + validateManifestFile(url, errors); + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided", endpoint, e); + errors.addValidationError("Invalid URL format is provided"); + } + } + + /** + * Conduct following validation on url + * 1. can read manifest file from the endpoint + * 2. the url in the manifest file complies with RFC-2396 + * + * @param url the url to validate + * @param errors the errors to add error messages + */ + private void validateManifestFile(final URL url, final ActionRequestValidationException errors) { + DatasourceManifest manifest; + try { + manifest = DatasourceManifest.Builder.build(url); + } catch (Exception e) { + log.info("Error occurred while reading a file from {}", url, e); + errors.addValidationError(String.format(Locale.ROOT, "Error occurred while reading a file from %s: %s", url, e.getMessage())); + return; + } + + try { + new URL(manifest.getUrl()).toURI(); // Validate URL complies with RFC-2396 + } catch (MalformedURLException | URISyntaxException e) { + log.info("Invalid URL[{}] is provided for url field in the manifest file", manifest.getUrl(), e); + errors.addValidationError("Invalid URL format is provided for url field in the manifest file"); + } + } + + /** + * Validate updateInterval is equal or larger than 1 + * + * @param errors the errors to add error messages + */ + private void validateUpdateInterval(final ActionRequestValidationException errors) { + if (updateInterval == null) { + return; + } + + if (updateInterval.compareTo(TimeValue.timeValueDays(1)) < 0) { + errors.addValidationError("Update interval should be equal to or larger than 1 day"); + } + } +} diff 
--git a/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportAction.java b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportAction.java new file mode 100644 index 00000000..f60df625 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportAction.java @@ -0,0 +1,222 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; +import java.net.URL; +import java.security.InvalidParameterException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.Locale; + +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.action.support.HandledTransportAction; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.inject.Inject; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.exceptions.ConcurrentModificationException; +import org.opensearch.geospatial.exceptions.IncompatibleDatasourceException; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceTask; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceUpdateService; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.tasks.Task; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import lombok.extern.log4j.Log4j2; + +/** + * Transport action to update datasource + */ +@Log4j2 +public class 
UpdateDatasourceTransportAction extends HandledTransportAction { + private static final long LOCK_DURATION_IN_SECONDS = 300l; + private final Ip2GeoLockService lockService; + private final DatasourceDao datasourceDao; + private final DatasourceUpdateService datasourceUpdateService; + private final ThreadPool threadPool; + + /** + * Constructor + * + * @param transportService the transport service + * @param actionFilters the action filters + * @param lockService the lock service + * @param datasourceDao the datasource facade + * @param datasourceUpdateService the datasource update service + */ + @Inject + public UpdateDatasourceTransportAction( + final TransportService transportService, + final ActionFilters actionFilters, + final Ip2GeoLockService lockService, + final DatasourceDao datasourceDao, + final DatasourceUpdateService datasourceUpdateService, + final ThreadPool threadPool + ) { + super(UpdateDatasourceAction.NAME, transportService, actionFilters, UpdateDatasourceRequest::new); + this.lockService = lockService; + this.datasourceUpdateService = datasourceUpdateService; + this.datasourceDao = datasourceDao; + this.threadPool = threadPool; + } + + /** + * Get a lock and update datasource + * + * @param task the task + * @param request the request + * @param listener the listener + */ + @Override + protected void doExecute(final Task task, final UpdateDatasourceRequest request, final ActionListener listener) { + lockService.acquireLock(request.getName(), LOCK_DURATION_IN_SECONDS, ActionListener.wrap(lock -> { + if (lock == null) { + listener.onFailure( + new ConcurrentModificationException("another processor is holding a lock on the resource. 
Try again later") + ); + return; + } + try { + // TODO: makes every sub-methods as async call to avoid using a thread in generic pool + threadPool.generic().submit(() -> { + try { + Datasource datasource = datasourceDao.getDatasource(request.getName()); + if (datasource == null) { + throw new ResourceNotFoundException("no such datasource exist"); + } + if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "data source is not in an [%s] state", DatasourceState.AVAILABLE) + ); + } + validate(request, datasource); + updateIfChanged(request, datasource); + lockService.releaseLock(lock); + listener.onResponse(new AcknowledgedResponse(true)); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }); + } catch (Exception e) { + lockService.releaseLock(lock); + listener.onFailure(e); + } + }, exception -> listener.onFailure(exception))); + } + + private void updateIfChanged(final UpdateDatasourceRequest request, final Datasource datasource) { + boolean isChanged = false; + if (isEndpointChanged(request, datasource)) { + datasource.setEndpoint(request.getEndpoint()); + isChanged = true; + } + if (isUpdateIntervalChanged(request)) { + datasource.setUserSchedule(new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS)); + datasource.setSystemSchedule(datasource.getUserSchedule()); + datasource.setTask(DatasourceTask.ALL); + isChanged = true; + } + + if (isChanged) { + datasourceDao.updateDatasource(datasource); + } + } + + /** + * Additional validation based on an existing datasource + * + * Basic validation is done in UpdateDatasourceRequest#validate + * In this method we do additional validation based on an existing datasource + * + * 1. Check the compatibility of new fields and old fields + * 2. 
Check the updateInterval is less than validForInDays in datasource + * + * This method throws exception if one of validation fails. + * + * @param request the update request + * @param datasource the existing datasource + * @throws IOException the exception + */ + private void validate(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { + validateFieldsCompatibility(request, datasource); + validateUpdateIntervalIsLessThanValidForInDays(request, datasource); + validateNextUpdateScheduleIsBeforeExpirationDay(request, datasource); + } + + private void validateNextUpdateScheduleIsBeforeExpirationDay(final UpdateDatasourceRequest request, final Datasource datasource) { + if (request.getUpdateInterval() == null) { + return; + } + + IntervalSchedule newSchedule = new IntervalSchedule(Instant.now(), (int) request.getUpdateInterval().getDays(), ChronoUnit.DAYS); + + if (newSchedule.getNextExecutionTime(Instant.now()).isAfter(datasource.expirationDay())) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "datasource will expire at %s with the update interval", datasource.expirationDay().toString()) + ); + } + } + + private void validateFieldsCompatibility(final UpdateDatasourceRequest request, final Datasource datasource) throws IOException { + if (isEndpointChanged(request, datasource) == false) { + return; + } + + List fields = datasourceUpdateService.getHeaderFields(request.getEndpoint()); + if (datasource.isCompatible(fields) == false) { + throw new IncompatibleDatasourceException( + "new fields [{}] does not contain all old fields [{}]", + fields.toString(), + datasource.getDatabase().getFields().toString() + ); + } + } + + private void validateUpdateIntervalIsLessThanValidForInDays(final UpdateDatasourceRequest request, final Datasource datasource) + throws IOException { + if (isEndpointChanged(request, datasource) == false && isUpdateIntervalChanged(request) == false) { + return; + } + + long validForInDays = 
isEndpointChanged(request, datasource) + ? DatasourceManifest.Builder.build(new URL(request.getEndpoint())).getValidForInDays() + : datasource.getDatabase().getValidForInDays(); + + long updateInterval = isUpdateIntervalChanged(request) + ? request.getUpdateInterval().days() + : datasource.getUserSchedule().getInterval(); + + if (updateInterval >= validForInDays) { + throw new InvalidParameterException( + String.format(Locale.ROOT, "updateInterval %d should be smaller than %d", updateInterval, validForInDays) + ); + } + } + + private boolean isEndpointChanged(final UpdateDatasourceRequest request, final Datasource datasource) { + return request.getEndpoint() != null && request.getEndpoint().equals(datasource.getEndpoint()) == false; + } + + /** + * Update interval is changed as long as user provide one because + * start time will get updated even if the update interval is same as current one. + * + * @param request the update datasource request + * @return true if update interval is changed, and false otherwise + */ + private boolean isUpdateIntervalChanged(final UpdateDatasourceRequest request) { + return request.getUpdateInterval() != null; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifest.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifest.java new file mode 100644 index 00000000..541906c1 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifest.java @@ -0,0 +1,147 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.CharBuffer; +import java.security.AccessController; +import java.security.PrivilegedAction; + +import org.opensearch.SpecialPermission; +import org.opensearch.common.SuppressForbidden; 
+import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.shared.Constants; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; + +/** + * Ip2Geo datasource manifest file object + * + * Manifest file is stored in an external endpoint. OpenSearch read the file and store values it in this object. + */ +@Setter +@Getter +@AllArgsConstructor +public class DatasourceManifest { + private static final ParseField URL_FIELD = new ParseField("url"); + private static final ParseField DB_NAME_FIELD = new ParseField("db_name"); + private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); + private static final ParseField VALID_FOR_IN_DAYS_FIELD = new ParseField("valid_for_in_days"); + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_milli"); + private static final ParseField PROVIDER_FIELD = new ParseField("provider"); + + /** + * @param url URL of a ZIP file containing a database + * @return URL of a ZIP file containing a database + */ + private String url; + /** + * @param dbName A database file name inside the ZIP file + * @return A database file name inside the ZIP file + */ + private String dbName; + /** + * @param sha256Hash SHA256 hash value of a database file + * @return SHA256 hash value of a database file + */ + private String sha256Hash; + /** + * @param validForInDays A duration in which the database file is valid to use + * @return A duration in which the database file is valid to use + */ + private Long validForInDays; + /** + * @param updatedAt A date when the database was updated + * @return A date when the 
database was updated + */ + private Long updatedAt; + /** + * @param provider A database provider name + * @return A database provider name + */ + private String provider; + + /** + * Datasource manifest parser + */ + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_manifest", + true, + args -> { + String url = (String) args[0]; + String dbName = (String) args[1]; + String sha256Hash = (String) args[2]; + Long validForInDays = (Long) args[3]; + Long updatedAt = (Long) args[4]; + String provider = (String) args[5]; + return new DatasourceManifest(url, dbName, sha256Hash, validForInDays, updatedAt, provider); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), URL_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), DB_NAME_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), SHA256_HASH_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), VALID_FOR_IN_DAYS_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), UPDATED_AT_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), PROVIDER_FIELD); + } + + /** + * Datasource manifest builder + */ + public static class Builder { + private static final int MANIFEST_FILE_MAX_BYTES = 1024 * 8; + + /** + * Build DatasourceManifest from a given url + * + * @param url url to download a manifest file + * @return DatasourceManifest representing the manifest file + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read manifest file") + public static DatasourceManifest build(final URL url) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URLConnection connection = url.openConnection(); + return internalBuild(connection); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + + @VisibleForTesting + @SuppressForbidden(reason = "Need to connect to 
http endpoint to read manifest file") + protected static DatasourceManifest internalBuild(final URLConnection connection) throws IOException { + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + InputStreamReader inputStreamReader = new InputStreamReader(connection.getInputStream()); + try (BufferedReader reader = new BufferedReader(inputStreamReader)) { + CharBuffer charBuffer = CharBuffer.allocate(MANIFEST_FILE_MAX_BYTES); + reader.read(charBuffer); + charBuffer.flip(); + XContentParser parser = JsonXContent.jsonXContent.createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + charBuffer.toString() + ); + return PARSER.parse(parser, null); + } + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceState.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceState.java new file mode 100644 index 00000000..3fbb064c --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/DatasourceState.java @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +/** + * Ip2Geo datasource state + * + * When data source is created, it starts with CREATING state. Once the first GeoIP data is generated, the state changes to AVAILABLE. + * Only when the first GeoIP data generation failed, the state changes to CREATE_FAILED. + * Subsequent GeoIP data failure won't change data source state from AVAILABLE to CREATE_FAILED. + * When delete request is received, the data source state changes to DELETING. 
 + * + * State changed from left to right for the entire lifecycle of a datasource + * (CREATING) to (CREATE_FAILED or AVAILABLE) to (DELETING) + * + */ +public enum DatasourceState { + /** + * Data source is being created + */ + CREATING, + /** + * Data source is ready to be used + */ + AVAILABLE, + /** + * Data source creation failed + */ + CREATE_FAILED, + /** + * Data source is being deleted + */ + DELETING +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoExecutor.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoExecutor.java new file mode 100644 index 00000000..f6230e04 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoExecutor.java @@ -0,0 +1,45 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.util.concurrent.ExecutorService; + +import org.opensearch.common.settings.Settings; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.FixedExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; + +/** + * Provide a list of static methods related with executors for Ip2Geo + */ +public class Ip2GeoExecutor { + private static final String THREAD_POOL_NAME = "_plugin_geospatial_ip2geo_datasource_update"; + private final ThreadPool threadPool; + + public Ip2GeoExecutor(final ThreadPool threadPool) { + this.threadPool = threadPool; + } + + /** + * We use fixed thread count of 1 for updating datasource as updating a datasource runs in the background + * once a day at most and no need to expedite the task. 
+ * + * @param settings the settings + * @return the executor builder + */ + public static ExecutorBuilder executorBuilder(final Settings settings) { + return new FixedExecutorBuilder(settings, THREAD_POOL_NAME, 1, 1000, THREAD_POOL_NAME, false); + } + + /** + * Return an executor service for datasource update task + * + * @return the executor service + */ + public ExecutorService forDatasourceUpdate() { + return threadPool.executor(THREAD_POOL_NAME); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockService.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockService.java new file mode 100644 index 00000000..89174f95 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockService.java @@ -0,0 +1,157 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import static org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension.JOB_INDEX_NAME; + +import java.time.Instant; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import org.opensearch.OpenSearchException; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.core.action.ActionListener; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.utils.LockService; + +import lombok.extern.log4j.Log4j2; + +/** + * A wrapper of job scheduler's lock service for datasource + */ +@Log4j2 +public class Ip2GeoLockService { + public static final long LOCK_DURATION_IN_SECONDS = 300l; + public static final long RENEW_AFTER_IN_SECONDS = 120l; + private final ClusterService clusterService; + private final LockService lockService; + + /** + * Constructor + * + * @param clusterService the cluster service + * @param client the client + */ + 
public Ip2GeoLockService(final ClusterService clusterService, final Client client) { + this.clusterService = clusterService; + this.lockService = new LockService(client, clusterService); + } + + /** + * Wrapper method of LockService#acquireLockWithId + * + * Datasource use its name as doc id in job scheduler. Therefore, we can use datasource name to acquire + * a lock on a datasource. + * + * @param datasourceName datasourceName to acquire lock on + * @param lockDurationSeconds the lock duration in seconds + * @param listener the listener + */ + public void acquireLock(final String datasourceName, final Long lockDurationSeconds, final ActionListener listener) { + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, listener); + } + + /** + * Synchronous method of #acquireLock + * + * @param datasourceName datasourceName to acquire lock on + * @param lockDurationSeconds the lock duration in seconds + * @return lock model + */ + public Optional acquireLock(final String datasourceName, final Long lockDurationSeconds) { + AtomicReference lockReference = new AtomicReference(); + CountDownLatch countDownLatch = new CountDownLatch(1); + lockService.acquireLockWithId(JOB_INDEX_NAME, lockDurationSeconds, datasourceName, new ActionListener<>() { + @Override + public void onResponse(final LockModel lockModel) { + lockReference.set(lockModel); + countDownLatch.countDown(); + } + + @Override + public void onFailure(final Exception e) { + lockReference.set(null); + countDownLatch.countDown(); + } + }); + + try { + countDownLatch.await(clusterService.getClusterSettings().get(Ip2GeoSettings.TIMEOUT).getSeconds(), TimeUnit.SECONDS); + return Optional.ofNullable(lockReference.get()); + } catch (InterruptedException e) { + return Optional.empty(); + } + } + + /** + * Wrapper method of LockService#release + * + * @param lockModel the lock model + */ + public void releaseLock(final LockModel lockModel) { + lockService.release( + lockModel, + 
ActionListener.wrap(released -> {}, exception -> log.error("Failed to release the lock", exception)) + ); + } + + /** + * Synchronous method of LockService#renewLock + * + * @param lockModel lock to renew + * @return renewed lock if renew succeed and null otherwise + */ + public LockModel renewLock(final LockModel lockModel) { + AtomicReference lockReference = new AtomicReference(); + CountDownLatch countDownLatch = new CountDownLatch(1); + lockService.renewLock(lockModel, new ActionListener<>() { + @Override + public void onResponse(final LockModel lockModel) { + lockReference.set(lockModel); + countDownLatch.countDown(); + } + + @Override + public void onFailure(final Exception e) { + lockReference.set(null); + countDownLatch.countDown(); + } + }); + + try { + countDownLatch.await(clusterService.getClusterSettings().get(Ip2GeoSettings.TIMEOUT).getSeconds(), TimeUnit.SECONDS); + return lockReference.get(); + } catch (InterruptedException e) { + return null; + } + } + + /** + * Return a runnable which can renew the given lock model + * + * The runnable renews the lock and store the renewed lock in the AtomicReference. + * It only renews the lock when it passed {@code RENEW_AFTER_IN_SECONDS} since + * the last time the lock was renewed to avoid resource abuse. 
+ * + * @param lockModel lock model to renew + * @return runnable which can renew the given lock for every call + */ + public Runnable getRenewLockRunnable(final AtomicReference lockModel) { + return () -> { + LockModel preLock = lockModel.get(); + if (Instant.now().isBefore(preLock.getLockTime().plusSeconds(RENEW_AFTER_IN_SECONDS))) { + return; + } + lockModel.set(renewLock(lockModel.get())); + if (lockModel.get() == null) { + new OpenSearchException("failed to renew a lock [{}]", preLock); + } + }; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettings.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettings.java new file mode 100644 index 00000000..142055ee --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettings.java @@ -0,0 +1,134 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; + +import org.opensearch.common.settings.Setting; +import org.opensearch.common.unit.TimeValue; + +/** + * Settings for Ip2Geo datasource operations + */ +public class Ip2GeoSettings { + + /** + * Default endpoint to be used in GeoIP datasource creation API + */ + public static final Setting DATASOURCE_ENDPOINT = Setting.simpleString( + "plugins.geospatial.ip2geo.datasource.endpoint", + "https://geoip.maps.opensearch.org/v1/geolite2-city/manifest.json", + new DatasourceEndpointValidator(), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Default update interval to be used in Ip2Geo datasource creation API + */ + public static final Setting DATASOURCE_UPDATE_INTERVAL = Setting.longSetting( + "plugins.geospatial.ip2geo.datasource.update_interval_in_days", + 3l, + 1l, + Setting.Property.NodeScope, + 
Setting.Property.Dynamic + ); + + /** + * Bulk size for indexing GeoIP data + */ + public static final Setting BATCH_SIZE = Setting.intSetting( + "plugins.geospatial.ip2geo.datasource.batch_size", + 10000, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Timeout value for Ip2Geo processor + */ + public static final Setting TIMEOUT = Setting.timeSetting( + "plugins.geospatial.ip2geo.timeout", + TimeValue.timeValueSeconds(30), + TimeValue.timeValueSeconds(1), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Max size for geo data cache + */ + public static final Setting CACHE_SIZE = Setting.longSetting( + "plugins.geospatial.ip2geo.processor.cache_size", + 1000, + 0, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * A list of CIDR which will be blocked to be used as datasource endpoint + * Private network addresses will be blocked as default + */ + public static final Setting> DATASOURCE_ENDPOINT_DENYLIST = Setting.listSetting( + "plugins.geospatial.ip2geo.datasource.endpoint.denylist", + Arrays.asList( + "127.0.0.0/8", + "169.254.0.0/16", + "10.0.0.0/8", + "172.16.0.0/12", + "192.168.0.0/16", + "0.0.0.0/8", + "100.64.0.0/10", + "192.0.0.0/24", + "192.0.2.0/24", + "198.18.0.0/15", + "192.88.99.0/24", + "198.51.100.0/24", + "203.0.113.0/24", + "224.0.0.0/4", + "240.0.0.0/4", + "255.255.255.255/32", + "::1/128", + "fe80::/10", + "fc00::/7", + "::/128", + "2001:db8::/32", + "ff00::/8" + ), + Function.identity(), + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Return all settings of Ip2Geo feature + * @return a list of all settings for Ip2Geo feature + */ + public static final List> settings() { + return List.of(DATASOURCE_ENDPOINT, DATASOURCE_UPDATE_INTERVAL, BATCH_SIZE, TIMEOUT, CACHE_SIZE, DATASOURCE_ENDPOINT_DENYLIST); + } + + /** + * Visible for testing + */ + protected static class DatasourceEndpointValidator implements Setting.Validator { + @Override + public 
void validate(final String value) { + try { + new URL(value).toURI(); + } catch (MalformedURLException | URISyntaxException e) { + throw new IllegalArgumentException("Invalid URL format is provided"); + } + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/ParameterValidator.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/ParameterValidator.java new file mode 100644 index 00000000..deccbf21 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/ParameterValidator.java @@ -0,0 +1,58 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +import org.apache.commons.lang3.StringUtils; +import org.opensearch.core.common.Strings; + +/** + * Parameter validator for IP2Geo APIs + */ +public class ParameterValidator { + private static final int MAX_DATASOURCE_NAME_BYTES = 127; + + /** + * Validate datasource name and return list of error messages + * + * @param datasourceName datasource name + * @return Error messages. Empty list if there is no violation. 
+ */ + public List validateDatasourceName(final String datasourceName) { + List errorMsgs = new ArrayList<>(); + if (StringUtils.isBlank(datasourceName)) { + errorMsgs.add("datasource name must not be empty"); + return errorMsgs; + } + + if (!Strings.validFileName(datasourceName)) { + errorMsgs.add( + String.format(Locale.ROOT, "datasource name must not contain the following characters %s", Strings.INVALID_FILENAME_CHARS) + ); + } + if (datasourceName.contains("#")) { + errorMsgs.add("datasource name must not contain '#'"); + } + if (datasourceName.contains(":")) { + errorMsgs.add("datasource name must not contain ':'"); + } + if (datasourceName.charAt(0) == '_' || datasourceName.charAt(0) == '-' || datasourceName.charAt(0) == '+') { + errorMsgs.add("datasource name must not start with '_', '-', or '+'"); + } + int byteCount = datasourceName.getBytes(StandardCharsets.UTF_8).length; + if (byteCount > MAX_DATASOURCE_NAME_BYTES) { + errorMsgs.add(String.format(Locale.ROOT, "datasource name is too long, (%d > %d)", byteCount, MAX_DATASOURCE_NAME_BYTES)); + } + if (datasourceName.equals(".") || datasourceName.equals("..")) { + errorMsgs.add("datasource name must not be '.' 
or '..'"); + } + return errorMsgs; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/common/URLDenyListChecker.java b/src/main/java/org/opensearch/geospatial/ip2geo/common/URLDenyListChecker.java new file mode 100644 index 00000000..f4ce484d --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/common/URLDenyListChecker.java @@ -0,0 +1,63 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.UnknownHostException; +import java.util.List; + +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.settings.ClusterSettings; + +import inet.ipaddr.IPAddressString; +import lombok.extern.log4j.Log4j2; + +/** + * A class to check url against a deny-list + */ +@Log4j2 +public class URLDenyListChecker { + private final ClusterSettings clusterSettings; + + public URLDenyListChecker(final ClusterSettings clusterSettings) { + this.clusterSettings = clusterSettings; + } + + /** + * Convert String to URL after verifying the url is not on a deny-list + * + * @param url value to validate and convert to URL + * @return value in URL type + */ + public URL toUrlIfNotInDenyList(final String url) { + try { + return toUrlIfNotInDenyList(url, clusterSettings.get(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST)); + } catch (UnknownHostException e) { + log.error("Unknown host", e); + throw new IllegalArgumentException("host provided in the datasource endpoint is unknown"); + } catch (MalformedURLException e) { + log.error("Malformed URL", e); + throw new IllegalArgumentException("URL provided in the datasource endpoint is malformed"); + } + } + + @SuppressForbidden(reason = "Need to connect to http endpoint to read GeoIP database file") + private URL toUrlIfNotInDenyList(final String url, final List denyList) throws UnknownHostException, 
MalformedURLException { + URL urlToReturn = new URL(url); + if (isInDenyList(new IPAddressString(InetAddress.getByName(urlToReturn.getHost()).getHostAddress()), denyList)) { + throw new IllegalArgumentException( + "given endpoint is blocked by deny list in cluster setting " + Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey() + ); + } + return urlToReturn; + } + + private boolean isInDenyList(final IPAddressString url, final List denyList) { + return denyList.stream().map(cidr -> new IPAddressString(cidr)).anyMatch(cidr -> cidr.contains(url)); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDao.java b/src/main/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDao.java new file mode 100644 index 00000000..24b75535 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDao.java @@ -0,0 +1,376 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; + +import org.opensearch.OpenSearchException; +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.get.GetRequest; +import org.opensearch.action.get.GetResponse; +import org.opensearch.action.get.MultiGetItemResponse; 
+import org.opensearch.action.get.MultiGetResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.index.IndexResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension; +import org.opensearch.geospatial.shared.StashedThreadContext; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchHit; + +import lombok.extern.log4j.Log4j2; + +/** + * Data access object for datasource + */ +@Log4j2 +public class DatasourceDao { + private static final Integer MAX_SIZE = 1000; + private final Client client; + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + + public DatasourceDao(final Client client, final ClusterService clusterService) { + this.client = client; + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + } + + /** + * Create datasource index + * + * @param stepListener setp listener + */ + public void 
createIndexIfNotExists(final StepListener stepListener) { + if (clusterService.state().metadata().hasIndex(DatasourceExtension.JOB_INDEX_NAME) == true) { + stepListener.onResponse(null); + return; + } + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(DatasourceExtension.JOB_INDEX_NAME).mapping(getIndexMapping()) + .settings(DatasourceExtension.INDEX_SETTING); + StashedThreadContext.run(client, () -> client.admin().indices().create(createIndexRequest, new ActionListener<>() { + @Override + public void onResponse(final CreateIndexResponse createIndexResponse) { + stepListener.onResponse(null); + } + + @Override + public void onFailure(final Exception e) { + if (e instanceof ResourceAlreadyExistsException) { + log.info("index[{}] already exist", DatasourceExtension.JOB_INDEX_NAME); + stepListener.onResponse(null); + return; + } + stepListener.onFailure(e); + } + })); + } + + private String getIndexMapping() { + try { + try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/ip2geo_datasource.json")) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + return reader.lines().map(String::trim).collect(Collectors.joining()); + } + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Update datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param datasource the datasource + * @return index response + */ + public IndexResponse updateDatasource(final Datasource datasource) { + datasource.setLastUpdateTime(Instant.now()); + return StashedThreadContext.run(client, () -> { + try { + return client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setOpType(DocWriteRequest.OpType.INDEX) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .execute() + .actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)); + 
} catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + + /** + * Update datasources in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param datasources the datasources + * @param listener action listener + */ + public void updateDatasource(final List datasources, final ActionListener listener) { + BulkRequest bulkRequest = new BulkRequest(); + datasources.stream().map(datasource -> { + datasource.setLastUpdateTime(Instant.now()); + return datasource; + }).map(this::toIndexRequest).forEach(indexRequest -> bulkRequest.add(indexRequest)); + StashedThreadContext.run(client, () -> client.bulk(bulkRequest, listener)); + } + + private IndexRequest toIndexRequest(Datasource datasource) { + try { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(DatasourceExtension.JOB_INDEX_NAME); + indexRequest.id(datasource.getName()); + indexRequest.opType(DocWriteRequest.OpType.INDEX); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + indexRequest.source(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); + return indexRequest; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Put datasource in an index {@code DatasourceExtension.JOB_INDEX_NAME} + * + * @param datasource the datasource + * @param listener the listener + */ + public void putDatasource(final Datasource datasource, final ActionListener listener) { + datasource.setLastUpdateTime(Instant.now()); + StashedThreadContext.run(client, () -> { + try { + client.prepareIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setOpType(DocWriteRequest.OpType.CREATE) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setSource(datasource.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .execute(listener); + } catch (IOException e) { + new RuntimeException(e); + } + }); + } + + /** + * Delete datasource in an index {@code 
DatasourceExtension.JOB_INDEX_NAME} + * + * @param datasource the datasource + * + */ + public void deleteDatasource(final Datasource datasource) { + DeleteResponse response = client.prepareDelete() + .setIndex(DatasourceExtension.JOB_INDEX_NAME) + .setId(datasource.getName()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .execute() + .actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)); + + if (response.status().equals(RestStatus.OK)) { + log.info("deleted datasource[{}] successfully", datasource.getName()); + } else if (response.status().equals(RestStatus.NOT_FOUND)) { + throw new ResourceNotFoundException("datasource[{}] does not exist", datasource.getName()); + } else { + throw new OpenSearchException("failed to delete datasource[{}] with status[{}]", datasource.getName(), response.status()); + } + } + + /** + * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param name the name of a datasource + * @return datasource + * @throws IOException exception + */ + public Datasource getDatasource(final String name) throws IOException { + GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); + GetResponse response; + try { + response = StashedThreadContext.run(client, () -> client.get(request).actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT))); + if (response.isExists() == false) { + log.error("Datasource[{}] does not exist in an index[{}]", name, DatasourceExtension.JOB_INDEX_NAME); + return null; + } + } catch (IndexNotFoundException e) { + log.error("Index[{}] is not found", DatasourceExtension.JOB_INDEX_NAME); + return null; + } + + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getSourceAsBytesRef() + ); + return Datasource.PARSER.parse(parser, null); + } + + /** + * Get datasource from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param name the name of a datasource + * @param actionListener 
the action listener + */ + public void getDatasource(final String name, final ActionListener actionListener) { + GetRequest request = new GetRequest(DatasourceExtension.JOB_INDEX_NAME, name); + StashedThreadContext.run(client, () -> client.get(request, new ActionListener<>() { + @Override + public void onResponse(final GetResponse response) { + if (response.isExists() == false) { + actionListener.onResponse(null); + return; + } + + try { + XContentParser parser = XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + response.getSourceAsBytesRef() + ); + actionListener.onResponse(Datasource.PARSER.parse(parser, null)); + } catch (IOException e) { + actionListener.onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + actionListener.onFailure(e); + } + })); + } + + /** + * Get datasources from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param names the array of datasource names + * @param actionListener the action listener + */ + public void getDatasources(final String[] names, final ActionListener> actionListener) { + StashedThreadContext.run( + client, + () -> client.prepareMultiGet() + .add(DatasourceExtension.JOB_INDEX_NAME, names) + .execute(createGetDataSourceQueryActionLister(MultiGetResponse.class, actionListener)) + ); + } + + /** + * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + * @param actionListener the action listener + */ + public void getAllDatasources(final ActionListener> actionListener) { + StashedThreadContext.run( + client, + () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setPreference(Preference.PRIMARY.type()) + .setSize(MAX_SIZE) + .execute(createGetDataSourceQueryActionLister(SearchResponse.class, actionListener)) + ); + } + + /** + * Get all datasources up to {@code MAX_SIZE} from an index {@code DatasourceExtension.JOB_INDEX_NAME} + */ + 
public List getAllDatasources() { + SearchResponse response = StashedThreadContext.run( + client, + () -> client.prepareSearch(DatasourceExtension.JOB_INDEX_NAME) + .setQuery(QueryBuilders.matchAllQuery()) + .setPreference(Preference.PRIMARY.type()) + .setSize(MAX_SIZE) + .execute() + .actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)) + ); + + List bytesReferences = toBytesReferences(response); + return bytesReferences.stream().map(bytesRef -> toDatasource(bytesRef)).collect(Collectors.toList()); + } + + private ActionListener createGetDataSourceQueryActionLister( + final Class response, + final ActionListener> actionListener + ) { + return new ActionListener() { + @Override + public void onResponse(final T response) { + try { + List bytesReferences = toBytesReferences(response); + List datasources = bytesReferences.stream() + .map(bytesRef -> toDatasource(bytesRef)) + .collect(Collectors.toList()); + actionListener.onResponse(datasources); + } catch (Exception e) { + actionListener.onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + actionListener.onFailure(e); + } + }; + } + + private List toBytesReferences(final Object response) { + if (response instanceof SearchResponse) { + SearchResponse searchResponse = (SearchResponse) response; + return Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getSourceRef).collect(Collectors.toList()); + } else if (response instanceof MultiGetResponse) { + MultiGetResponse multiGetResponse = (MultiGetResponse) response; + return Arrays.stream(multiGetResponse.getResponses()) + .map(MultiGetItemResponse::getResponse) + .filter(Objects::nonNull) + .filter(GetResponse::isExists) + .map(GetResponse::getSourceAsBytesRef) + .collect(Collectors.toList()); + } else { + throw new OpenSearchException("No supported instance type[{}] is provided", response.getClass()); + } + } + + private Datasource toDatasource(final BytesReference bytesReference) { + try { + XContentParser parser = 
XContentHelper.createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + bytesReference + ); + return Datasource.PARSER.parse(parser, null); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDao.java b/src/main/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDao.java new file mode 100644 index 00000000..0d98db55 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDao.java @@ -0,0 +1,351 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.net.URLConnection; +import java.nio.charset.StandardCharsets; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Queue; +import java.util.stream.Collectors; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.apache.logging.log4j.util.Strings; +import org.opensearch.OpenSearchException; +import org.opensearch.SpecialPermission; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.index.IndexRequest; +import 
org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.client.Client; +import org.opensearch.client.Requests; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentHelper; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.constants.IndexSetting; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.geospatial.shared.Constants; +import org.opensearch.geospatial.shared.StashedThreadContext; +import org.opensearch.index.query.QueryBuilders; + +import lombok.NonNull; +import lombok.extern.log4j.Log4j2; + +/** + * Data access object for GeoIp data + */ +@Log4j2 +public class GeoIpDataDao { + private static final String IP_RANGE_FIELD_NAME = "_cidr"; + private static final String DATA_FIELD_NAME = "_data"; + private static final Map INDEX_SETTING_TO_CREATE = Map.of( + IndexSetting.NUMBER_OF_SHARDS, + 1, + IndexSetting.NUMBER_OF_REPLICAS, + 0, + IndexSetting.REFRESH_INTERVAL, + -1, + IndexSetting.HIDDEN, + true + ); + private static final Map INDEX_SETTING_TO_FREEZE = Map.of( + IndexSetting.AUTO_EXPAND_REPLICAS, + "0-all", + IndexSetting.BLOCKS_WRITE, + true + ); + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + private final Client client; + private final URLDenyListChecker 
urlDenyListChecker; + + public GeoIpDataDao(final ClusterService clusterService, final Client client, final URLDenyListChecker urlDenyListChecker) { + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + this.client = client; + this.urlDenyListChecker = urlDenyListChecker; + } + + /** + * Create an index for GeoIP data + * + * Index setting start with single shard, zero replica, no refresh interval, and hidden. + * Once the GeoIP data is indexed, do refresh and force merge. + * Then, change the index setting to expand replica to all nodes, and read only allow delete. + * See {@link #freezeIndex} + * + * @param indexName index name + */ + public void createIndexIfNotExists(final String indexName) { + if (clusterService.state().metadata().hasIndex(indexName) == true) { + return; + } + final CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName).settings(INDEX_SETTING_TO_CREATE) + .mapping(getIndexMapping()); + StashedThreadContext.run( + client, + () -> client.admin().indices().create(createIndexRequest).actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)) + ); + } + + private void freezeIndex(final String indexName) { + TimeValue timeout = clusterSettings.get(Ip2GeoSettings.TIMEOUT); + StashedThreadContext.run(client, () -> { + client.admin().indices().prepareForceMerge(indexName).setMaxNumSegments(1).execute().actionGet(timeout); + client.admin().indices().prepareRefresh(indexName).execute().actionGet(timeout); + client.admin() + .indices() + .prepareUpdateSettings(indexName) + .setSettings(INDEX_SETTING_TO_FREEZE) + .execute() + .actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)); + }); + } + + /** + * Generate XContentBuilder representing datasource database index mapping + * + * { + * "dynamic": false, + * "properties": { + * "_cidr": { + * "type": "ip_range", + * "doc_values": false + * } + * } + * } + * + * @return String representing datasource database index mapping + */ + private 
String getIndexMapping() { + try { + try (InputStream is = DatasourceDao.class.getResourceAsStream("/mappings/ip2geo_geoip.json")) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) { + return reader.lines().map(String::trim).collect(Collectors.joining()); + } + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Create CSVParser of a GeoIP data + * + * @param manifest Datasource manifest + * @return CSVParser for GeoIP data + */ + @SuppressForbidden(reason = "Need to connect to http endpoint to read GeoIP database file") + public CSVParser getDatabaseReader(final DatasourceManifest manifest) { + SpecialPermission.check(); + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + URL zipUrl = urlDenyListChecker.toUrlIfNotInDenyList(manifest.getUrl()); + return internalGetDatabaseReader(manifest, zipUrl.openConnection()); + } catch (IOException e) { + throw new OpenSearchException("failed to read geoip data from {}", manifest.getUrl(), e); + } + }); + } + + @VisibleForTesting + @SuppressForbidden(reason = "Need to connect to http endpoint to read GeoIP database file") + protected CSVParser internalGetDatabaseReader(final DatasourceManifest manifest, final URLConnection connection) throws IOException { + connection.addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + ZipInputStream zipIn = new ZipInputStream(connection.getInputStream()); + ZipEntry zipEntry = zipIn.getNextEntry(); + while (zipEntry != null) { + if (zipEntry.getName().equalsIgnoreCase(manifest.getDbName()) == false) { + zipEntry = zipIn.getNextEntry(); + continue; + } + return new CSVParser(new BufferedReader(new InputStreamReader(zipIn)), CSVFormat.RFC4180); + } + throw new IllegalArgumentException( + String.format(Locale.ROOT, "database file [%s] does not exist in the zip file [%s]", manifest.getDbName(), manifest.getUrl()) + ); + } + + /** + * Create a document to ingest in 
datasource database index + * + * It assumes the first field as ip_range. The rest is added under data field. + * + * Document example + * { + * "_cidr":"1.0.0.1/25", + * "_data":{ + * "country": "USA", + * "city": "Seattle", + * "location":"13.23,42.12" + * } + * } + * + * @param fields a list of field name + * @param values a list of values + * @return Document in json string format + * @throws IOException the exception + */ + public XContentBuilder createDocument(final String[] fields, final String[] values) throws IOException { + if (fields.length != values.length) { + throw new OpenSearchException("header[{}] and record[{}] length does not match", fields, values); + } + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.field(IP_RANGE_FIELD_NAME, values[0]); + builder.startObject(DATA_FIELD_NAME); + for (int i = 1; i < fields.length; i++) { + if (Strings.isBlank(values[i])) { + continue; + } + builder.field(fields[i], values[i]); + } + builder.endObject(); + builder.endObject(); + builder.close(); + return builder; + } + + /** + * Query a given index using a given ip address to get geoip data + * + * @param indexName index + * @param ip ip address + * @return geoIP data + */ + public Map getGeoIpData(final String indexName, final String ip) { + SearchResponse response = StashedThreadContext.run( + client, + () -> client.prepareSearch(indexName) + .setSize(1) + .setQuery(QueryBuilders.termQuery(IP_RANGE_FIELD_NAME, ip)) + .setPreference(Preference.LOCAL.type()) + .setRequestCache(true) + .get(clusterSettings.get(Ip2GeoSettings.TIMEOUT)) + ); + + if (response.getHits().getHits().length == 0) { + return Collections.emptyMap(); + } else { + return (Map) XContentHelper.convertToMap(response.getHits().getAt(0).getSourceRef(), false, XContentType.JSON) + .v2() + .get(DATA_FIELD_NAME); + } + } + + /** + * Puts GeoIP data from CSVRecord iterator into a given index in bulk + * + * @param indexName Index name to puts the GeoIP 
data + * @param fields Field name matching with data in CSVRecord in order + * @param iterator GeoIP data to insert + * @param renewLock Runnable to renew lock + */ + public void putGeoIpData( + @NonNull final String indexName, + @NonNull final String[] fields, + @NonNull final Iterator iterator, + @NonNull final Runnable renewLock + ) throws IOException { + TimeValue timeout = clusterSettings.get(Ip2GeoSettings.TIMEOUT); + Integer batchSize = clusterSettings.get(Ip2GeoSettings.BATCH_SIZE); + final BulkRequest bulkRequest = new BulkRequest(); + Queue requests = new LinkedList<>(); + for (int i = 0; i < batchSize; i++) { + requests.add(Requests.indexRequest(indexName)); + } + while (iterator.hasNext()) { + CSVRecord record = iterator.next(); + XContentBuilder document = createDocument(fields, record.values()); + IndexRequest indexRequest = (IndexRequest) requests.poll(); + indexRequest.source(document); + indexRequest.id(record.get(0)); + bulkRequest.add(indexRequest); + if (iterator.hasNext() == false || bulkRequest.requests().size() == batchSize) { + BulkResponse response = StashedThreadContext.run(client, () -> client.bulk(bulkRequest).actionGet(timeout)); + if (response.hasFailures()) { + throw new OpenSearchException( + "error occurred while ingesting GeoIP data in {} with an error {}", + indexName, + response.buildFailureMessage() + ); + } + requests.addAll(bulkRequest.requests()); + bulkRequest.requests().clear(); + } + renewLock.run(); + } + freezeIndex(indexName); + + } + + public void deleteIp2GeoDataIndex(final String index) { + deleteIp2GeoDataIndex(Arrays.asList(index)); + } + + public void deleteIp2GeoDataIndex(final List indices) { + if (indices == null || indices.isEmpty()) { + return; + } + + Optional invalidIndex = indices.stream() + .filter(index -> index.startsWith(IP2GEO_DATA_INDEX_NAME_PREFIX) == false) + .findAny(); + if (invalidIndex.isPresent()) { + throw new OpenSearchException( + "the index[{}] is not ip2geo data index which should start 
with {}", + invalidIndex.get(), + IP2GEO_DATA_INDEX_NAME_PREFIX + ); + } + + AcknowledgedResponse response = StashedThreadContext.run( + client, + () -> client.admin() + .indices() + .prepareDelete(indices.toArray(new String[0])) + .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + .execute() + .actionGet(clusterSettings.get(Ip2GeoSettings.TIMEOUT)) + ); + + if (response.isAcknowledged() == false) { + throw new OpenSearchException("failed to delete data[{}] in datasource", String.join(",", indices)); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDao.java b/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDao.java new file mode 100644 index 00000000..9f229377 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDao.java @@ -0,0 +1,218 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import java.io.IOException; +import java.time.Instant; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.function.Function; + +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.cache.Cache; +import org.opensearch.common.cache.CacheBuilder; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.index.IndexNotFoundException; +import 
org.opensearch.index.engine.Engine; +import org.opensearch.index.shard.IndexingOperationListener; + +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.extern.log4j.Log4j2; + +/** + * Data access object for Datasource and GeoIP data with added caching layer + * + * Ip2GeoCachedDao has a memory cache to store Datasource and GeoIP data. To fully utilize the cache, + * do not create multiple Ip2GeoCachedDao. Ip2GeoCachedDao instance is bound to guice so that you can use + * it through injection. + * + * All IP2Geo processors share single Ip2GeoCachedDao instance. + */ +@Log4j2 +public class Ip2GeoCachedDao implements IndexingOperationListener { + private final DatasourceDao datasourceDao; + private final GeoIpDataDao geoIpDataDao; + private final GeoDataCache geoDataCache; + private Map metadata; + + public Ip2GeoCachedDao(final ClusterService clusterService, final DatasourceDao datasourceDao, final GeoIpDataDao geoIpDataDao) { + this.datasourceDao = datasourceDao; + this.geoIpDataDao = geoIpDataDao; + this.geoDataCache = new GeoDataCache(clusterService.getClusterSettings().get(Ip2GeoSettings.CACHE_SIZE)); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(Ip2GeoSettings.CACHE_SIZE, setting -> this.geoDataCache.updateMaxSize(setting.longValue())); + } + + public String getIndexName(final String datasourceName) { + return getMetadata().getOrDefault(datasourceName, DatasourceMetadata.EMPTY_METADATA).getIndexName(); + } + + public boolean isExpired(final String datasourceName) { + return getMetadata().getOrDefault(datasourceName, DatasourceMetadata.EMPTY_METADATA).getExpirationDate().isBefore(Instant.now()); + } + + public boolean has(final String datasourceName) { + return getMetadata().containsKey(datasourceName); + } + + public DatasourceState getState(final String datasourceName) { + return getMetadata().getOrDefault(datasourceName, DatasourceMetadata.EMPTY_METADATA).getState(); + } + + public Map 
getGeoData(final String indexName, final String ip) { + try { + return geoDataCache.putIfAbsent(indexName, ip, addr -> geoIpDataDao.getGeoIpData(indexName, ip)); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + + private Map getMetadata() { + if (metadata != null) { + return metadata; + } + synchronized (this) { + if (metadata != null) { + return metadata; + } + Map tempData = new ConcurrentHashMap<>(); + try { + datasourceDao.getAllDatasources() + .stream() + .forEach(datasource -> tempData.put(datasource.getName(), new DatasourceMetadata(datasource))); + } catch (IndexNotFoundException e) { + log.debug("Datasource has never been created"); + } + metadata = tempData; + return metadata; + } + } + + private void put(final Datasource datasource) { + DatasourceMetadata metadata = new DatasourceMetadata(datasource); + getMetadata().put(datasource.getName(), metadata); + } + + private void remove(final String datasourceName) { + getMetadata().remove(datasourceName); + } + + @Override + public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) { + if (Engine.Result.Type.FAILURE.equals(result.getResultType())) { + return; + } + + try { + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, index.source().utf8ToString()); + parser.nextToken(); + Datasource datasource = Datasource.PARSER.parse(parser, null); + put(datasource); + } catch (IOException e) { + log.error("IOException occurred updating datasource metadata for datasource {} ", index.id(), e); + } + } + + @Override + public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) { + if (result.getResultType().equals(Engine.Result.Type.FAILURE)) { + return; + } + remove(delete.id()); + } + + @Getter + private static class DatasourceMetadata { + private static DatasourceMetadata EMPTY_METADATA = new DatasourceMetadata(); + private String indexName; + 
private Instant expirationDate; + private DatasourceState state; + + private DatasourceMetadata() { + expirationDate = Instant.MIN; + } + + public DatasourceMetadata(final Datasource datasource) { + this.indexName = datasource.currentIndexName(); + this.expirationDate = datasource.expirationDay(); + this.state = datasource.getState(); + } + } + + /** + * Cache to hold geo data + * + * GeoData in an index in immutable. Therefore, invalidation is not needed. + */ + @VisibleForTesting + protected static class GeoDataCache { + private Cache> cache; + + public GeoDataCache(final long maxSize) { + if (maxSize < 0) { + throw new IllegalArgumentException("ip2geo max cache size must be 0 or greater"); + } + this.cache = CacheBuilder.>builder().setMaximumWeight(maxSize).build(); + } + + public Map putIfAbsent( + final String indexName, + final String ip, + final Function> retrieveFunction + ) throws ExecutionException { + CacheKey cacheKey = new CacheKey(indexName, ip); + return cache.computeIfAbsent(cacheKey, key -> retrieveFunction.apply(key.ip)); + } + + public Map get(final String indexName, final String ip) { + return cache.get(new CacheKey(indexName, ip)); + } + + /** + * Create a new cache with give size and replace existing cache + * + * Try to populate the existing value from previous cache to the new cache in best effort + * + * @param maxSize + */ + public void updateMaxSize(final long maxSize) { + if (maxSize < 0) { + throw new IllegalArgumentException("ip2geo max cache size must be 0 or greater"); + } + Cache> temp = CacheBuilder.>builder() + .setMaximumWeight(maxSize) + .build(); + int count = 0; + Iterator it = cache.keys().iterator(); + while (it.hasNext() && count < maxSize) { + CacheKey key = it.next(); + temp.put(key, cache.get(key)); + count++; + } + cache = temp; + } + + @AllArgsConstructor + @EqualsAndHashCode + private static class CacheKey { + private final String indexName; + private final String ip; + } + } +} diff --git 
a/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDao.java b/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDao.java new file mode 100644 index 00000000..55e1152d --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDao.java @@ -0,0 +1,40 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; + +import org.opensearch.common.inject.Inject; +import org.opensearch.geospatial.ip2geo.processor.Ip2GeoProcessor; +import org.opensearch.ingest.IngestMetadata; +import org.opensearch.ingest.IngestService; + +/** + * Data access object for Ip2Geo processors + */ +public class Ip2GeoProcessorDao { + private final IngestService ingestService; + + @Inject + public Ip2GeoProcessorDao(final IngestService ingestService) { + this.ingestService = ingestService; + } + + public List getProcessors(final String datasourceName) { + IngestMetadata ingestMetadata = ingestService.getClusterService().state().getMetadata().custom(IngestMetadata.TYPE); + if (ingestMetadata == null) { + return Collections.emptyList(); + } + return ingestMetadata.getPipelines() + .keySet() + .stream() + .flatMap(pipelineId -> ingestService.getProcessorsInPipeline(pipelineId, Ip2GeoProcessor.class).stream()) + .filter(ip2GeoProcessor -> ip2GeoProcessor.getDatasourceName().equals(datasourceName)) + .collect(Collectors.toList()); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/Datasource.java b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/Datasource.java new file mode 100644 index 00000000..95343025 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/Datasource.java @@ -0,0 +1,720 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.geospatial.ip2geo.jobscheduler; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import org.opensearch.core.ParseField; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ConstructingObjectParser; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.ip2geo.action.PutDatasourceRequest; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; + +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.Setter; +import lombok.ToString; + +/** + * Ip2Geo datasource job parameter + */ +@Getter +@Setter +@ToString +@EqualsAndHashCode +@AllArgsConstructor +public class Datasource implements Writeable, ScheduledJobParameter { + /** + * Prefix of indices having Ip2Geo data + */ + public static final String IP2GEO_DATA_INDEX_NAME_PREFIX = ".geospatial-ip2geo-data"; + + /** + * Default fields for job scheduling + */ + private static final ParseField NAME_FIELD = new ParseField("name"); + private static final ParseField ENABLED_FIELD = new ParseField("update_enabled"); + private static final ParseField LAST_UPDATE_TIME_FIELD = new 
ParseField("last_update_time"); + private static final ParseField LAST_UPDATE_TIME_FIELD_READABLE = new ParseField("last_update_time_field"); + /** + * Schedule that user set + */ + private static final ParseField USER_SCHEDULE_FIELD = new ParseField("user_schedule"); + /** + * System schedule which will be used by job scheduler + * + * If datasource is going to get expired before next update, we want to run clean up task before the next update + * by changing system schedule. + * + * If datasource is restored from snapshot, we want to run clean up task immediately to handle expired datasource + * by changing system schedule. + * + * For every task run, we revert system schedule back to user schedule. + */ + private static final ParseField SYSTEM_SCHEDULE_FIELD = new ParseField("system_schedule"); + /** + * {@link DatasourceTask} that DatasourceRunner will execute in next run + * + * For every task run, we revert task back to {@link DatasourceTask#ALL} + */ + private static final ParseField TASK_FIELD = new ParseField("task"); + private static final ParseField ENABLED_TIME_FIELD = new ParseField("enabled_time"); + private static final ParseField ENABLED_TIME_FIELD_READABLE = new ParseField("enabled_time_field"); + + /** + * Additional fields for datasource + */ + private static final ParseField ENDPOINT_FIELD = new ParseField("endpoint"); + private static final ParseField STATE_FIELD = new ParseField("state"); + private static final ParseField CURRENT_INDEX_FIELD = new ParseField("current_index"); + private static final ParseField INDICES_FIELD = new ParseField("indices"); + private static final ParseField DATABASE_FIELD = new ParseField("database"); + private static final ParseField UPDATE_STATS_FIELD = new ParseField("update_stats"); + + /** + * Default variables for job scheduling + */ + + /** + * @param name name of a datasource + * @return name of a datasource + */ + private String name; + /** + * @param lastUpdateTime Last update time of a datasource + * 
@return Last update time of a datasource + */ + private Instant lastUpdateTime; + /** + * @param enabledTime Last time when a scheduling is enabled for a GeoIP data update + * @return Last time when a scheduling is enabled for the job scheduler + */ + private Instant enabledTime; + /** + * @param isEnabled Indicate if GeoIP data update is scheduled or not + * @return Indicate if scheduling is enabled or not + */ + private boolean isEnabled; + /** + * @param userSchedule Schedule that user provided + * @return Schedule that user provided + */ + private IntervalSchedule userSchedule; + + /** + * @param systemSchedule Schedule that job scheduler use + * @return Schedule that job scheduler use + */ + private IntervalSchedule systemSchedule; + + /** + * @param task Task that {@link DatasourceRunner} will execute + * @return Task that {@link DatasourceRunner} will execute + */ + private DatasourceTask task; + + /** + * Additional variables for datasource + */ + + /** + * @param endpoint URL of a manifest file + * @return URL of a manifest file + */ + private String endpoint; + /** + * @param state State of a datasource + * @return State of a datasource + */ + private DatasourceState state; + /** + * @param currentIndex the current index name having GeoIP data + * @return the current index name having GeoIP data + */ + @Getter(AccessLevel.NONE) + private String currentIndex; + /** + * @param indices A list of indices having GeoIP data including currentIndex + * @return A list of indices having GeoIP data including currentIndex + */ + private List indices; + /** + * @param database GeoIP database information + * @return GeoIP database information + */ + private Database database; + /** + * @param updateStats GeoIP database update statistics + * @return GeoIP database update statistics + */ + private UpdateStats updateStats; + + /** + * Datasource parser + */ + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata", + 
true, + args -> { + String name = (String) args[0]; + Instant lastUpdateTime = Instant.ofEpochMilli((long) args[1]); + Instant enabledTime = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); + boolean isEnabled = (boolean) args[3]; + IntervalSchedule userSchedule = (IntervalSchedule) args[4]; + IntervalSchedule systemSchedule = (IntervalSchedule) args[5]; + DatasourceTask task = DatasourceTask.valueOf((String) args[6]); + String endpoint = (String) args[7]; + DatasourceState state = DatasourceState.valueOf((String) args[8]); + String currentIndex = (String) args[9]; + List indices = (List) args[10]; + Database database = (Database) args[11]; + UpdateStats updateStats = (UpdateStats) args[12]; + Datasource parameter = new Datasource( + name, + lastUpdateTime, + enabledTime, + isEnabled, + userSchedule, + systemSchedule, + task, + endpoint, + state, + currentIndex, + indices, + database, + updateStats + ); + + return parameter; + } + ); + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_UPDATE_TIME_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ENABLED_TIME_FIELD); + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), USER_SCHEDULE_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduleParser.parse(p), SYSTEM_SCHEDULE_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), TASK_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), ENDPOINT_FIELD); + PARSER.declareString(ConstructingObjectParser.constructorArg(), STATE_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CURRENT_INDEX_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INDICES_FIELD); + 
PARSER.declareObject(ConstructingObjectParser.constructorArg(), Database.PARSER, DATABASE_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), UpdateStats.PARSER, UPDATE_STATS_FIELD); + } + + @VisibleForTesting + public Datasource() { + this(null, null, null); + } + + public Datasource(final String name, final IntervalSchedule schedule, final String endpoint) { + this( + name, + Instant.now().truncatedTo(ChronoUnit.MILLIS), + null, + false, + schedule, + schedule, + DatasourceTask.ALL, + endpoint, + DatasourceState.CREATING, + null, + new ArrayList<>(), + new Database(), + new UpdateStats() + ); + } + + public Datasource(final StreamInput in) throws IOException { + name = in.readString(); + lastUpdateTime = toInstant(in.readVLong()); + enabledTime = toInstant(in.readOptionalVLong()); + isEnabled = in.readBoolean(); + userSchedule = new IntervalSchedule(in); + systemSchedule = new IntervalSchedule(in); + task = DatasourceTask.valueOf(in.readString()); + endpoint = in.readString(); + state = DatasourceState.valueOf(in.readString()); + currentIndex = in.readOptionalString(); + indices = in.readStringList(); + database = new Database(in); + updateStats = new UpdateStats(in); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeString(name); + out.writeVLong(lastUpdateTime.toEpochMilli()); + out.writeOptionalVLong(enabledTime == null ? 
null : enabledTime.toEpochMilli()); + out.writeBoolean(isEnabled); + userSchedule.writeTo(out); + systemSchedule.writeTo(out); + out.writeString(task.name()); + out.writeString(endpoint); + out.writeString(state.name()); + out.writeOptionalString(currentIndex); + out.writeStringCollection(indices); + database.writeTo(out); + updateStats.writeTo(out); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(NAME_FIELD.getPreferredName(), name); + builder.timeField( + LAST_UPDATE_TIME_FIELD.getPreferredName(), + LAST_UPDATE_TIME_FIELD_READABLE.getPreferredName(), + lastUpdateTime.toEpochMilli() + ); + if (enabledTime != null) { + builder.timeField( + ENABLED_TIME_FIELD.getPreferredName(), + ENABLED_TIME_FIELD_READABLE.getPreferredName(), + enabledTime.toEpochMilli() + ); + } + builder.field(ENABLED_FIELD.getPreferredName(), isEnabled); + builder.field(USER_SCHEDULE_FIELD.getPreferredName(), userSchedule); + builder.field(SYSTEM_SCHEDULE_FIELD.getPreferredName(), systemSchedule); + builder.field(TASK_FIELD.getPreferredName(), task.name()); + builder.field(ENDPOINT_FIELD.getPreferredName(), endpoint); + builder.field(STATE_FIELD.getPreferredName(), state.name()); + if (currentIndex != null) { + builder.field(CURRENT_INDEX_FIELD.getPreferredName(), currentIndex); + } + builder.field(INDICES_FIELD.getPreferredName(), indices); + builder.field(DATABASE_FIELD.getPreferredName(), database); + builder.field(UPDATE_STATS_FIELD.getPreferredName(), updateStats); + builder.endObject(); + return builder; + } + + @Override + public String getName() { + return name; + } + + @Override + public Instant getLastUpdateTime() { + return lastUpdateTime; + } + + @Override + public Instant getEnabledTime() { + return enabledTime; + } + + @Override + public IntervalSchedule getSchedule() { + return systemSchedule; + } + + @Override + public boolean isEnabled() { + return isEnabled; 
+ } + + @Override + public Long getLockDurationSeconds() { + return Ip2GeoLockService.LOCK_DURATION_IN_SECONDS; + } + + /** + * Enable auto update of GeoIP data + */ + public void enable() { + if (isEnabled == true) { + return; + } + enabledTime = Instant.now().truncatedTo(ChronoUnit.MILLIS); + isEnabled = true; + } + + /** + * Disable auto update of GeoIP data + */ + public void disable() { + enabledTime = null; + isEnabled = false; + } + + /** + * Current index name of a datasource + * + * @return Current index name of a datasource + */ + public String currentIndexName() { + if (isExpired()) { + return null; + } + + return currentIndex; + } + + /** + * Index name for a datasource with given suffix + * + * @param suffix the suffix of a index name + * @return index name for a datasource with given suffix + */ + public String newIndexName(final String suffix) { + return String.format(Locale.ROOT, "%s.%s.%s", IP2GEO_DATA_INDEX_NAME_PREFIX, name, suffix); + } + + /** + * Reset database so that it can be updated in next run regardless there is new update or not + */ + public void resetDatabase() { + database.setUpdatedAt(null); + database.setSha256Hash(null); + } + + /** + * Checks if datasource is expired or not + * + * @return true if datasource is expired, and false otherwise + */ + public boolean isExpired() { + return willExpire(Instant.now()); + } + + /** + * Checks if datasource will expire at given time + * + * @return true if datasource will expired at given time, and false otherwise + */ + public boolean willExpire(Instant instant) { + if (database.validForInDays == null) { + return false; + } + + return instant.isAfter(expirationDay()); + } + + /** + * Day when datasource will expire + * + * @return Day when datasource will expire + */ + public Instant expirationDay() { + if (database.validForInDays == null) { + return Instant.MAX; + } + return lastCheckedAt().plus(database.validForInDays, ChronoUnit.DAYS); + } + + private Instant lastCheckedAt() { + Instant 
lastCheckedAt; + if (updateStats.lastSkippedAt == null) { + lastCheckedAt = updateStats.lastSucceededAt; + } else { + lastCheckedAt = updateStats.lastSucceededAt.isBefore(updateStats.lastSkippedAt) + ? updateStats.lastSkippedAt + : updateStats.lastSucceededAt; + } + return lastCheckedAt; + } + + /** + * Set database attributes with given input + * + * @param datasourceManifest the datasource manifest + * @param fields the fields + */ + public void setDatabase(final DatasourceManifest datasourceManifest, final List fields) { + this.database.setProvider(datasourceManifest.getProvider()); + this.database.setSha256Hash(datasourceManifest.getSha256Hash()); + this.database.setUpdatedAt(Instant.ofEpochMilli(datasourceManifest.getUpdatedAt())); + this.database.setValidForInDays(datasourceManifest.getValidForInDays()); + this.database.setFields(fields); + } + + /** + * Checks if the database fields are compatible with the given set of fields. + * + * If database fields are null, it is compatible with any input fields + * as it hasn't been generated before. + * + * @param fields The set of input fields to check for compatibility. + * @return true if the database fields are compatible with the given input fields, false otherwise. + */ + public boolean isCompatible(final List fields) { + if (database.fields == null) { + return true; + } + + if (fields.size() < database.fields.size()) { + return false; + } + + Set fieldsSet = new HashSet<>(fields); + for (String field : database.fields) { + if (fieldsSet.contains(field) == false) { + return false; + } + } + return true; + } + + private static Instant toInstant(final Long epochMilli) { + return epochMilli == null ? 
null : Instant.ofEpochMilli(epochMilli); + } + + /** + * Database of a datasource + */ + @Getter + @Setter + @ToString + @EqualsAndHashCode + @AllArgsConstructor(access = AccessLevel.PRIVATE) + @NoArgsConstructor(access = AccessLevel.PRIVATE) + public static class Database implements Writeable, ToXContent { + private static final ParseField PROVIDER_FIELD = new ParseField("provider"); + private static final ParseField SHA256_HASH_FIELD = new ParseField("sha256_hash"); + private static final ParseField UPDATED_AT_FIELD = new ParseField("updated_at_in_epoch_millis"); + private static final ParseField UPDATED_AT_FIELD_READABLE = new ParseField("updated_at"); + private static final ParseField FIELDS_FIELD = new ParseField("fields"); + private static final ParseField VALID_FOR_IN_DAYS_FIELD = new ParseField("valid_for_in_days"); + + /** + * @param provider A database provider name + * @return A database provider name + */ + private String provider; + /** + * @param sha256Hash SHA256 hash value of a database file + * @return SHA256 hash value of a database file + */ + private String sha256Hash; + /** + * @param updatedAt A date when the database was updated + * @return A date when the database was updated + */ + private Instant updatedAt; + /** + * @param validForInDays A duration in which the database file is valid to use + * @return A duration in which the database file is valid to use + */ + private Long validForInDays; + /** + * @param fields A list of available fields in the database + * @return A list of available fields in the database + */ + private List fields; + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata_database", + true, + args -> { + String provider = (String) args[0]; + String sha256Hash = (String) args[1]; + Instant updatedAt = args[2] == null ? 
null : Instant.ofEpochMilli((Long) args[2]); + Long validForInDays = (Long) args[3]; + List fields = (List) args[4]; + return new Database(provider, sha256Hash, updatedAt, validForInDays, fields); + } + ); + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PROVIDER_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), SHA256_HASH_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), UPDATED_AT_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), VALID_FOR_IN_DAYS_FIELD); + PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD); + } + + public Database(final StreamInput in) throws IOException { + provider = in.readOptionalString(); + sha256Hash = in.readOptionalString(); + updatedAt = toInstant(in.readOptionalVLong()); + validForInDays = in.readOptionalVLong(); + fields = in.readOptionalStringList(); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeOptionalString(provider); + out.writeOptionalString(sha256Hash); + out.writeOptionalVLong(updatedAt == null ? 
null : updatedAt.toEpochMilli()); + out.writeOptionalVLong(validForInDays); + out.writeOptionalStringCollection(fields); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + if (provider != null) { + builder.field(PROVIDER_FIELD.getPreferredName(), provider); + } + if (sha256Hash != null) { + builder.field(SHA256_HASH_FIELD.getPreferredName(), sha256Hash); + } + if (updatedAt != null) { + builder.timeField( + UPDATED_AT_FIELD.getPreferredName(), + UPDATED_AT_FIELD_READABLE.getPreferredName(), + updatedAt.toEpochMilli() + ); + } + if (validForInDays != null) { + builder.field(VALID_FOR_IN_DAYS_FIELD.getPreferredName(), validForInDays); + } + if (fields != null) { + builder.startArray(FIELDS_FIELD.getPreferredName()); + for (String field : fields) { + builder.value(field); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + } + + /** + * Update stats of a datasource + */ + @Getter + @Setter + @ToString + @EqualsAndHashCode + @AllArgsConstructor(access = AccessLevel.PRIVATE) + @NoArgsConstructor(access = AccessLevel.PRIVATE) + public static class UpdateStats implements Writeable, ToXContent { + private static final ParseField LAST_SUCCEEDED_AT_FIELD = new ParseField("last_succeeded_at_in_epoch_millis"); + private static final ParseField LAST_SUCCEEDED_AT_FIELD_READABLE = new ParseField("last_succeeded_at"); + private static final ParseField LAST_PROCESSING_TIME_IN_MILLIS_FIELD = new ParseField("last_processing_time_in_millis"); + private static final ParseField LAST_FAILED_AT_FIELD = new ParseField("last_failed_at_in_epoch_millis"); + private static final ParseField LAST_FAILED_AT_FIELD_READABLE = new ParseField("last_failed_at"); + private static final ParseField LAST_SKIPPED_AT = new ParseField("last_skipped_at_in_epoch_millis"); + private static final ParseField LAST_SKIPPED_AT_READABLE = new ParseField("last_skipped_at"); + + /** + * @param 
lastSucceededAt The last time when GeoIP data update was succeeded + * @return The last time when GeoIP data update was succeeded + */ + private Instant lastSucceededAt; + /** + * @param lastProcessingTimeInMillis The last processing time when GeoIP data update was succeeded + * @return The last processing time when GeoIP data update was succeeded + */ + private Long lastProcessingTimeInMillis; + /** + * @param lastFailedAt The last time when GeoIP data update was failed + * @return The last time when GeoIP data update was failed + */ + private Instant lastFailedAt; + /** + * @param lastSkippedAt The last time when GeoIP data update was skipped as there was no new update from an endpoint + * @return The last time when GeoIP data update was skipped as there was no new update from an endpoint + */ + private Instant lastSkippedAt; + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datasource_metadata_update_stats", + true, + args -> { + Instant lastSucceededAt = args[0] == null ? null : Instant.ofEpochMilli((long) args[0]); + Long lastProcessingTimeInMillis = (Long) args[1]; + Instant lastFailedAt = args[2] == null ? null : Instant.ofEpochMilli((long) args[2]); + Instant lastSkippedAt = args[3] == null ? 
null : Instant.ofEpochMilli((long) args[3]); + return new UpdateStats(lastSucceededAt, lastProcessingTimeInMillis, lastFailedAt, lastSkippedAt); + } + ); + + static { + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SUCCEEDED_AT_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_PROCESSING_TIME_IN_MILLIS_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FAILED_AT_FIELD); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_SKIPPED_AT); + } + + public UpdateStats(final StreamInput in) throws IOException { + lastSucceededAt = toInstant(in.readOptionalVLong()); + lastProcessingTimeInMillis = in.readOptionalVLong(); + lastFailedAt = toInstant(in.readOptionalVLong()); + lastSkippedAt = toInstant(in.readOptionalVLong()); + } + + @Override + public void writeTo(final StreamOutput out) throws IOException { + out.writeOptionalVLong(lastSucceededAt == null ? null : lastSucceededAt.toEpochMilli()); + out.writeOptionalVLong(lastProcessingTimeInMillis); + out.writeOptionalVLong(lastFailedAt == null ? null : lastFailedAt.toEpochMilli()); + out.writeOptionalVLong(lastSkippedAt == null ? 
null : lastSkippedAt.toEpochMilli()); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + if (lastSucceededAt != null) { + builder.timeField( + LAST_SUCCEEDED_AT_FIELD.getPreferredName(), + LAST_SUCCEEDED_AT_FIELD_READABLE.getPreferredName(), + lastSucceededAt.toEpochMilli() + ); + } + if (lastProcessingTimeInMillis != null) { + builder.field(LAST_PROCESSING_TIME_IN_MILLIS_FIELD.getPreferredName(), lastProcessingTimeInMillis); + } + if (lastFailedAt != null) { + builder.timeField( + LAST_FAILED_AT_FIELD.getPreferredName(), + LAST_FAILED_AT_FIELD_READABLE.getPreferredName(), + lastFailedAt.toEpochMilli() + ); + } + if (lastSkippedAt != null) { + builder.timeField( + LAST_SKIPPED_AT.getPreferredName(), + LAST_SKIPPED_AT_READABLE.getPreferredName(), + lastSkippedAt.toEpochMilli() + ); + } + builder.endObject(); + return builder; + } + } + + /** + * Builder class for Datasource + */ + public static class Builder { + public static Datasource build(final PutDatasourceRequest request) { + String id = request.getName(); + IntervalSchedule schedule = new IntervalSchedule( + Instant.now().truncatedTo(ChronoUnit.MILLIS), + (int) request.getUpdateInterval().days(), + ChronoUnit.DAYS + ); + String endpoint = request.getEndpoint(); + return new Datasource(id, schedule, endpoint); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtension.java b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtension.java new file mode 100644 index 00000000..3ba32bbb --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtension.java @@ -0,0 +1,57 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import static org.opensearch.geospatial.constants.IndexSetting.AUTO_EXPAND_REPLICAS; +import 
static org.opensearch.geospatial.constants.IndexSetting.HIDDEN; +import static org.opensearch.geospatial.constants.IndexSetting.NUMBER_OF_SHARDS; + +import java.util.Map; + +import org.opensearch.jobscheduler.spi.JobSchedulerExtension; +import org.opensearch.jobscheduler.spi.ScheduledJobParser; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; + +/** + * Datasource job scheduler extension + * + * This extension is responsible for scheduling GeoIp data update task + * + * See https://github.com/opensearch-project/job-scheduler/blob/main/README.md#getting-started + */ +public class DatasourceExtension implements JobSchedulerExtension { + /** + * Job index name for a datasource + */ + public static final String JOB_INDEX_NAME = ".scheduler-geospatial-ip2geo-datasource"; + /** + * Job index setting + * + * We want it to be single shard so that job can be run only in a single node by job scheduler. + * We want it to expand to all replicas so that querying to this index can be done locally to reduce latency. 
+ */ + public static final Map INDEX_SETTING = Map.of(NUMBER_OF_SHARDS, 1, AUTO_EXPAND_REPLICAS, "0-all", HIDDEN, true); + + @Override + public String getJobType() { + return "scheduler_geospatial_ip2geo_datasource"; + } + + @Override + public String getJobIndex() { + return JOB_INDEX_NAME; + } + + @Override + public ScheduledJobRunner getJobRunner() { + return DatasourceRunner.getJobRunnerInstance(); + } + + @Override + public ScheduledJobParser getJobParser() { + return (parser, id, jobDocVersion) -> Datasource.PARSER.parse(parser, null); + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunner.java b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunner.java new file mode 100644 index 00000000..93ca317a --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunner.java @@ -0,0 +1,189 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; + +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoExecutor; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.ScheduledJobRunner; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +import lombok.extern.log4j.Log4j2; + +/** + * Datasource update task + * + * This is a 
background task which is responsible for updating GeoIp data + */ +@Log4j2 +public class DatasourceRunner implements ScheduledJobRunner { + private static final int DELETE_INDEX_RETRY_IN_MIN = 15; + private static final int DELETE_INDEX_DELAY_IN_MILLIS = 10000; + + private static DatasourceRunner INSTANCE; + + /** + * Return a singleton job runner instance + * @return job runner + */ + public static DatasourceRunner getJobRunnerInstance() { + if (INSTANCE != null) { + return INSTANCE; + } + synchronized (DatasourceRunner.class) { + if (INSTANCE != null) { + return INSTANCE; + } + INSTANCE = new DatasourceRunner(); + return INSTANCE; + } + } + + private ClusterService clusterService; + private DatasourceUpdateService datasourceUpdateService; + private Ip2GeoExecutor ip2GeoExecutor; + private DatasourceDao datasourceDao; + private Ip2GeoLockService ip2GeoLockService; + private boolean initialized; + + private DatasourceRunner() { + // Singleton class, use getJobRunner method instead of constructor + } + + /** + * Initialize timeout and indexingBulkSize from settings + */ + public void initialize( + final ClusterService clusterService, + final DatasourceUpdateService datasourceUpdateService, + final Ip2GeoExecutor ip2GeoExecutor, + final DatasourceDao datasourceDao, + final Ip2GeoLockService ip2GeoLockService + ) { + this.clusterService = clusterService; + this.datasourceUpdateService = datasourceUpdateService; + this.ip2GeoExecutor = ip2GeoExecutor; + this.datasourceDao = datasourceDao; + this.ip2GeoLockService = ip2GeoLockService; + this.initialized = true; + } + + @Override + public void runJob(final ScheduledJobParameter jobParameter, final JobExecutionContext context) { + if (initialized == false) { + throw new AssertionError("this instance is not initialized"); + } + + log.info("Update job started for a datasource[{}]", jobParameter.getName()); + if (jobParameter instanceof Datasource == false) { + throw new IllegalStateException( + "job parameter is not 
instance of Datasource, type: " + jobParameter.getClass().getCanonicalName() + ); + } + + ip2GeoExecutor.forDatasourceUpdate().submit(updateDatasourceRunner(jobParameter)); + } + + /** + * Update GeoIP data + * + * Lock is used so that only one of nodes run this task. + * + * @param jobParameter job parameter + */ + @VisibleForTesting + protected Runnable updateDatasourceRunner(final ScheduledJobParameter jobParameter) { + return () -> { + Optional lockModel = ip2GeoLockService.acquireLock( + jobParameter.getName(), + Ip2GeoLockService.LOCK_DURATION_IN_SECONDS + ); + if (lockModel.isEmpty()) { + log.error("Failed to update. Another processor is holding a lock for datasource[{}]", jobParameter.getName()); + return; + } + + LockModel lock = lockModel.get(); + try { + updateDatasource(jobParameter, ip2GeoLockService.getRenewLockRunnable(new AtomicReference<>(lock))); + } catch (Exception e) { + log.error("Failed to update datasource[{}]", jobParameter.getName(), e); + } finally { + ip2GeoLockService.releaseLock(lock); + } + }; + } + + @VisibleForTesting + protected void updateDatasource(final ScheduledJobParameter jobParameter, final Runnable renewLock) throws IOException { + Datasource datasource = datasourceDao.getDatasource(jobParameter.getName()); + /** + * If delete request comes while update task is waiting on a queue for other update tasks to complete, + * because update task for this datasource didn't acquire a lock yet, delete request is processed. + * When it is this datasource's turn to run, it will find that the datasource is deleted already. + * Therefore, we stop the update process when data source does not exist. + */ + if (datasource == null) { + log.info("Datasource[{}] does not exist", jobParameter.getName()); + return; + } + + if (DatasourceState.AVAILABLE.equals(datasource.getState()) == false) { + log.error("Invalid datasource state. 
Expecting {} but received {}", DatasourceState.AVAILABLE, datasource.getState()); + datasource.disable(); + datasource.getUpdateStats().setLastFailedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + return; + } + + try { + datasourceUpdateService.deleteUnusedIndices(datasource); + if (DatasourceTask.DELETE_UNUSED_INDICES.equals(datasource.getTask()) == false) { + datasourceUpdateService.updateOrCreateGeoIpData(datasource, renewLock); + } + datasourceUpdateService.deleteUnusedIndices(datasource); + } catch (Exception e) { + log.error("Failed to update datasource for {}", datasource.getName(), e); + datasource.getUpdateStats().setLastFailedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + } finally { + postProcessing(datasource); + } + } + + private void postProcessing(final Datasource datasource) { + if (datasource.isExpired()) { + // Try to delete again as it could have just been expired + datasourceUpdateService.deleteUnusedIndices(datasource); + datasourceUpdateService.updateDatasource(datasource, datasource.getUserSchedule(), DatasourceTask.ALL); + return; + } + + if (datasource.willExpire(datasource.getUserSchedule().getNextExecutionTime(Instant.now()))) { + IntervalSchedule intervalSchedule = new IntervalSchedule( + datasource.expirationDay(), + DELETE_INDEX_RETRY_IN_MIN, + ChronoUnit.MINUTES, + DELETE_INDEX_DELAY_IN_MILLIS + ); + datasourceUpdateService.updateDatasource(datasource, intervalSchedule, DatasourceTask.DELETE_UNUSED_INDICES); + } else { + datasourceUpdateService.updateDatasource(datasource, datasource.getUserSchedule(), DatasourceTask.ALL); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTask.java b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTask.java new file mode 100644 index 00000000..bfbcb1d2 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTask.java @@ -0,0 +1,21 @@ +/* + * Copyright OpenSearch 
Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +/** + * Task that {@link DatasourceRunner} will run + */ +public enum DatasourceTask { + /** + * Do everything + */ + ALL, + + /** + * Only delete unused indices + */ + DELETE_UNUSED_INDICES +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateService.java b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateService.java new file mode 100644 index 00000000..67aee2f9 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateService.java @@ -0,0 +1,292 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import java.io.IOException; +import java.net.URL; +import java.time.Duration; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.geospatial.annotation.VisibleForTesting; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +import lombok.extern.log4j.Log4j2; + +@Log4j2 +public class DatasourceUpdateService { + private static final int SLEEP_TIME_IN_MILLIS = 5000; // 5 seconds + private static final int 
MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS = 10 * 60 * 60 * 1000; // 10 hours + private final ClusterService clusterService; + private final ClusterSettings clusterSettings; + private final DatasourceDao datasourceDao; + private final GeoIpDataDao geoIpDataDao; + private final URLDenyListChecker urlDenyListChecker; + + public DatasourceUpdateService( + final ClusterService clusterService, + final DatasourceDao datasourceDao, + final GeoIpDataDao geoIpDataDao, + final URLDenyListChecker urlDenyListChecker + ) { + this.clusterService = clusterService; + this.clusterSettings = clusterService.getClusterSettings(); + this.datasourceDao = datasourceDao; + this.geoIpDataDao = geoIpDataDao; + this.urlDenyListChecker = urlDenyListChecker; + } + + /** + * Update GeoIp data + * + * The first column is ip range field regardless its header name. + * Therefore, we don't store the first column's header name. + * + * @param datasource the datasource + * @param renewLock runnable to renew lock + * + * @throws IOException + */ + public void updateOrCreateGeoIpData(final Datasource datasource, final Runnable renewLock) throws IOException { + URL url = urlDenyListChecker.toUrlIfNotInDenyList(datasource.getEndpoint()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(url); + + if (shouldUpdate(datasource, manifest) == false) { + log.info("Skipping GeoIP database update. 
Update is not required for {}", datasource.getName()); + datasource.getUpdateStats().setLastSkippedAt(Instant.now()); + datasourceDao.updateDatasource(datasource); + return; + } + + Instant startTime = Instant.now(); + String indexName = setupIndex(datasource); + String[] header; + List fieldsToStore; + try (CSVParser reader = geoIpDataDao.getDatabaseReader(manifest)) { + CSVRecord headerLine = reader.iterator().next(); + header = validateHeader(headerLine).values(); + fieldsToStore = Arrays.asList(header).subList(1, header.length); + if (datasource.isCompatible(fieldsToStore) == false) { + throw new OpenSearchException( + "new fields [{}] does not contain all old fields [{}]", + fieldsToStore.toString(), + datasource.getDatabase().getFields().toString() + ); + } + geoIpDataDao.putGeoIpData(indexName, header, reader.iterator(), renewLock); + } + + waitUntilAllShardsStarted(indexName, MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS); + Instant endTime = Instant.now(); + updateDatasourceAsSucceeded(indexName, datasource, manifest, fieldsToStore, startTime, endTime); + } + + /** + * We wait until all shards are ready to serve search requests before updating datasource metadata to + * point to a new index so that there won't be latency degradation during GeoIP data update + * + * @param indexName the indexName + */ + @VisibleForTesting + protected void waitUntilAllShardsStarted(final String indexName, final int timeout) { + Instant start = Instant.now(); + try { + while (Instant.now().toEpochMilli() - start.toEpochMilli() < timeout) { + if (clusterService.state().routingTable().allShards(indexName).stream().allMatch(shard -> shard.started())) { + return; + } + Thread.sleep(SLEEP_TIME_IN_MILLIS); + } + throw new OpenSearchException( + "index[{}] replication did not complete after {} millis", + MAX_WAIT_TIME_FOR_REPLICATION_TO_COMPLETE_IN_MILLIS + ); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + /** + * Return header fields of geo 
data with given url of a manifest file + * + * The first column is ip range field regardless its header name. + * Therefore, we don't store the first column's header name. + * + * @param manifestUrl the url of a manifest file + * @return header fields of geo data + */ + public List getHeaderFields(String manifestUrl) throws IOException { + URL url = new URL(manifestUrl); + DatasourceManifest manifest = DatasourceManifest.Builder.build(url); + + try (CSVParser reader = geoIpDataDao.getDatabaseReader(manifest)) { + String[] fields = reader.iterator().next().values(); + return Arrays.asList(fields).subList(1, fields.length); + } + } + + /** + * Delete all indices except the one which are being used + * + * @param datasource + */ + public void deleteUnusedIndices(final Datasource datasource) { + try { + List indicesToDelete = datasource.getIndices() + .stream() + .filter(index -> index.equals(datasource.currentIndexName()) == false) + .collect(Collectors.toList()); + + List deletedIndices = deleteIndices(indicesToDelete); + + if (deletedIndices.isEmpty() == false) { + datasource.getIndices().removeAll(deletedIndices); + datasourceDao.updateDatasource(datasource); + } + } catch (Exception e) { + log.error("Failed to delete old indices for {}", datasource.getName(), e); + } + } + + /** + * Update datasource with given systemSchedule and task + * + * @param datasource datasource to update + * @param systemSchedule new system schedule value + * @param task new task value + */ + public void updateDatasource(final Datasource datasource, final IntervalSchedule systemSchedule, final DatasourceTask task) { + boolean updated = false; + if (datasource.getSystemSchedule().equals(systemSchedule) == false) { + datasource.setSystemSchedule(systemSchedule); + updated = true; + } + if (datasource.getTask().equals(task) == false) { + datasource.setTask(task); + updated = true; + } + + if (updated) { + datasourceDao.updateDatasource(datasource); + } + } + + private List 
deleteIndices(final List indicesToDelete) { + List deletedIndices = new ArrayList<>(indicesToDelete.size()); + for (String index : indicesToDelete) { + if (clusterService.state().metadata().hasIndex(index) == false) { + deletedIndices.add(index); + continue; + } + + try { + geoIpDataDao.deleteIp2GeoDataIndex(index); + deletedIndices.add(index); + } catch (Exception e) { + log.error("Failed to delete an index [{}]", index, e); + } + } + return deletedIndices; + } + + /** + * Validate header + * + * 1. header should not be null + * 2. the number of values in header should be more than one + * + * @param header the header + * @return CSVRecord the input header + */ + private CSVRecord validateHeader(CSVRecord header) { + if (header == null) { + throw new OpenSearchException("geoip database is empty"); + } + if (header.values().length < 2) { + throw new OpenSearchException("geoip database should have at least two fields"); + } + return header; + } + + /*** + * Update datasource as succeeded + * + * @param manifest the manifest + * @param datasource the datasource + */ + private void updateDatasourceAsSucceeded( + final String newIndexName, + final Datasource datasource, + final DatasourceManifest manifest, + final List fields, + final Instant startTime, + final Instant endTime + ) { + datasource.setCurrentIndex(newIndexName); + datasource.setDatabase(manifest, fields); + datasource.getUpdateStats().setLastSucceededAt(endTime); + datasource.getUpdateStats().setLastProcessingTimeInMillis(endTime.toEpochMilli() - startTime.toEpochMilli()); + datasource.enable(); + datasource.setState(DatasourceState.AVAILABLE); + datasourceDao.updateDatasource(datasource); + log.info( + "GeoIP database creation succeeded for {} and took {} seconds", + datasource.getName(), + Duration.between(startTime, endTime) + ); + } + + /*** + * Setup index to add a new geoip data + * + * @param datasource the datasource + * @return new index name + */ + private String setupIndex(final Datasource 
datasource) { + String indexName = datasource.newIndexName(UUID.randomUUID().toString()); + datasource.getIndices().add(indexName); + datasourceDao.updateDatasource(datasource); + geoIpDataDao.createIndexIfNotExists(indexName); + return indexName; + } + + /** + * Determine if update is needed or not + * + * Update is needed when all following conditions are met + * 1. updatedAt value in datasource is equal or before updateAt value in manifest + * 2. SHA256 hash value in datasource is different with SHA256 hash value in manifest + * + * @param datasource + * @param manifest + * @return + */ + private boolean shouldUpdate(final Datasource datasource, final DatasourceManifest manifest) { + if (datasource.getDatabase().getUpdatedAt() != null + && datasource.getDatabase().getUpdatedAt().toEpochMilli() > manifest.getUpdatedAt()) { + return false; + } + + if (manifest.getSha256Hash().equals(datasource.getDatabase().getSha256Hash())) { + return false; + } + return true; + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListener.java b/src/main/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListener.java new file mode 100644 index 00000000..791bb1fc --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListener.java @@ -0,0 +1,123 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.listener; + +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.stream.Collectors; + +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterStateListener; +import org.opensearch.cluster.RestoreInProgress; +import org.opensearch.cluster.node.DiscoveryNode; +import 
org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.lifecycle.AbstractLifecycleComponent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceTask; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.threadpool.ThreadPool; + +import lombok.AllArgsConstructor; +import lombok.extern.log4j.Log4j2; + +@Log4j2 +@AllArgsConstructor(onConstructor = @__(@Inject)) +public class Ip2GeoListener extends AbstractLifecycleComponent implements ClusterStateListener { + private static final int SCHEDULE_IN_MIN = 15; + private static final int DELAY_IN_MILLIS = 10000; + private final ClusterService clusterService; + private final ThreadPool threadPool; + private final DatasourceDao datasourceDao; + private final GeoIpDataDao geoIpDataDao; + + @Override + public void clusterChanged(final ClusterChangedEvent event) { + if (event.localNodeClusterManager() == false) { + return; + } + + for (RestoreInProgress.Entry entry : event.state().custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)) { + if (RestoreInProgress.State.SUCCESS.equals(entry.state()) == false) { + continue; + } + + if (entry.indices().stream().anyMatch(index -> DatasourceExtension.JOB_INDEX_NAME.equals(index))) { + threadPool.generic().submit(() -> forceUpdateGeoIpData()); + } + + List ip2GeoDataIndices = entry.indices() + .stream() + .filter(index -> index.startsWith(IP2GEO_DATA_INDEX_NAME_PREFIX)) + .collect(Collectors.toList()); + if (ip2GeoDataIndices.isEmpty() == false) { + threadPool.generic().submit(() -> geoIpDataDao.deleteIp2GeoDataIndex(ip2GeoDataIndices)); + } + } + } + + private void 
forceUpdateGeoIpData() { + datasourceDao.getAllDatasources(new ActionListener<>() { + @Override + public void onResponse(final List datasources) { + datasources.stream().forEach(Ip2GeoListener.this::scheduleForceUpdate); + datasourceDao.updateDatasource(datasources, new ActionListener<>() { + @Override + public void onResponse(final BulkResponse bulkItemResponses) { + log.info("Datasources are updated for cleanup"); + } + + @Override + public void onFailure(final Exception e) { + log.error("Failed to update datasource for cleanup after restoring", e); + } + }); + } + + @Override + public void onFailure(final Exception e) { + log.error("Failed to get datasource after restoring", e); + } + }); + } + + /** + * Give a delay so that job scheduler can schedule the job right after the delay. Otherwise, it schedules + * the job after specified update interval. + */ + private void scheduleForceUpdate(Datasource datasource) { + IntervalSchedule schedule = new IntervalSchedule(Instant.now(), SCHEDULE_IN_MIN, ChronoUnit.MINUTES, DELAY_IN_MILLIS); + datasource.resetDatabase(); + datasource.setSystemSchedule(schedule); + datasource.setTask(DatasourceTask.ALL); + } + + @Override + protected void doStart() { + if (DiscoveryNode.isClusterManagerNode(clusterService.getSettings())) { + clusterService.addListener(this); + } + } + + @Override + protected void doStop() { + clusterService.removeListener(this); + } + + @Override + protected void doClose() throws IOException { + + } +} diff --git a/src/main/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessor.java b/src/main/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessor.java new file mode 100644 index 00000000..03f90e88 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessor.java @@ -0,0 +1,292 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ +package org.opensearch.geospatial.ip2geo.processor; + +import static 
org.opensearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.opensearch.ingest.ConfigurationUtils.readBooleanProperty; +import static org.opensearch.ingest.ConfigurationUtils.readOptionalList; +import static org.opensearch.ingest.ConfigurationUtils.readStringProperty; + +import java.io.IOException; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.stream.Collectors; + +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.ParameterValidator; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.dao.Ip2GeoCachedDao; +import org.opensearch.ingest.AbstractProcessor; +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.IngestService; +import org.opensearch.ingest.Processor; + +import lombok.Getter; +import lombok.extern.log4j.Log4j2; + +/** + * Ip2Geo processor + */ +@Log4j2 +public final class Ip2GeoProcessor extends AbstractProcessor { + private static final Map DATA_EXPIRED = Map.of("error", "ip2geo_data_expired"); + public static final String CONFIG_FIELD = "field"; + public static final String CONFIG_TARGET_FIELD = "target_field"; + public static final String CONFIG_DATASOURCE = "datasource"; + public static final String CONFIG_PROPERTIES = "properties"; + public static final String CONFIG_IGNORE_MISSING = "ignore_missing"; + + private final String field; + private final String targetField; + /** + * @return The datasource name + */ + @Getter + private final String datasourceName; + private final Set properties; + private final boolean ignoreMissing; + private final ClusterSettings clusterSettings; + private final DatasourceDao datasourceDao; + private final GeoIpDataDao 
geoIpDataDao; + private final Ip2GeoCachedDao ip2GeoCachedDao; + + /** + * Ip2Geo processor type + */ + public static final String TYPE = "ip2geo"; + + /** + * Construct an Ip2Geo processor. + * @param tag the processor tag + * @param description the processor description + * @param field the source field to geo-IP map + * @param targetField the target field + * @param datasourceName the datasourceName + * @param properties the properties + * @param ignoreMissing true if documents with a missing value for the field should be ignored + * @param clusterSettings the cluster settings + * @param datasourceDao the datasource facade + * @param geoIpDataDao the geoip data facade + * @param ip2GeoCachedDao the cache + */ + public Ip2GeoProcessor( + final String tag, + final String description, + final String field, + final String targetField, + final String datasourceName, + final Set properties, + final boolean ignoreMissing, + final ClusterSettings clusterSettings, + final DatasourceDao datasourceDao, + final GeoIpDataDao geoIpDataDao, + final Ip2GeoCachedDao ip2GeoCachedDao + ) { + super(tag, description); + this.field = field; + this.targetField = targetField; + this.datasourceName = datasourceName; + this.properties = properties; + this.ignoreMissing = ignoreMissing; + this.clusterSettings = clusterSettings; + this.datasourceDao = datasourceDao; + this.geoIpDataDao = geoIpDataDao; + this.ip2GeoCachedDao = ip2GeoCachedDao; + } + + /** + * Add geo data of a given ip address to ingestDocument in asynchronous way + * + * @param ingestDocument the document + * @param handler the handler + */ + @Override + public void execute(IngestDocument ingestDocument, BiConsumer handler) { + try { + Object ip = ingestDocument.getFieldValue(field, Object.class, ignoreMissing); + + if (ip == null) { + handler.accept(ingestDocument, null); + return; + } + + if (ip instanceof String) { + executeInternal(ingestDocument, handler, (String) ip); + } else if (ip instanceof List) { + 
executeInternal(ingestDocument, handler, ((List) ip)); + } else { + handler.accept( + null, + new IllegalArgumentException( + String.format(Locale.ROOT, "field [%s] should contain only string or array of strings", field) + ) + ); + } + } catch (Exception e) { + handler.accept(null, e); + } + } + + /** + * Use {@code execute(IngestDocument, BiConsumer)} instead + * + * @param ingestDocument the document + * @return none + */ + @Override + public IngestDocument execute(IngestDocument ingestDocument) { + throw new IllegalStateException("Not implemented"); + } + + private void executeInternal( + final IngestDocument ingestDocument, + final BiConsumer handler, + final String ip + ) { + validateDatasourceIsInAvailableState(datasourceName); + String indexName = ip2GeoCachedDao.getIndexName(datasourceName); + if (ip2GeoCachedDao.isExpired(datasourceName) || indexName == null) { + handleExpiredData(ingestDocument, handler); + return; + } + + Map geoData = ip2GeoCachedDao.getGeoData(indexName, ip); + if (geoData.isEmpty() == false) { + ingestDocument.setFieldValue(targetField, filteredGeoData(geoData)); + } + handler.accept(ingestDocument, null); + } + + private Map filteredGeoData(final Map geoData) { + if (properties == null) { + return geoData; + } + + return properties.stream().filter(p -> geoData.containsKey(p)).collect(Collectors.toMap(p -> p, p -> geoData.get(p))); + } + + private void validateDatasourceIsInAvailableState(final String datasourceName) { + if (ip2GeoCachedDao.has(datasourceName) == false) { + throw new IllegalStateException("datasource does not exist"); + } + + if (DatasourceState.AVAILABLE.equals(ip2GeoCachedDao.getState(datasourceName)) == false) { + throw new IllegalStateException("datasource is not in an available state"); + } + } + + private void handleExpiredData(final IngestDocument ingestDocument, final BiConsumer handler) { + ingestDocument.setFieldValue(targetField, DATA_EXPIRED); + handler.accept(ingestDocument, null); + } + + /** + * Handle 
multiple ips + * + * @param ingestDocument the document + * @param handler the handler + * @param ips the ip list + */ + private void executeInternal( + final IngestDocument ingestDocument, + final BiConsumer handler, + final List ips + ) { + for (Object ip : ips) { + if (ip instanceof String == false) { + throw new IllegalArgumentException("array in field [" + field + "] should only contain strings"); + } + } + + validateDatasourceIsInAvailableState(datasourceName); + String indexName = ip2GeoCachedDao.getIndexName(datasourceName); + if (ip2GeoCachedDao.isExpired(datasourceName) || indexName == null) { + handleExpiredData(ingestDocument, handler); + return; + } + + List> geoDataList = ips.stream() + .map(ip -> ip2GeoCachedDao.getGeoData(indexName, (String) ip)) + .filter(geoData -> geoData.isEmpty() == false) + .map(this::filteredGeoData) + .collect(Collectors.toList()); + + if (geoDataList.isEmpty() == false) { + ingestDocument.setFieldValue(targetField, geoDataList); + } + handler.accept(ingestDocument, null); + } + + @Override + public String getType() { + return TYPE; + } + + /** + * Ip2Geo processor factory + */ + public static final class Factory implements Processor.Factory { + private static final ParameterValidator VALIDATOR = new ParameterValidator(); + private final IngestService ingestService; + private final DatasourceDao datasourceDao; + private final GeoIpDataDao geoIpDataDao; + private final Ip2GeoCachedDao ip2GeoCachedDao; + + public Factory( + final IngestService ingestService, + final DatasourceDao datasourceDao, + final GeoIpDataDao geoIpDataDao, + final Ip2GeoCachedDao ip2GeoCachedDao + ) { + this.ingestService = ingestService; + this.datasourceDao = datasourceDao; + this.geoIpDataDao = geoIpDataDao; + this.ip2GeoCachedDao = ip2GeoCachedDao; + } + + /** + * Within this method, blocking request cannot be called because this method is executed in a transport thread. + * This means, validation using data in an index won't work. 
+ */ + @Override + public Ip2GeoProcessor create( + final Map registry, + final String processorTag, + final String description, + final Map config + ) throws IOException { + String ipField = readStringProperty(TYPE, processorTag, config, CONFIG_FIELD); + String targetField = readStringProperty(TYPE, processorTag, config, CONFIG_TARGET_FIELD, "ip2geo"); + String datasourceName = readStringProperty(TYPE, processorTag, config, CONFIG_DATASOURCE); + List propertyNames = readOptionalList(TYPE, processorTag, config, CONFIG_PROPERTIES); + boolean ignoreMissing = readBooleanProperty(TYPE, processorTag, config, CONFIG_IGNORE_MISSING, false); + + List error = VALIDATOR.validateDatasourceName(datasourceName); + if (error.isEmpty() == false) { + throw newConfigurationException(TYPE, processorTag, "datasource", error.get(0)); + } + + return new Ip2GeoProcessor( + processorTag, + description, + ipField, + targetField, + datasourceName, + propertyNames == null ? null : new HashSet<>(propertyNames), + ignoreMissing, + ingestService.getClusterService().getClusterSettings(), + datasourceDao, + geoIpDataDao, + ip2GeoCachedDao + ); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/plugin/GeospatialPlugin.java b/src/main/java/org/opensearch/geospatial/plugin/GeospatialPlugin.java index 589c55ef..d64f20b4 100644 --- a/src/main/java/org/opensearch/geospatial/plugin/GeospatialPlugin.java +++ b/src/main/java/org/opensearch/geospatial/plugin/GeospatialPlugin.java @@ -5,64 +5,143 @@ package org.opensearch.geospatial.plugin; +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionResponse; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import 
org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.collect.MapBuilder; -import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.common.lifecycle.LifecycleComponent; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.IndexScopedSettings; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsFilter; -import org.opensearch.common.xcontent.NamedXContentRegistry; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONAction; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONTransportAction; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldTypeParser; import org.opensearch.geospatial.index.mapper.xyshape.XYShapeFieldMapper; import org.opensearch.geospatial.index.mapper.xyshape.XYShapeFieldTypeParser; import org.opensearch.geospatial.index.query.xyshape.XYShapeQueryBuilder; +import org.opensearch.geospatial.ip2geo.action.DeleteDatasourceAction; +import org.opensearch.geospatial.ip2geo.action.DeleteDatasourceTransportAction; +import org.opensearch.geospatial.ip2geo.action.GetDatasourceAction; +import org.opensearch.geospatial.ip2geo.action.GetDatasourceTransportAction; +import org.opensearch.geospatial.ip2geo.action.PutDatasourceAction; +import org.opensearch.geospatial.ip2geo.action.PutDatasourceTransportAction; +import org.opensearch.geospatial.ip2geo.action.RestDeleteDatasourceHandler; +import org.opensearch.geospatial.ip2geo.action.RestGetDatasourceHandler; 
+import org.opensearch.geospatial.ip2geo.action.RestPutDatasourceHandler; +import org.opensearch.geospatial.ip2geo.action.RestUpdateDatasourceHandler; +import org.opensearch.geospatial.ip2geo.action.UpdateDatasourceAction; +import org.opensearch.geospatial.ip2geo.action.UpdateDatasourceTransportAction; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoExecutor; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.dao.Ip2GeoCachedDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceRunner; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceUpdateService; +import org.opensearch.geospatial.ip2geo.listener.Ip2GeoListener; +import org.opensearch.geospatial.ip2geo.processor.Ip2GeoProcessor; import org.opensearch.geospatial.processor.FeatureProcessor; import org.opensearch.geospatial.rest.action.upload.geojson.RestUploadGeoJSONAction; +import org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexGrid; +import org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexGridAggregationBuilder; import org.opensearch.geospatial.stats.upload.RestUploadStatsAction; import org.opensearch.geospatial.stats.upload.UploadStats; import org.opensearch.geospatial.stats.upload.UploadStatsAction; import org.opensearch.geospatial.stats.upload.UploadStatsTransportAction; +import org.opensearch.index.IndexModule; import org.opensearch.index.mapper.Mapper; +import org.opensearch.indices.SystemIndexDescriptor; import org.opensearch.ingest.Processor; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.IngestPlugin; import 
org.opensearch.plugins.MapperPlugin; import org.opensearch.plugins.Plugin; import org.opensearch.plugins.SearchPlugin; +import org.opensearch.plugins.SystemIndexPlugin; import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestController; import org.opensearch.rest.RestHandler; import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.ExecutorBuilder; import org.opensearch.threadpool.ThreadPool; import org.opensearch.watcher.ResourceWatcherService; +import lombok.extern.log4j.Log4j2; + /** * Entry point for Geospatial features. It provides additional Processors, Actions * to interact with Cluster. */ -public class GeospatialPlugin extends Plugin implements IngestPlugin, ActionPlugin, MapperPlugin, SearchPlugin { +@Log4j2 +public class GeospatialPlugin extends Plugin implements IngestPlugin, ActionPlugin, MapperPlugin, SearchPlugin, SystemIndexPlugin { + private Ip2GeoCachedDao ip2GeoCachedDao; + private DatasourceDao datasourceDao; + private GeoIpDataDao geoIpDataDao; + private URLDenyListChecker urlDenyListChecker; + + @Override + public Collection getSystemIndexDescriptors(Settings settings) { + return List.of(new SystemIndexDescriptor(IP2GEO_DATA_INDEX_NAME_PREFIX, "System index used for Ip2Geo data")); + } @Override public Map getProcessors(Processor.Parameters parameters) { + this.urlDenyListChecker = new URLDenyListChecker(parameters.ingestService.getClusterService().getClusterSettings()); + this.datasourceDao = new DatasourceDao(parameters.client, parameters.ingestService.getClusterService()); + this.geoIpDataDao = new GeoIpDataDao(parameters.ingestService.getClusterService(), parameters.client, urlDenyListChecker); + this.ip2GeoCachedDao = new Ip2GeoCachedDao(parameters.ingestService.getClusterService(), datasourceDao, geoIpDataDao); return MapBuilder.newMapBuilder() .put(FeatureProcessor.TYPE, new FeatureProcessor.Factory()) + .put(Ip2GeoProcessor.TYPE, new Ip2GeoProcessor.Factory(parameters.ingestService, 
datasourceDao, geoIpDataDao, ip2GeoCachedDao)) .immutableMap(); } + @Override + public void onIndexModule(IndexModule indexModule) { + if (DatasourceExtension.JOB_INDEX_NAME.equals(indexModule.getIndex().getName())) { + indexModule.addIndexOperationListener(ip2GeoCachedDao); + log.info("Ip2GeoListener started listening to operations on index {}", DatasourceExtension.JOB_INDEX_NAME); + } + } + + @Override + public Collection> getGuiceServiceClasses() { + return List.of(Ip2GeoListener.class); + } + + @Override + public List> getExecutorBuilders(Settings settings) { + List> executorBuilders = new ArrayList<>(); + executorBuilders.add(Ip2GeoExecutor.executorBuilder(settings)); + return executorBuilders; + } + + @Override + public List> getSettings() { + return Ip2GeoSettings.settings(); + } + @Override public Collection createComponents( Client client, @@ -77,7 +156,30 @@ public Collection createComponents( IndexNameExpressionResolver indexNameExpressionResolver, Supplier repositoriesServiceSupplier ) { - return List.of(UploadStats.getInstance()); + DatasourceUpdateService datasourceUpdateService = new DatasourceUpdateService( + clusterService, + datasourceDao, + geoIpDataDao, + urlDenyListChecker + ); + Ip2GeoExecutor ip2GeoExecutor = new Ip2GeoExecutor(threadPool); + Ip2GeoLockService ip2GeoLockService = new Ip2GeoLockService(clusterService, client); + /** + * We don't need to return datasource runner because it is used only by job scheduler and job scheduler + * does not use DI but it calls DatasourceExtension#getJobRunner to get DatasourceRunner instance. 
+ */ + DatasourceRunner.getJobRunnerInstance() + .initialize(clusterService, datasourceUpdateService, ip2GeoExecutor, datasourceDao, ip2GeoLockService); + + return List.of( + UploadStats.getInstance(), + datasourceUpdateService, + datasourceDao, + ip2GeoExecutor, + geoIpDataDao, + ip2GeoLockService, + ip2GeoCachedDao + ); } @Override @@ -90,22 +192,49 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - RestUploadGeoJSONAction uploadGeoJSONAction = new RestUploadGeoJSONAction(); - RestUploadStatsAction statsAction = new RestUploadStatsAction(); - return List.of(statsAction, uploadGeoJSONAction); + List geoJsonHandlers = List.of(new RestUploadStatsAction(), new RestUploadGeoJSONAction()); + + List ip2geoHandlers = List.of( + new RestPutDatasourceHandler(clusterSettings, urlDenyListChecker), + new RestGetDatasourceHandler(), + new RestUpdateDatasourceHandler(urlDenyListChecker), + new RestDeleteDatasourceHandler() + ); + + List allHandlers = new ArrayList<>(); + allHandlers.addAll(geoJsonHandlers); + allHandlers.addAll(ip2geoHandlers); + return allHandlers; } @Override public List> getActions() { - return List.of( + List> geoJsonHandlers = List.of( new ActionHandler<>(UploadGeoJSONAction.INSTANCE, UploadGeoJSONTransportAction.class), new ActionHandler<>(UploadStatsAction.INSTANCE, UploadStatsTransportAction.class) ); + + List> ip2geoHandlers = List.of( + new ActionHandler<>(PutDatasourceAction.INSTANCE, PutDatasourceTransportAction.class), + new ActionHandler<>(GetDatasourceAction.INSTANCE, GetDatasourceTransportAction.class), + new ActionHandler<>(UpdateDatasourceAction.INSTANCE, UpdateDatasourceTransportAction.class), + new ActionHandler<>(DeleteDatasourceAction.INSTANCE, DeleteDatasourceTransportAction.class) + ); + + List> allHandlers = new ArrayList<>(); + allHandlers.addAll(geoJsonHandlers); + allHandlers.addAll(ip2geoHandlers); + return allHandlers; } @Override public Map getMappers() { - return 
Map.of(XYShapeFieldMapper.CONTENT_TYPE, new XYShapeFieldTypeParser()); + return Map.of( + XYShapeFieldMapper.CONTENT_TYPE, + new XYShapeFieldTypeParser(), + XYPointFieldMapper.CONTENT_TYPE, + new XYPointFieldTypeParser() + ); } @Override @@ -113,4 +242,19 @@ public List> getQueries() { // Register XYShapeQuery Builder to be delegated for query type: xy_shape return List.of(new QuerySpec<>(XYShapeQueryBuilder.NAME, XYShapeQueryBuilder::new, XYShapeQueryBuilder::fromXContent)); } + + /** + * Registering {@link GeoHexGrid} aggregation on GeoPoint field. + */ + @Override + public List getAggregations() { + + final var geoHexGridSpec = new AggregationSpec( + GeoHexGridAggregationBuilder.NAME, + GeoHexGridAggregationBuilder::new, + GeoHexGridAggregationBuilder.PARSER + ).addResultReader(GeoHexGrid::new).setAggregatorRegistrar(GeoHexGridAggregationBuilder::registerAggregators); + + return List.of(geoHexGridSpec); + } } diff --git a/src/main/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONAction.java b/src/main/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONAction.java index 5f211e31..51b3bbca 100644 --- a/src/main/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONAction.java +++ b/src/main/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONAction.java @@ -13,9 +13,9 @@ import java.util.List; import org.opensearch.client.node.NodeClient; -import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaType; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONAction; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequest; import org.opensearch.rest.BaseRestHandler; @@ -84,7 +84,7 @@ public List routes() { @Override protected RestChannelConsumer 
prepareRequest(RestRequest restRequest, NodeClient client) { - Tuple sourceTuple = restRequest.contentOrSourceParam(); + Tuple sourceTuple = restRequest.contentOrSourceParam(); RestRequest.Method method = restRequest.getHttpRequest().method(); UploadGeoJSONRequest request = new UploadGeoJSONRequest(method, sourceTuple.v2()); return channel -> client.execute(UploadGeoJSONAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGrid.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGrid.java new file mode 100644 index 00000000..7173bfa1 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGrid.java @@ -0,0 +1,64 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGridBucket; +import org.opensearch.search.aggregations.InternalAggregations; + +/** + * Represents a grid of cells where each cell's location is determined by a h3 cell address. 
+ * All h3CellAddress in a grid are of the same precision + */ +public final class GeoHexGrid extends InternalGeoGrid { + + public GeoHexGrid(StreamInput in) throws IOException { + super(in); + } + + @Override + public InternalGeoGrid create(List list) { + return new GeoHexGrid(name, requiredSize, buckets, metadata); + } + + @Override + public InternalGeoGridBucket createBucket(InternalAggregations internalAggregations, InternalGeoGridBucket baseGeoGridBucket) { + return new GeoHexGridBucket(baseGeoGridBucket.hashAsLong(), baseGeoGridBucket.getDocCount(), internalAggregations); + } + + @Override + public String getWriteableName() { + return GeoHexGridAggregationBuilder.NAME; + } + + protected GeoHexGrid(String name, int requiredSize, List buckets, Map metadata) { + super(name, requiredSize, buckets, metadata); + } + + @Override + protected Reader getBucketReader() { + return GeoHexGridBucket::new; + } + + @Override + protected InternalGeoGrid create(String name, int requiredSize, List buckets, Map metadata) { + return new GeoHexGrid(name, requiredSize, buckets, metadata); + } + + @Override + protected GeoHexGridBucket createBucket(long address, long docCount, InternalAggregations internalAggregations) { + return new GeoHexGridBucket(address, docCount, internalAggregations); + } + + int getRequiredSize() { + return requiredSize; + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java new file mode 100644 index 00000000..018be48e --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregationBuilder.java @@ -0,0 +1,135 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static 
org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexHelper.checkPrecisionRange; + +import java.io.IOException; +import java.util.Map; + +import org.opensearch.OpenSearchParseException; +import org.opensearch.common.geo.GeoBoundingBox; +import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregatorSupplier; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.AggregatorFactories; +import org.opensearch.search.aggregations.AggregatorFactory; +import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.opensearch.search.aggregations.support.ValuesSourceConfig; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; + +/** + * Aggregation Builder for geo hex grid + */ +public class GeoHexGridAggregationBuilder extends GeoGridAggregationBuilder { + + /** + * Aggregation context name + */ + public static final String NAME = "geohex_grid"; + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + NAME, + GeoGridAggregatorSupplier.class + ); + public static final ObjectParser PARSER = createParser( + NAME, + GeoHexGridAggregationBuilder::parsePrecision, + GeoHexGridAggregationBuilder::new + ); + private static final int DEFAULT_MAX_NUM_CELLS = 10000; + private static final int DEFAULT_PRECISION = 5; + private static final int DEFAULT_SHARD_SIZE = -1; + + public GeoHexGridAggregationBuilder(String name) { + super(name); + precision(DEFAULT_PRECISION); + size(DEFAULT_MAX_NUM_CELLS); + shardSize = DEFAULT_SHARD_SIZE; + } + + 
public GeoHexGridAggregationBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + public String getType() { + return NAME; + } + + /** + * Register's Geo Hex Aggregation + * @param builder Builder to register new Aggregation + */ + public static void registerAggregators(final ValuesSourceRegistry.Builder builder) { + GeoHexGridAggregatorFactory.registerAggregators(builder); + } + + @Override + public GeoGridAggregationBuilder precision(int precision) { + checkPrecisionRange(precision); + this.precision = precision; + return this; + } + + protected GeoHexGridAggregationBuilder( + GeoGridAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { + super(clone, factoriesBuilder, metadata); + } + + @Override + protected ValuesSourceAggregatorFactory createFactory( + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + QueryShardContext queryShardContext, + AggregatorFactory aggregatorFactory, + AggregatorFactories.Builder builder, + Map metadata + ) throws IOException { + return new GeoHexGridAggregatorFactory( + name, + config, + precision, + requiredSize, + shardSize, + geoBoundingBox, + queryShardContext, + aggregatorFactory, + builder, + metadata + ); + } + + @Override + protected ValuesSourceRegistry.RegistryKey getRegistryKey() { + return REGISTRY_KEY; + } + + @Override + protected AggregationBuilder shallowCopy(AggregatorFactories.Builder builder, Map metadata) { + return new GeoHexGridAggregationBuilder(this, builder, metadata); + } + + private static int parsePrecision(final XContentParser parser) throws IOException, OpenSearchParseException { + final var token = parser.currentToken(); + if (token.equals(XContentParser.Token.VALUE_NUMBER)) { + return XContentMapValues.nodeIntegerValue(parser.intValue()); + } + final var precision = parser.text(); + return XContentMapValues.nodeIntegerValue(precision); + } +} diff --git 
a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregator.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregator.java new file mode 100644 index 00000000..c4b902bf --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregator.java @@ -0,0 +1,53 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregator; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGridBucket; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.AggregatorFactories; +import org.opensearch.search.aggregations.CardinalityUpperBound; +import org.opensearch.search.aggregations.support.ValuesSource; +import org.opensearch.search.internal.SearchContext; + +/** + * Aggregates data expressed as H3 Cell ID. 
+ */ +public class GeoHexGridAggregator extends GeoGridAggregator { + + public GeoHexGridAggregator( + String name, + AggregatorFactories factories, + ValuesSource.Numeric valuesSource, + int requiredSize, + int shardSize, + SearchContext aggregationContext, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { + super(name, factories, valuesSource, requiredSize, shardSize, aggregationContext, parent, cardinality, metadata); + } + + @Override + protected GeoHexGrid buildAggregation( + String name, + int requiredSize, + List buckets, + Map metadata + ) { + return new GeoHexGrid(name, requiredSize, buckets, metadata); + } + + @Override + protected InternalGeoGridBucket newEmptyBucket() { + return new GeoHexGridBucket(0, 0, null); + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorFactory.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorFactory.java new file mode 100644 index 00000000..424f810a --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorFactory.java @@ -0,0 +1,129 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import org.opensearch.common.geo.GeoBoundingBox; +import org.opensearch.geo.search.aggregations.bucket.geogrid.cells.CellIdSource; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.AggregatorFactories; +import org.opensearch.search.aggregations.AggregatorFactory; +import org.opensearch.search.aggregations.CardinalityUpperBound; +import org.opensearch.search.aggregations.InternalAggregation; +import 
org.opensearch.search.aggregations.NonCollectingAggregator; +import org.opensearch.search.aggregations.support.CoreValuesSourceType; +import org.opensearch.search.aggregations.support.ValuesSource; +import org.opensearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.opensearch.search.aggregations.support.ValuesSourceConfig; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; +import org.opensearch.search.internal.SearchContext; + +/** + * Aggregation Factory for geohex_grid agg + */ +public class GeoHexGridAggregatorFactory extends ValuesSourceAggregatorFactory { + private final int precision; + private final int requiredSize; + private final int shardSize; + private final GeoBoundingBox geoBoundingBox; + + GeoHexGridAggregatorFactory( + String name, + ValuesSourceConfig config, + int precision, + int requiredSize, + int shardSize, + GeoBoundingBox geoBoundingBox, + QueryShardContext queryShardContext, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata + ) throws IOException { + super(name, config, queryShardContext, parent, subFactoriesBuilder, metadata); + this.precision = precision; + this.requiredSize = requiredSize; + this.shardSize = shardSize; + this.geoBoundingBox = geoBoundingBox; + } + + @Override + protected Aggregator createUnmapped(SearchContext searchContext, Aggregator aggregator, Map map) throws IOException { + final var aggregation = new GeoHexGrid(name, requiredSize, List.of(), metadata); + + return new NonCollectingAggregator(name, searchContext, aggregator, factories, metadata) { + @Override + public InternalAggregation buildEmptyAggregation() { + return aggregation; + } + }; + } + + @Override + protected Aggregator doCreateInternal( + SearchContext searchContext, + Aggregator aggregator, + CardinalityUpperBound cardinalityUpperBound, + Map map + ) throws IOException { + return queryShardContext.getValuesSourceRegistry() + 
.getAggregator(GeoHexGridAggregationBuilder.REGISTRY_KEY, config) + .build( + name, + factories, + config.getValuesSource(), + precision, + geoBoundingBox, + requiredSize, + shardSize, + searchContext, + aggregator, + cardinalityUpperBound, + metadata + ); + } + + static void registerAggregators(final ValuesSourceRegistry.Builder builder) { + builder.register( + GeoHexGridAggregationBuilder.REGISTRY_KEY, + CoreValuesSourceType.GEOPOINT, + ( + name, + factories, + valuesSource, + precision, + geoBoundingBox, + requiredSize, + shardSize, + aggregationContext, + parent, + cardinality, + metadata) -> { + CellIdSource cellIdSource = new CellIdSource( + (ValuesSource.GeoPoint) valuesSource, + precision, + geoBoundingBox, + GeoHexHelper::longEncode + ); + return new GeoHexGridAggregator( + name, + factories, + cellIdSource, + requiredSize, + shardSize, + aggregationContext, + parent, + cardinality, + metadata + ); + }, + true + ); + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridBucket.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridBucket.java new file mode 100644 index 00000000..4a3fa676 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridBucket.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexHelper.h3ToGeoPoint; + +import java.io.IOException; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGridBucket; +import org.opensearch.geospatial.h3.H3; +import org.opensearch.search.aggregations.InternalAggregations; + +/** + * Implementation of geohex grid bucket + */ +public class GeoHexGridBucket extends InternalGeoGridBucket { + 
+ public GeoHexGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { + super(hashAsLong, docCount, aggregations); + } + + /** + * Read from a Stream + * @param in {@link StreamInput} contains GridBucket + * @throws IOException + */ + public GeoHexGridBucket(StreamInput in) throws IOException { + super(in); + } + + @Override + public Object getKey() { + return h3ToGeoPoint(hashAsLong); + } + + @Override + public String getKeyAsString() { + return H3.h3ToString(hashAsLong); + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexHelper.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexHelper.java new file mode 100644 index 00000000..c556f3f9 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexHelper.java @@ -0,0 +1,72 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static org.opensearch.geospatial.h3.H3.MAX_H3_RES; +import static org.opensearch.geospatial.h3.H3.MIN_H3_RES; +import static org.opensearch.geospatial.h3.H3.geoToH3; +import static org.opensearch.geospatial.h3.H3.h3IsValid; +import static org.opensearch.geospatial.h3.H3.h3ToLatLng; +import static org.opensearch.geospatial.h3.H3.stringToH3; + +import java.util.Locale; + +import org.opensearch.common.geo.GeoPoint; + +import lombok.NonNull; + +/** + * Helper class for H3 library + */ +public class GeoHexHelper { + + /** + * Checks whether given precision is within H3 Precision range + * @param precision H3 index precision + */ + public static void checkPrecisionRange(int precision) { + if ((precision < MIN_H3_RES) || (precision > MAX_H3_RES)) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Invalid precision of %d . 
Must be between %d and %d.", precision, MIN_H3_RES, MAX_H3_RES) + ); + } + } + + /** + * Converts from long representation of an index to {@link GeoPoint} representation. + * @param h3CellID H3 Cell Id + * @throws IllegalArgumentException if invalid h3CellID is provided + */ + public static GeoPoint h3ToGeoPoint(long h3CellID) { + if (h3IsValid(h3CellID) == false) { + throw new IllegalArgumentException(String.format(Locale.ROOT, "Invalid H3 Cell address: %d", h3CellID)); + } + final var position = h3ToLatLng(h3CellID); + return new GeoPoint(position.getLatDeg(), position.getLonDeg()); + } + + /** + * Converts from {@link String} representation of an index to {@link GeoPoint} representation. + * @param h3CellID H3 Cell Id + * @throws IllegalArgumentException if invalid h3CellID is provided + */ + public static GeoPoint h3ToGeoPoint(@NonNull String h3CellID) { + return h3ToGeoPoint(stringToH3(h3CellID)); + } + + /** + * Encodes longitude/latitude into H3 Cell Address for given precision + * + * @param latitude Latitude in degrees. + * @param longitude Longitude in degrees. + * @param precision Precision, 0 <= res <= 15 + * @return The H3 index. + * @throws IllegalArgumentException latitude, longitude, or precision are out of range. 
+ */ + public static long longEncode(double longitude, double latitude, int precision) { + return geoToH3(latitude, longitude, precision); + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java new file mode 100644 index 00000000..e7231c22 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGrid.java @@ -0,0 +1,33 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import java.io.IOException; + +import org.opensearch.core.xcontent.ObjectParser; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.search.aggregations.bucket.geogrid.ParsedGeoGrid; + +import lombok.NoArgsConstructor; + +@NoArgsConstructor +public class ParsedGeoHexGrid extends ParsedGeoGrid { + private static final ObjectParser PARSER = createParser( + ParsedGeoHexGrid::new, + ParsedGeoHexGridBucket::fromXContent, + ParsedGeoHexGridBucket::fromXContent + ); + + public static ParsedGeoGrid fromXContent(XContentParser parser, String name) throws IOException { + final var parsedGeoGrid = PARSER.parse(parser, null); + parsedGeoGrid.setName(name); + return parsedGeoGrid; + } + + public String getType() { + return GeoHexGridAggregationBuilder.NAME; + } +} diff --git a/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java new file mode 100644 index 00000000..6ddb0734 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/ParsedGeoHexGridBucket.java @@ -0,0 +1,30 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import java.io.IOException; + +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.search.aggregations.bucket.geogrid.ParsedGeoGridBucket; + +import lombok.NoArgsConstructor; + +@NoArgsConstructor +public class ParsedGeoHexGridBucket extends ParsedGeoGridBucket { + + public GeoPoint getKey() { + return GeoHexHelper.h3ToGeoPoint(this.hashAsString); + } + + public String getKeyAsString() { + return this.hashAsString; + } + + static ParsedGeoHexGridBucket fromXContent(XContentParser parser) throws IOException { + return parseXContent(parser, false, ParsedGeoHexGridBucket::new, (p, bucket) -> { bucket.hashAsString = p.textOrNull(); }); + } +} diff --git a/src/main/java/org/opensearch/geospatial/shared/Constants.java b/src/main/java/org/opensearch/geospatial/shared/Constants.java new file mode 100644 index 00000000..7b6488a4 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/shared/Constants.java @@ -0,0 +1,15 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.shared; + +import java.util.Locale; + +import org.opensearch.Version; + +public class Constants { + public static final String USER_AGENT_KEY = "User-Agent"; + public static final String USER_AGENT_VALUE = String.format(Locale.ROOT, "OpenSearch/%s vanilla", Version.CURRENT.toString()); +} diff --git a/src/main/java/org/opensearch/geospatial/shared/StashedThreadContext.java b/src/main/java/org/opensearch/geospatial/shared/StashedThreadContext.java new file mode 100644 index 00000000..1ee59297 --- /dev/null +++ b/src/main/java/org/opensearch/geospatial/shared/StashedThreadContext.java @@ -0,0 +1,41 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.shared; + +import java.util.function.Supplier; + +import 
org.opensearch.client.Client; +import org.opensearch.common.util.concurrent.ThreadContext; + +/** + * Helper class to run code with stashed thread context + * + * Code need to be run with stashed thread context if it interacts with system index + * when security plugin is enabled. + */ +public class StashedThreadContext { + /** + * Set the thread context to default, this is needed to allow actions on model system index + * when security plugin is enabled + * @param function runnable that needs to be executed after thread context has been stashed, accepts and returns nothing + */ + public static void run(final Client client, final Runnable function) { + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + function.run(); + } + } + + /** + * Set the thread context to default, this is needed to allow actions on model system index + * when security plugin is enabled + * @param function supplier function that needs to be executed after thread context has been stashed, return object + */ + public static T run(final Client client, final Supplier function) { + try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { + return function.get(); + } + } +} diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/TotalUploadStats.java b/src/main/java/org/opensearch/geospatial/stats/upload/TotalUploadStats.java index f7be61cc..d6427754 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/TotalUploadStats.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/TotalUploadStats.java @@ -11,8 +11,8 @@ import java.util.Objects; import java.util.function.Function; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; // Holder to construct summary of Upload API Stats across all Nodes public final 
class TotalUploadStats implements ToXContentObject { @@ -28,7 +28,7 @@ public enum FIELDS { @Override public String toString() { - return name().toLowerCase(Locale.getDefault()); + return name().toLowerCase(Locale.ROOT); } } diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadMetric.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadMetric.java index 7419941d..b9deeb79 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadMetric.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadMetric.java @@ -9,14 +9,14 @@ import java.util.Locale; import java.util.Objects; -import lombok.Getter; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.Strings; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.xcontent.ToXContentFragment; -import org.opensearch.common.xcontent.XContentBuilder; +import lombok.Getter; /** * UploadMetric stores metric for an upload API @@ -34,7 +34,7 @@ public enum FIELDS { @Override public String toString() { - return this.name().toLowerCase(Locale.getDefault()); + return this.name().toLowerCase(Locale.ROOT); } } diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStats.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStats.java index a2a134cb..c2afd22b 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStats.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStats.java @@ -12,12 +12,12 @@ import java.util.Objects; import java.util.Set; -import 
org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.metrics.CounterMetric; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; /** * Contains the total upload stats @@ -32,7 +32,7 @@ public enum FIELDS { @Override public String toString() { - return name().toLowerCase(Locale.getDefault()); + return name().toLowerCase(Locale.ROOT); } } diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeRequest.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeRequest.java index 5ddb0e54..1bae07ac 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeRequest.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeRequest.java @@ -8,8 +8,8 @@ import java.io.IOException; import org.opensearch.action.support.nodes.BaseNodeRequest; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; public class UploadStatsNodeRequest extends BaseNodeRequest { diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponse.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponse.java index c5801cd9..3baa29a1 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponse.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponse.java @@ -10,10 +10,10 @@ import 
org.opensearch.action.support.nodes.BaseNodeResponse; import org.opensearch.cluster.node.DiscoveryNode; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; public class UploadStatsNodeResponse extends BaseNodeResponse implements ToXContentObject { diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsRequest.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsRequest.java index 9bcacc99..5c3efa9d 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsRequest.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsRequest.java @@ -8,7 +8,7 @@ import java.io.IOException; import org.opensearch.action.support.nodes.BaseNodesRequest; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamInput; public class UploadStatsRequest extends BaseNodesRequest { diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsResponse.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsResponse.java index fd870193..c8f0fe76 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsResponse.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsResponse.java @@ -14,11 +14,11 @@ import org.opensearch.action.FailedNodeException; import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.cluster.ClusterName; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; -import 
org.opensearch.common.xcontent.ToXContentObject; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; public class UploadStatsResponse extends BaseNodesResponse implements Writeable, ToXContentObject { diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsService.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsService.java index 4657526b..95717edb 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsService.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsService.java @@ -13,8 +13,8 @@ import java.util.Objects; import java.util.stream.Collectors; -import org.opensearch.common.xcontent.ToXContentFragment; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContentFragment; +import org.opensearch.core.xcontent.XContentBuilder; // Service to calculate summary of upload stats and generate XContent for UploadStatsResponse public class UploadStatsService implements ToXContentFragment { diff --git a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsTransportAction.java b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsTransportAction.java index 5caa3ac0..869eecb4 100644 --- a/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsTransportAction.java +++ b/src/main/java/org/opensearch/geospatial/stats/upload/UploadStatsTransportAction.java @@ -13,7 +13,7 @@ import org.opensearch.action.support.nodes.TransportNodesAction; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.io.stream.StreamInput; +import 
org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; diff --git a/src/main/plugin-metadata/plugin-security.policy b/src/main/plugin-metadata/plugin-security.policy new file mode 100644 index 00000000..6e9e1030 --- /dev/null +++ b/src/main/plugin-metadata/plugin-security.policy @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +grant { + // needed by Ip2Geo datasource to get GeoIP database + permission java.net.SocketPermission "*", "connect,resolve"; +}; diff --git a/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension b/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension new file mode 100644 index 00000000..e3d6fe6f --- /dev/null +++ b/src/main/resources/META-INF/services/org.opensearch.jobscheduler.spi.JobSchedulerExtension @@ -0,0 +1,11 @@ +# +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. 
+# + +# This file is needed to register DatasourceExtension in job scheduler framework +# See https://github.com/opensearch-project/job-scheduler/blob/main/README.md#getting-started +org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension diff --git a/src/main/resources/mappings/ip2geo_datasource.json b/src/main/resources/mappings/ip2geo_datasource.json new file mode 100644 index 00000000..567052d6 --- /dev/null +++ b/src/main/resources/mappings/ip2geo_datasource.json @@ -0,0 +1,132 @@ +{ + "properties": { + "database": { + "properties": { + "fields": { + "type": "text" + }, + "provider": { + "type": "text" + }, + "sha256_hash": { + "type": "text" + }, + "updated_at_in_epoch_millis": { + "type": "long" + }, + "valid_for_in_days": { + "type": "long" + } + } + }, + "enabled_time": { + "type": "long" + }, + "endpoint": { + "type": "text" + }, + "indices": { + "type": "text" + }, + "last_update_time": { + "type": "long" + }, + "name": { + "type": "text" + }, + "schedule": { + "properties": { + "interval": { + "properties": { + "period": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "unit": { + "type": "text" + } + } + } + } + }, + "state": { + "type": "text" + }, + "system_schedule": { + "properties": { + "interval": { + "properties": { + "period": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "unit": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + }, + "task": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + }, + "update_enabled": { + "type": "boolean" + }, + "update_stats": { + "properties": { + "last_failed_at_in_epoch_millis": { + "type": "long" + }, + "last_processing_time_in_millis": { + "type": "long" + }, + "last_skipped_at_in_epoch_millis": { + "type": "long" + }, + "last_succeeded_at_in_epoch_millis": { + "type": "long" + } + } + }, + "user_schedule": { + "properties": { + 
"interval": { + "properties": { + "period": { + "type": "long" + }, + "start_time": { + "type": "long" + }, + "unit": { + "type": "text", + "fields": { + "keyword": { + "type": "keyword", + "ignore_above": 256 + } + } + } + } + } + } + } + } +} \ No newline at end of file diff --git a/src/main/resources/mappings/ip2geo_geoip.json b/src/main/resources/mappings/ip2geo_geoip.json new file mode 100644 index 00000000..3179ef0d --- /dev/null +++ b/src/main/resources/mappings/ip2geo_geoip.json @@ -0,0 +1,9 @@ +{ + "dynamic": false, + "properties": { + "_cidr": { + "type": "ip_range", + "doc_values": false + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/ClusterSettingHelper.java b/src/test/java/org/opensearch/geospatial/ClusterSettingHelper.java new file mode 100644 index 00000000..93bde1b6 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ClusterSettingHelper.java @@ -0,0 +1,64 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial; + +import static org.apache.lucene.tests.util.LuceneTestCase.createTempDir; +import static org.opensearch.test.NodeRoles.dataNode; +import static org.opensearch.test.OpenSearchTestCase.getTestTransportPlugin; +import static org.opensearch.test.OpenSearchTestCase.getTestTransportType; +import static org.opensearch.test.OpenSearchTestCase.randomLong; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.opensearch.cluster.ClusterName; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.network.NetworkModule; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.geospatial.plugin.GeospatialPlugin; +import org.opensearch.node.MockNode; +import org.opensearch.node.Node; +import org.opensearch.plugins.Plugin; +import 
org.opensearch.test.InternalTestCluster; +import org.opensearch.test.MockHttpTransport; + +@SuppressForbidden(reason = "used only for testing") +public class ClusterSettingHelper { + public Node createMockNode(Map configSettings) throws IOException { + Path configDir = createTempDir(); + File configFile = configDir.resolve("opensearch.yml").toFile(); + FileWriter configFileWriter = new FileWriter(configFile); + + for (Map.Entry setting : configSettings.entrySet()) { + configFileWriter.write("\"" + setting.getKey() + "\": " + setting.getValue()); + } + configFileWriter.close(); + return new MockNode(baseSettings().build(), basePlugins(), configDir, true); + } + + private List> basePlugins() { + List> plugins = new ArrayList<>(); + plugins.add(getTestTransportPlugin()); + plugins.add(MockHttpTransport.TestPlugin.class); + plugins.add(GeospatialPlugin.class); + return plugins; + } + + private static Settings.Builder baseSettings() { + final Path tempDir = createTempDir(); + return Settings.builder() + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) + .put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()) + .put(dataNode()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/GeospatialObjectBuilder.java b/src/test/java/org/opensearch/geospatial/GeospatialObjectBuilder.java index 8e60140b..6422db4e 100644 --- a/src/test/java/org/opensearch/geospatial/GeospatialObjectBuilder.java +++ b/src/test/java/org/opensearch/geospatial/GeospatialObjectBuilder.java @@ -5,6 +5,7 @@ package org.opensearch.geospatial; +import java.util.List; import java.util.Map; import java.util.stream.IntStream; @@ -12,7 +13,6 @@ import org.json.JSONObject; import org.opensearch.common.Randomness; import org.opensearch.common.UUIDs; -import org.opensearch.common.collect.List; import org.opensearch.common.geo.GeoShapeType; import 
org.opensearch.geo.GeometryTestUtils; import org.opensearch.geospatial.geojson.Feature; diff --git a/src/test/java/org/opensearch/geospatial/GeospatialParserTests.java b/src/test/java/org/opensearch/geospatial/GeospatialParserTests.java index 53778c6c..d7de97b9 100644 --- a/src/test/java/org/opensearch/geospatial/GeospatialParserTests.java +++ b/src/test/java/org/opensearch/geospatial/GeospatialParserTests.java @@ -9,11 +9,10 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; import org.json.JSONArray; import org.json.JSONObject; -import org.opensearch.common.bytes.BytesArray; +import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.geospatial.geojson.Feature; import org.opensearch.test.OpenSearchTestCase; @@ -77,10 +76,10 @@ public void testConvertToMap() { public void testGetFeaturesWithGeoJSONFeature() { Map geoJSON = GeospatialObjectBuilder.randomGeoJSONFeature(new JSONObject()).toMap(); - Optional>> features = GeospatialParser.getFeatures(geoJSON); - assertTrue(features.isPresent()); - assertEquals(1, features.get().size()); - assertEquals(features.get().get(0), geoJSON); + List> features = GeospatialParser.getFeatures(geoJSON); + assertFalse(features.isEmpty()); + assertEquals(1, features.size()); + assertEquals(features.get(0), geoJSON); } public void testGetFeaturesWithGeoJSONFeatureCollection() { @@ -90,16 +89,16 @@ public void testGetFeaturesWithGeoJSONFeatureCollection() { features.put(GeospatialObjectBuilder.randomGeoJSONFeature(new JSONObject())); JSONObject collection = GeospatialObjectBuilder.buildGeoJSONFeatureCollection(features); - Optional>> featureList = GeospatialParser.getFeatures(collection.toMap()); - assertTrue(featureList.isPresent()); - assertEquals(featureList.get().size(), features.length()); + List> featureList = GeospatialParser.getFeatures(collection.toMap()); + assertFalse(featureList.isEmpty()); + assertEquals(featureList.size(), features.length()); } public void 
testGetFeaturesWithUnSupportedType() { Map geoJSON = new HashMap<>(); geoJSON.put(Feature.TYPE_KEY, "invalid-type"); - Optional>> features = GeospatialParser.getFeatures(geoJSON); - assertFalse(features.isPresent()); + List> features = GeospatialParser.getFeatures(geoJSON); + assertTrue(features.isEmpty()); } } diff --git a/src/test/java/org/opensearch/geospatial/GeospatialRestTestCase.java b/src/test/java/org/opensearch/geospatial/GeospatialRestTestCase.java index 0b213dbc..b6bd9937 100644 --- a/src/test/java/org/opensearch/geospatial/GeospatialRestTestCase.java +++ b/src/test/java/org/opensearch/geospatial/GeospatialRestTestCase.java @@ -6,19 +6,23 @@ package org.opensearch.geospatial; import static java.util.stream.Collectors.joining; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; import static org.opensearch.geospatial.GeospatialObjectBuilder.buildProperties; import static org.opensearch.geospatial.GeospatialObjectBuilder.randomGeoJSONFeature; import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; -import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseStringWithSuffix; -import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.ACCEPTED_INDEX_SUFFIX_PATH; import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.FIELD_DATA; import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; +import static org.opensearch.index.query.AbstractGeometryQueryBuilder.DEFAULT_SHAPE_FIELD_NAME; +import static org.opensearch.rest.action.search.RestSearchAction.TYPED_KEYS_PARAM; +import static org.opensearch.search.aggregations.Aggregations.AGGREGATIONS_FIELD; import java.io.IOException; +import java.time.Duration; +import java.time.Instant; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import 
java.util.Map; import java.util.Optional; import java.util.stream.IntStream; @@ -32,24 +36,25 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.CheckedConsumer; -import org.opensearch.common.Strings; import org.opensearch.common.UUIDs; import org.opensearch.common.geo.GeoJson; +import org.opensearch.common.geo.ShapeRelation; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.common.Strings; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geometry.Geometry; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent; -import org.opensearch.geospatial.processor.FeatureProcessor; +import org.opensearch.geospatial.index.mapper.xyshape.XYShapeFieldMapper; +import org.opensearch.geospatial.index.query.xyshape.XYShapeQueryBuilder; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; import org.opensearch.geospatial.rest.action.upload.geojson.RestUploadGeoJSONAction; import org.opensearch.ingest.Pipeline; -import org.opensearch.rest.RestStatus; -import org.opensearch.test.rest.OpenSearchRestTestCase; - -public abstract class GeospatialRestTestCase extends OpenSearchRestTestCase { +public abstract class GeospatialRestTestCase extends OpenSearchSecureRestTestCase { public static final String SOURCE = "_source"; public static final String DOC = "_doc"; public static final String URL_DELIMITER = "/"; @@ -61,12 +66,30 @@ public abstract class GeospatialRestTestCase extends OpenSearchRestTestCase { public static final String FIELD_COUNT_KEY = "count"; public static final String PARAM_REFRESH_KEY = "refresh"; public static final String SEARCH = "_search"; + public static 
final String SHAPE_RELATION = "relation"; + public static final String INDEXED_SHAPE_FIELD = "indexed_shape"; + public static final String SHAPE_INDEX_FIELD = "index"; + public static final String SHAPE_ID_FIELD = "id"; + public static final String SHAPE_INDEX_PATH_FIELD = "path"; + public static final String QUERY_PARAM_TOKEN = "?"; + private static final String SETTINGS = "_settings"; + private static final String SIMULATE = "_simulate"; + private static final String DOCS = "docs"; + private static final String DATASOURCES = "datasources"; + private static final String STATE = "state"; + private static final String PUT = "PUT"; + private static final String GET = "GET"; + private static final String DELETE = "DELETE"; private static String buildPipelinePath(String name) { return String.join(URL_DELIMITER, "_ingest", "pipeline", name); } - protected static void createPipeline(String name, Optional description, List> processorConfigs) + private static String buildDatasourcePath(String name) { + return String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource", name); + } + + protected static Response createPipeline(String name, Optional description, List> processorConfigs) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); if (description.isPresent()) { @@ -78,8 +101,8 @@ protected static void createPipeline(String name, Optional description, builder.endObject(); Request request = new Request("PUT", buildPipelinePath(name)); - request.setJsonEntity(Strings.toString(builder)); - client().performRequest(request); + request.setJsonEntity(builder.toString()); + return client().performRequest(request); } protected static void deletePipeline(String name) throws IOException { @@ -87,13 +110,110 @@ protected static void deletePipeline(String name) throws IOException { client().performRequest(request); } + protected Response createDatasource(final String name, Map properties) throws IOException { + XContentBuilder builder = 
XContentFactory.jsonBuilder().startObject(); + for (Map.Entry config : properties.entrySet()) { + builder.field(config.getKey(), config.getValue()); + } + builder.endObject(); + + Request request = new Request(PUT, buildDatasourcePath(name)); + request.setJsonEntity(builder.toString()); + return client().performRequest(request); + } + + protected void waitForDatasourceToBeAvailable(final String name, final Duration timeout) throws Exception { + Instant start = Instant.now(); + while (DatasourceState.AVAILABLE.equals(getDatasourceState(name)) == false) { + if (Duration.between(start, Instant.now()).compareTo(timeout) > 0) { + throw new RuntimeException( + String.format( + Locale.ROOT, + "Datasource state didn't change to %s after %d seconds", + DatasourceState.AVAILABLE.name(), + timeout.toSeconds() + ) + ); + } + Thread.sleep(1000); + } + } + + private DatasourceState getDatasourceState(final String name) throws Exception { + List> datasources = (List>) getDatasource(name).get(DATASOURCES); + return DatasourceState.valueOf((String) datasources.get(0).get(STATE)); + } + + protected Response deleteDatasource(final String name) throws IOException { + Request request = new Request(DELETE, buildDatasourcePath(name)); + return client().performRequest(request); + } + + protected Response deleteDatasource(final String name, final int retry) throws Exception { + for (int i = 0; i < retry; i++) { + try { + Request request = new Request(DELETE, buildDatasourcePath(name)); + return client().performRequest(request); + } catch (Exception e) { + if (i + 1 == retry) { + throw e; + } + Thread.sleep(1000); + } + } + throw new RuntimeException("should not reach here"); + } + + protected Map getDatasource(final String name) throws Exception { + Request request = new Request(GET, buildDatasourcePath(name)); + Response response = client().performRequest(request); + return createParser(XContentType.JSON.xContent(), EntityUtils.toString(response.getEntity())).map(); + } + + protected 
Response updateDatasource(final String name, Map properties) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + for (Map.Entry config : properties.entrySet()) { + builder.field(config.getKey(), config.getValue()); + } + builder.endObject(); + + Request request = new Request(PUT, String.join(URL_DELIMITER, buildDatasourcePath(name), SETTINGS)); + request.setJsonEntity(builder.toString()); + return client().performRequest(request); + } + + protected Map simulatePipeline(final String name, List docs) throws Exception { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.field(DOCS, docs); + builder.endObject(); + + Request request = new Request(GET, String.join(URL_DELIMITER, buildPipelinePath(name), SIMULATE)); + request.setJsonEntity(builder.toString()); + Response response = client().performRequest(request); + return createParser(XContentType.JSON.xContent(), EntityUtils.toString(response.getEntity())).map(); + } + + protected Response updateClusterSetting(final Map properties) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + builder.startObject("transient"); + for (Map.Entry config : properties.entrySet()) { + builder.field(config.getKey(), config.getValue()); + } + builder.endObject(); + builder.endObject(); + + Request request = new Request(PUT, "/_cluster/settings"); + request.setJsonEntity(builder.toString()); + return client().performRequest(request); + } + protected static void createIndex(String name, Settings settings, Map fieldMap) throws IOException { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject(MAPPING_PROPERTIES_KEY); for (Map.Entry entry : fieldMap.entrySet()) { xContentBuilder.startObject(entry.getKey()).field(FIELD_TYPE_KEY, entry.getValue()).endObject(); } xContentBuilder.endObject().endObject(); - String mapping = Strings.toString(xContentBuilder); + String mapping = 
xContentBuilder.toString(); createIndex(name, settings, mapping.substring(1, mapping.length() - 1)); } @@ -128,9 +248,9 @@ public static String indexDocument(String indexName, String docID, String body, return docID; } - protected Map buildGeoJSONFeatureProcessorConfig(Map properties) { + protected Map buildProcessorConfig(final String processorType, final Map properties) { Map featureProcessor = new HashMap<>(); - featureProcessor.put(FeatureProcessor.TYPE, properties); + featureProcessor.put(processorType, properties); return featureProcessor; } @@ -151,7 +271,7 @@ public Map getDocument(String docID, String indexName) throws IO // TODO This method is copied from unit test. Refactor to common class to share across tests protected JSONObject buildUploadGeoJSONRequestContent(int totalGeoJSONObject, String index, String geoFieldName) { JSONObject contents = new JSONObject(); - String indexName = Strings.hasText(index) ? index : randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); + String indexName = Strings.hasText(index) ? index : randomLowerCaseString(); String fieldName = Strings.hasText(geoFieldName) ? 
geoFieldName : randomLowerCaseString(); contents.put(UploadGeoJSONRequestContent.FIELD_INDEX.getPreferredName(), indexName); contents.put(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL.getPreferredName(), fieldName); @@ -240,10 +360,19 @@ public String buildContentAsString(CheckedConsumer XContentBuilder builder = JsonXContent.contentBuilder().startObject(); build.accept(builder); builder.endObject(); - return Strings.toString(builder); + return builder.toString(); + } + + public String buildSearchAggregationsBodyAsString(CheckedConsumer aggregationsBuilder) + throws IOException { + return buildContentAsString(builder -> { + builder.startObject(AGGREGATIONS_FIELD); + aggregationsBuilder.accept(builder); + builder.endObject(); + }); } - public String buildSearchBodyAsString( + public String buildSearchQueryBodyAsString( CheckedConsumer searchQueryBuilder, String queryType, String fieldName @@ -256,9 +385,12 @@ public String buildSearchBodyAsString( }); } - public SearchResponse searchIndex(String indexName, String entity) throws IOException { - String path = String.join(URL_DELIMITER, indexName, SEARCH); - final Request request = new Request("GET", path); + public SearchResponse searchIndex(String indexName, String entity, boolean includeType) throws Exception { + var urlPathBuilder = new StringBuilder().append(indexName).append(URL_DELIMITER).append(SEARCH); + if (includeType) { + urlPathBuilder.append(QUERY_PARAM_TOKEN).append(TYPED_KEYS_PARAM); + } + final Request request = new Request("GET", urlPathBuilder.toString()); request.setJsonEntity(entity); final Response response = client().performRequest(request); return SearchResponse.fromXContent(createParser(XContentType.JSON.xContent(), EntityUtils.toString(response.getEntity()))); @@ -279,4 +411,49 @@ protected String buildDocumentWithGeoJSON(String fieldName, Geometry geometry) t return document; } + public String indexDocumentUsingWKT(String indexName, String fieldName, String wktFormat) throws IOException { + 
final String document = buildDocumentWithWKT(fieldName, wktFormat); + return indexDocument(indexName, document); + } + + public String indexDocumentUsingGeoJSON(String indexName, String fieldName, Geometry geometry) throws IOException { + final String document = buildDocumentWithGeoJSON(fieldName, geometry); + return indexDocument(indexName, document); + } + + public SearchResponse searchUsingShapeRelation(String indexName, String fieldName, Geometry geometry, ShapeRelation shapeRelation) + throws Exception { + String searchEntity = buildSearchQueryBodyAsString(builder -> { + builder.field(DEFAULT_SHAPE_FIELD_NAME); + GeoJson.toXContent(geometry, builder, EMPTY_PARAMS); + builder.field(SHAPE_RELATION, shapeRelation.getRelationName()); + }, XYShapeQueryBuilder.NAME, fieldName); + + return searchIndex(indexName, searchEntity, false); + } + + public void createIndexedShapeIndex() throws IOException { + String indexedShapeIndex = randomLowerCaseString(); + String indexedShapePath = randomLowerCaseString(); + createIndex(indexedShapeIndex, Settings.EMPTY, Map.of(indexedShapePath, XYShapeFieldMapper.CONTENT_TYPE)); + } + + public SearchResponse searchUsingIndexedShapeIndex( + String indexName, + String indexedShapeIndex, + String indexedShapePath, + String docId, + String fieldName + ) throws Exception { + String searchEntity = buildSearchQueryBodyAsString(builder -> { + builder.startObject(INDEXED_SHAPE_FIELD); + builder.field(SHAPE_INDEX_FIELD, indexedShapeIndex); + builder.field(SHAPE_ID_FIELD, docId); + builder.field(SHAPE_INDEX_PATH_FIELD, indexedShapePath); + builder.endObject(); + }, XYShapeQueryBuilder.NAME, fieldName); + + return searchIndex(indexName, searchEntity, false); + } + } diff --git a/src/test/java/org/opensearch/geospatial/GeospatialTestHelper.java b/src/test/java/org/opensearch/geospatial/GeospatialTestHelper.java index a52a4c82..9f455a1e 100644 --- a/src/test/java/org/opensearch/geospatial/GeospatialTestHelper.java +++ 
b/src/test/java/org/opensearch/geospatial/GeospatialTestHelper.java @@ -16,7 +16,6 @@ import static org.junit.Assert.assertTrue; import static org.opensearch.geospatial.GeospatialObjectBuilder.buildProperties; import static org.opensearch.geospatial.GeospatialObjectBuilder.randomGeoJSONFeature; -import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.ACCEPTED_INDEX_SUFFIX_PATH; import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.FIELD_DATA; import static org.opensearch.test.OpenSearchTestCase.randomBoolean; import static org.opensearch.test.OpenSearchTestCase.randomIntBetween; @@ -40,10 +39,11 @@ import org.opensearch.common.Randomness; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.Tuple; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.geospatial.action.upload.geojson.ContentBuilder; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent; +import org.opensearch.geospatial.h3.H3; import org.opensearch.geospatial.stats.upload.UploadMetric; -import org.opensearch.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.RandomObjects; @@ -65,10 +65,7 @@ public static Map buildRequestContent(int featureCount) { if (Randomness.get().nextBoolean()) { contents.put(ContentBuilder.GEOJSON_FEATURE_ID_FIELD, randomLowerCaseString()); } - contents.put( - UploadGeoJSONRequestContent.FIELD_INDEX.getPreferredName(), - randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH) - ); + contents.put(UploadGeoJSONRequestContent.FIELD_INDEX.getPreferredName(), randomLowerCaseString()); contents.put(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL.getPreferredName(), randomLowerCaseString()); contents.put(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL_TYPE.getPreferredName(), "geo_shape"); JSONArray values = new JSONArray(); @@ -82,11 +79,7 @@ private static String randomString() { } public static String 
randomLowerCaseString() { - return randomString().toLowerCase(Locale.getDefault()); - } - - public static String randomLowerCaseStringWithSuffix(String suffix) { - return String.format(Locale.getDefault(), "%s%s", randomString().toLowerCase(Locale.getDefault()), suffix); + return randomString().toLowerCase(Locale.ROOT); } /** @@ -166,4 +159,8 @@ public static double[] toDoubleArray(float[] input) { return IntStream.range(0, input.length).mapToDouble(i -> input[i]).toArray(); } + public static int randomHexGridPrecision() { + return randomIntBetween(H3.MIN_H3_RES, H3.MAX_H3_RES); + } + } diff --git a/src/test/java/org/opensearch/geospatial/OpenSearchSecureRestTestCase.java b/src/test/java/org/opensearch/geospatial/OpenSearchSecureRestTestCase.java new file mode 100644 index 00000000..dba3364e --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/OpenSearchSecureRestTestCase.java @@ -0,0 +1,165 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.geospatial; + +import java.io.IOException; +import java.util.*; +import java.util.stream.Collectors; + +import org.apache.http.Header; +import org.apache.http.HttpHost; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.message.BasicHeader; +import org.apache.http.ssl.SSLContextBuilder; +import org.junit.After; +import org.opensearch.client.Request; +import org.opensearch.client.Response; +import org.opensearch.client.RestClient; +import org.opensearch.client.RestClientBuilder; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaType; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.rest.OpenSearchRestTestCase; + +/** + * Integration test base class to support both security disabled and enabled OpenSearch cluster. 
+ */ +public abstract class OpenSearchSecureRestTestCase extends OpenSearchRestTestCase { + private static final String PROTOCOL_HTTP = "http"; + private static final String PROTOCOL_HTTPS = "https"; + private static final String SYS_PROPERTY_KEY_HTTPS = "https"; + private static final String SYS_PROPERTY_KEY_CLUSTER_ENDPOINT = "tests.rest.cluster"; + private static final String SYS_PROPERTY_KEY_USER = "user"; + private static final String SYS_PROPERTY_KEY_PASSWORD = "password"; + private static final String DEFAULT_SOCKET_TIMEOUT = "60s"; + private static final String INTERNAL_INDICES_PREFIX = "."; + private static final String SYSTEM_INDEX_PREFIX = "security-auditlog"; + private static String protocol; + + @Override + protected String getProtocol() { + if (protocol == null) { + protocol = readProtocolFromSystemProperty(); + } + return protocol; + } + + private String readProtocolFromSystemProperty() { + boolean isHttps = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_HTTPS)).map("true"::equalsIgnoreCase).orElse(false); + if (!isHttps) { + return PROTOCOL_HTTP; + } + + // currently only external cluster is supported for security enabled testing + if (Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_CLUSTER_ENDPOINT)).isEmpty()) { + throw new RuntimeException("cluster url should be provided for security enabled testing"); + } + return PROTOCOL_HTTPS; + } + + @Override + protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { + RestClientBuilder builder = RestClient.builder(hosts); + if (PROTOCOL_HTTPS.equals(getProtocol())) { + configureHttpsClient(builder, settings); + } else { + configureClient(builder, settings); + } + + return builder.build(); + } + + private void configureHttpsClient(RestClientBuilder builder, Settings settings) { + Map headers = ThreadContext.buildDefaultHeaders(settings); + Header[] defaultHeaders = new Header[headers.size()]; + int i = 0; + for (Map.Entry entry : headers.entrySet()) { + 
defaultHeaders[i++] = new BasicHeader(entry.getKey(), entry.getValue()); + } + builder.setDefaultHeaders(defaultHeaders); + builder.setHttpClientConfigCallback(httpClientBuilder -> { + String userName = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_USER)) + .orElseThrow(() -> new RuntimeException("user name is missing")); + String password = Optional.ofNullable(System.getProperty(SYS_PROPERTY_KEY_PASSWORD)) + .orElseThrow(() -> new RuntimeException("password is missing")); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password)); + try { + return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider) + // disable the certificate since our testing cluster just uses the default security configuration + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .setSSLContext(SSLContextBuilder.create().loadTrustMaterial(null, (chains, authType) -> true).build()); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT); + final TimeValue socketTimeout = TimeValue.parseTimeValue( + socketTimeoutString == null ? DEFAULT_SOCKET_TIMEOUT : socketTimeoutString, + CLIENT_SOCKET_TIMEOUT + ); + builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); + if (settings.hasValue(CLIENT_PATH_PREFIX)) { + builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX)); + } + } + + /** + * wipeAllIndices won't work since it cannot delete security index. Use deleteExternalIndices instead. 
+ */ + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @After + public void deleteExternalIndices() throws IOException { + Response response = client().performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); + MediaType mediaType = MediaType.fromMediaType(response.getEntity().getContentType().getValue()); + try ( + XContentParser parser = mediaType.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent() + ) + ) { + XContentParser.Token token = parser.nextToken(); + List> parserList; + if (token == XContentParser.Token.START_ARRAY) { + parserList = parser.listOrderedMap().stream().map(obj -> (Map) obj).collect(Collectors.toList()); + } else { + parserList = Collections.singletonList(parser.mapOrdered()); + } + + List externalIndices = parserList.stream() + .map(index -> (String) index.get("index")) + .filter(indexName -> indexName != null) + .filter(indexName -> !indexName.startsWith(INTERNAL_INDICES_PREFIX)) + // This is hack to remove the security audit index from deletion. We will need a proper fix where + // we delete the indices after a test is completed. 
+ // Issue: https://github.com/opensearch-project/geospatial/issues/428 + .filter(indexName -> !indexName.startsWith(SYSTEM_INDEX_PREFIX)) + .collect(Collectors.toList()); + + for (String indexName : externalIndices) { + adminClient().performRequest(new Request("DELETE", "/" + indexName)); + } + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/IndexManagerTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/IndexManagerTests.java index 6fe205ad..be30eaca 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/IndexManagerTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/IndexManagerTests.java @@ -16,11 +16,11 @@ import java.util.stream.IntStream; import org.opensearch.ResourceAlreadyExistsException; -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.admin.indices.create.CreateIndexResponse; import org.opensearch.client.IndicesAdminClient; +import org.opensearch.core.action.ActionListener; import org.opensearch.test.OpenSearchTestCase; public class IndexManagerTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/PipelineManagerTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/PipelineManagerTests.java index ff301fe8..de2677f3 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/PipelineManagerTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/PipelineManagerTests.java @@ -11,12 +11,12 @@ import static org.mockito.Mockito.verify; import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import org.opensearch.action.ingest.DeletePipelineRequest; import 
org.opensearch.action.ingest.PutPipelineRequest; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.ClusterAdminClient; +import org.opensearch.core.action.ActionListener; import org.opensearch.test.OpenSearchTestCase; public class PipelineManagerTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContentTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContentTests.java index 52b95907..7486cc68 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContentTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestContentTests.java @@ -8,10 +8,9 @@ import static org.opensearch.geospatial.GeospatialObjectBuilder.buildProperties; import static org.opensearch.geospatial.GeospatialObjectBuilder.randomGeoJSONFeature; import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; -import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseStringWithSuffix; -import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.ACCEPTED_INDEX_SUFFIX_PATH; import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.FIELD_DATA; import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.GEOSPATIAL_DEFAULT_FIELD_NAME; +import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.MAX_SUPPORTED_GEOJSON_FEATURE_COUNT; import java.util.Collections; import java.util.Map; @@ -21,25 +20,33 @@ import org.opensearch.test.OpenSearchTestCase; public class UploadGeoJSONRequestContentTests extends OpenSearchTestCase { + private static int MIN_FEATURE_COUNT = 3; + private String indexName; + private String fieldName; - private Map buildRequestContent(String indexName, String fieldName) { - JSONObject 
contents = new JSONObject(); + @Override + public void setUp() throws Exception { + super.setUp(); + indexName = randomLowerCaseString(); + fieldName = randomLowerCaseString(); + } + + private Map buildRequestContent(String indexName, String fieldName, int count) { + final var contents = new JSONObject(); contents.put(UploadGeoJSONRequestContent.FIELD_INDEX.getPreferredName(), indexName); contents.put(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL.getPreferredName(), fieldName); contents.put(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL_TYPE.getPreferredName(), "geo_shape"); JSONArray values = new JSONArray(); - values.put(randomGeoJSONFeature(buildProperties(Collections.emptyMap()))); - values.put(randomGeoJSONFeature(buildProperties(Collections.emptyMap()))); - values.put(randomGeoJSONFeature(buildProperties(Collections.emptyMap()))); + for (int i = 0; i < count; i++) { + values.put(randomGeoJSONFeature(buildProperties(Collections.emptyMap()))); + } contents.put(FIELD_DATA.getPreferredName(), values); return contents.toMap(); } public void testCreate() { - final String indexName = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); - final String fieldName = "location"; - Map contents = buildRequestContent(indexName, fieldName); - UploadGeoJSONRequestContent content = UploadGeoJSONRequestContent.create(contents); + Map contents = buildRequestContent(indexName, fieldName, MIN_FEATURE_COUNT); + final var content = UploadGeoJSONRequestContent.create(contents); assertNotNull(content); assertEquals(fieldName, content.getFieldName()); assertEquals(indexName, content.getIndexName()); @@ -49,39 +56,32 @@ public void testCreate() { public void testCreateEmptyIndexName() { IllegalArgumentException invalidIndexName = assertThrows( IllegalArgumentException.class, - () -> UploadGeoJSONRequestContent.create(buildRequestContent("", "location")) + () -> UploadGeoJSONRequestContent.create(buildRequestContent("", "location", MIN_FEATURE_COUNT)) ); 
assertTrue(invalidIndexName.getMessage().contains("[ index ] cannot be empty")); } - public void testCreateEmptyGeospatialFieldName() { - UploadGeoJSONRequestContent content = UploadGeoJSONRequestContent.create( - buildRequestContent(randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH), "") - ); - assertNotNull(content); - assertEquals("wrong field name", GEOSPATIAL_DEFAULT_FIELD_NAME, content.getFieldName()); - } - - public void testCreateInvalidIndexName() { - final String indexName = randomLowerCaseString(); - final String fieldName = "location"; - Map contents = buildRequestContent(indexName, fieldName); - contents.remove(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL_TYPE.getPreferredName()); - IllegalArgumentException invalidIndexName = assertThrows( + public void testCreateWithOneMoreThanMaxSupportedFeatureCount() { + int featureCount = MAX_SUPPORTED_GEOJSON_FEATURE_COUNT + 1; + IllegalArgumentException reachedMaxFeatureCount = assertThrows( IllegalArgumentException.class, - () -> UploadGeoJSONRequestContent.create(contents) + () -> UploadGeoJSONRequestContent.create(buildRequestContent(indexName, fieldName, featureCount)) ); assertEquals( - "wrong exception message", - "field [ index ] should end with suffix " + ACCEPTED_INDEX_SUFFIX_PATH, - invalidIndexName.getMessage() + "wrong error returned", + reachedMaxFeatureCount.getMessage(), + "Received 10001 features, but, cannot upload more than 10000 features" ); } + public void testCreateEmptyGeospatialFieldName() { + final var content = UploadGeoJSONRequestContent.create(buildRequestContent(randomLowerCaseString(), "", MIN_FEATURE_COUNT)); + assertNotNull(content); + assertEquals("wrong field name", GEOSPATIAL_DEFAULT_FIELD_NAME, content.getFieldName()); + } + public void testCreateEmptyGeospatialFieldType() { - final String indexName = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); - final String fieldName = "location"; - Map contents = buildRequestContent(indexName, fieldName); + Map 
contents = buildRequestContent(indexName, fieldName, MIN_FEATURE_COUNT); contents.remove(UploadGeoJSONRequestContent.FIELD_GEOSPATIAL_TYPE.getPreferredName()); IllegalArgumentException invalidIndexName = assertThrows( IllegalArgumentException.class, @@ -89,5 +89,4 @@ public void testCreateEmptyGeospatialFieldType() { ); assertTrue(invalidIndexName.getMessage().contains("[ type ] cannot be empty")); } - } diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestTests.java index d973192a..0a591f2c 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONRequestTests.java @@ -13,9 +13,9 @@ import java.nio.charset.StandardCharsets; import org.json.JSONObject; -import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.rest.RestRequest; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponseTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponseTests.java index a70d506c..0f7feb4d 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponseTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploadGeoJSONResponseTests.java @@ -6,7 +6,8 @@ package org.opensearch.geospatial.action.upload.geojson; import org.opensearch.action.bulk.BulkResponse; -import org.opensearch.common.Strings; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; import 
org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.test.OpenSearchTestCase; @@ -20,7 +21,7 @@ public void testToXContentHasNoFailure() { int successActionCount = randomIntBetween(MIN_SUCCESS_ITEM_COUNT, MAX_SUCCESS_ITEM_COUNT); final BulkResponse bulkItemResponses = GeospatialTestHelper.generateRandomBulkResponse(successActionCount, false); UploadGeoJSONResponse getResponse = new UploadGeoJSONResponse(bulkItemResponses); - String responseBody = Strings.toString(getResponse); + String responseBody = Strings.toString(XContentType.JSON, getResponse); assertTrue(responseBody.contains("\"errors\":false")); assertTrue(responseBody.contains("\"failure\":0")); assertTrue(responseBody.contains("\"total\":" + successActionCount)); @@ -33,7 +34,7 @@ public void testToXContentHasFailure() { int totalActionCount = successActionCount + FAILURE_ITEM_COUNT; final BulkResponse bulkItemResponses = GeospatialTestHelper.generateRandomBulkResponse(successActionCount, true); UploadGeoJSONResponse getResponse = new UploadGeoJSONResponse(bulkItemResponses); - String responseBody = Strings.toString(getResponse); + String responseBody = Strings.toString(XContentType.JSON, getResponse); assertTrue(responseBody.contains("\"errors\":true")); assertTrue(responseBody.contains("\"total\":" + totalActionCount)); assertTrue(responseBody.contains("\"success\":" + successActionCount)); diff --git a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploaderTests.java b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploaderTests.java index 2e699947..e52834d8 100644 --- a/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploaderTests.java +++ b/src/test/java/org/opensearch/geospatial/action/upload/geojson/UploaderTests.java @@ -22,11 +22,11 @@ import java.util.function.Supplier; import java.util.stream.Collectors; -import org.opensearch.action.ActionListener; import org.opensearch.action.StepListener; import 
org.opensearch.action.bulk.BulkItemResponse; import org.opensearch.action.bulk.BulkRequestBuilder; import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.core.action.ActionListener; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.geospatial.stats.upload.UploadMetric; import org.opensearch.geospatial.stats.upload.UploadStats; diff --git a/src/test/java/org/opensearch/geospatial/exceptions/ConcurrentModificationExceptionTests.java b/src/test/java/org/opensearch/geospatial/exceptions/ConcurrentModificationExceptionTests.java new file mode 100644 index 00000000..caaf6e89 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/exceptions/ConcurrentModificationExceptionTests.java @@ -0,0 +1,42 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.test.OpenSearchTestCase; + +import lombok.SneakyThrows; + +public class ConcurrentModificationExceptionTests extends OpenSearchTestCase { + public void testConstructor_whenCreated_thenSucceed() { + ConcurrentModificationException exception = new ConcurrentModificationException("Resource is being modified by another processor"); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + public void testConstructor_whenCreatedWithRootCause_thenSucceed() { + ConcurrentModificationException exception = new ConcurrentModificationException( + "Resource is being modified by another processor", + new RuntimeException() + ); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + @SneakyThrows + public void testConstructor_whenCreatedWithStream_thenSucceed() { + ConcurrentModificationException exception = new ConcurrentModificationException( + "New datasource is not compatible with existing 
datasource" + ); + + BytesStreamOutput output = new BytesStreamOutput(); + exception.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + ConcurrentModificationException copiedException = new ConcurrentModificationException(input); + assertEquals(exception.getMessage(), copiedException.getMessage()); + assertEquals(exception.status(), copiedException.status()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceExceptionTests.java b/src/test/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceExceptionTests.java new file mode 100644 index 00000000..009cc1a1 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/exceptions/IncompatibleDatasourceExceptionTests.java @@ -0,0 +1,44 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.test.OpenSearchTestCase; + +import lombok.SneakyThrows; + +public class IncompatibleDatasourceExceptionTests extends OpenSearchTestCase { + public void testConstructor_whenCreated_thenSucceed() { + IncompatibleDatasourceException exception = new IncompatibleDatasourceException( + "New datasource is not compatible with existing datasource" + ); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + public void testConstructor_whenCreatedWithRootCause_thenSucceed() { + IncompatibleDatasourceException exception = new IncompatibleDatasourceException( + "New datasource is not compatible with existing datasource", + new RuntimeException() + ); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + @SneakyThrows + public void testConstructor_whenCreatedWithStream_thenSucceed() { + IncompatibleDatasourceException exception = new 
IncompatibleDatasourceException( + "New datasource is not compatible with existing datasource" + ); + + BytesStreamOutput output = new BytesStreamOutput(); + exception.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + IncompatibleDatasourceException copiedException = new IncompatibleDatasourceException(input); + assertEquals(exception.getMessage(), copiedException.getMessage()); + assertEquals(exception.status(), copiedException.status()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/exceptions/ResourceInUseExceptionTests.java b/src/test/java/org/opensearch/geospatial/exceptions/ResourceInUseExceptionTests.java new file mode 100644 index 00000000..139566b0 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/exceptions/ResourceInUseExceptionTests.java @@ -0,0 +1,37 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.exceptions; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.test.OpenSearchTestCase; + +import lombok.SneakyThrows; + +public class ResourceInUseExceptionTests extends OpenSearchTestCase { + public void testConstructor_whenCreated_thenSucceed() { + ResourceInUseException exception = new ResourceInUseException("Resource is in use"); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + public void testConstructor_whenCreatedWithRootCause_thenSucceed() { + ResourceInUseException exception = new ResourceInUseException("Resource is in use", new RuntimeException()); + assertEquals(RestStatus.BAD_REQUEST, exception.status()); + } + + @SneakyThrows + public void testConstructor_whenCreatedWithStream_thenSucceed() { + ResourceInUseException exception = new ResourceInUseException("New datasource is not compatible with existing datasource"); + + 
BytesStreamOutput output = new BytesStreamOutput(); + exception.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + ResourceInUseException copiedException = new ResourceInUseException(input); + assertEquals(exception.getMessage(), copiedException.getMessage()); + assertEquals(exception.status(), copiedException.status()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/common/xyshape/ShapeObjectBuilder.java b/src/test/java/org/opensearch/geospatial/index/common/xyshape/ShapeObjectBuilder.java index 16c2d9ef..b40b91df 100644 --- a/src/test/java/org/opensearch/geospatial/index/common/xyshape/ShapeObjectBuilder.java +++ b/src/test/java/org/opensearch/geospatial/index/common/xyshape/ShapeObjectBuilder.java @@ -15,6 +15,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import org.apache.lucene.geo.XYPoint; import org.opensearch.common.Randomness; import org.opensearch.common.geo.ShapeRelation; import org.opensearch.geo.GeometryTestUtils; @@ -208,4 +209,15 @@ public static Geometry randomGeometryWithXYCoordinates() { throw new RuntimeException("failed to generate random geometry"); } + public static List getRandomXYPoints(int size, boolean hasZCoords) { + List xyPoints = new ArrayList<>(); + + for (int i = 0; i < size; i++) { + Point point = randomPoint(hasZCoords); + XYPoint xyPoint = new XYPoint((float) point.getX(), (float) point.getY()); + xyPoints.add(xyPoint); + } + return xyPoints; + } + } diff --git a/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperIT.java b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperIT.java new file mode 100644 index 00000000..6935cc9b --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperIT.java @@ -0,0 +1,135 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.geospatial.index.mapper.xypoint; + +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import org.opensearch.common.settings.Settings; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.Point; +import org.opensearch.geospatial.GeospatialRestTestCase; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder; + +public class XYPointFieldMapperIT extends GeospatialRestTestCase { + private static final String FIELD_X_KEY = "x"; + private static final String FIELD_Y_KEY = "y"; + private static final String FIELD_GEOJSON_TYPE_KEY = "type"; + private static final String FIELD_GEOJSON_TYPE_VALUE = "Point"; + private static final String FIELD_GEOJSON_COORDINATES_KEY = "coordinates"; + + public void testMappingWithXYPointField() throws Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Map fieldNameTypeMap = getIndexProperties(indexName); + assertTrue("field name is not found inside mapping", fieldNameTypeMap.containsKey(fieldName)); + final Map fieldType = (Map) fieldNameTypeMap.get(fieldName); + assertEquals("invalid field type", XYPointFieldMapper.CONTENT_TYPE, fieldType.get(FIELD_TYPE_KEY)); + deleteIndex(indexName); + } + + public void testIndexWithXYPointFieldAsWKTFormat() throws Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Point point = ShapeObjectBuilder.randomPoint(randomBoolean()); + String docID = indexDocument(indexName, getDocumentWithWKTValueForXYPoint(fieldName, point)); + assertTrue("failed to index document", 
getIndexDocumentCount(indexName) > 0); + final Map document = getDocument(docID, indexName); + assertNotNull("failed to get indexed document", document); + assertEquals("failed to index xy_point", point.toString(), document.get(fieldName)); + deleteIndex(indexName); + } + + public void testIndexWithXYPointFieldAsArrayFormat() throws Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Point point = ShapeObjectBuilder.randomPoint(randomBoolean()); + String docID = indexDocument(indexName, getDocumentWithArrayValueForXYPoint(fieldName, point)); + assertTrue("failed to index document", getIndexDocumentCount(indexName) > 0); + final Map document = getDocument(docID, indexName); + assertNotNull("failed to get indexed document", document); + assertEquals("failed to index xy_point", List.of(point.getY(), point.getX()), document.get(fieldName)); + deleteIndex(indexName); + } + + public void testIndexWithXYPointFieldAsStringFormat() throws Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Point point = ShapeObjectBuilder.randomPoint(randomBoolean()); + String pointAsString = point.getX() + "," + point.getY(); + String docID = indexDocument(indexName, getDocumentWithStringValueForXYPoint(fieldName, pointAsString)); + assertTrue("failed to index document", getIndexDocumentCount(indexName) > 0); + final Map document = getDocument(docID, indexName); + assertNotNull("failed to get indexed document", document); + assertEquals("failed to index xy_point", pointAsString, document.get(fieldName)); + deleteIndex(indexName); + } + + public void testIndexWithXYPointFieldAsObjectFormat() throws 
Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Point point = ShapeObjectBuilder.randomPoint(randomBoolean()); + String docID = indexDocument(indexName, getDocumentWithObjectValueForXYPoint(fieldName, point)); + assertTrue("failed to index document", getIndexDocumentCount(indexName) > 0); + final Map document = getDocument(docID, indexName); + assertNotNull("failed to get indexed document", document); + String expectedValue = String.format(Locale.ROOT, "{x=%s, y=%s}", point.getX(), point.getY()); + assertEquals("failed to index xy_point", expectedValue, document.get(fieldName).toString()); + deleteIndex(indexName); + } + + public void testIndexWithXYPointFieldAsGeoJsonFormat() throws Exception { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String fieldName = GeospatialTestHelper.randomLowerCaseString(); + createIndex(indexName, Settings.EMPTY, Map.of(fieldName, XYPointFieldMapper.CONTENT_TYPE)); + final Point point = ShapeObjectBuilder.randomPoint(randomBoolean()); + String docID = indexDocument(indexName, getDocumentWithGeoJsonValueForXYPoint(fieldName, point)); + assertTrue("failed to index document", getIndexDocumentCount(indexName) > 0); + final Map document = getDocument(docID, indexName); + assertNotNull("failed to get indexed document", document); + assertEquals("failed to index xy_point type", FIELD_GEOJSON_TYPE_VALUE, ((Map) document.get(fieldName)).get(FIELD_GEOJSON_TYPE_KEY)); + assertEquals("failed to index xy_point coordinates", List.of(point.getX(), point.getY()), ((Map) document.get(fieldName)).get(FIELD_GEOJSON_COORDINATES_KEY)); + deleteIndex(indexName); + } + + private String getDocumentWithWKTValueForXYPoint(String fieldName, Geometry geometry) throws Exception { + return buildContentAsString(build -> build.field(fieldName, geometry.toString())); + } + + private String getDocumentWithArrayValueForXYPoint(String fieldName, Point 
point) throws Exception { + return buildContentAsString(build -> build.field(fieldName, new double[] { point.getY(), point.getX() })); + } + + private String getDocumentWithStringValueForXYPoint(String fieldName, String pointAsString) throws Exception { + return buildContentAsString(build -> build.field(fieldName, pointAsString)); + } + + private String getDocumentWithObjectValueForXYPoint(String fieldName, Point point) throws Exception { + return buildContentAsString(build -> { + build.startObject(fieldName); + build.field(FIELD_X_KEY, point.getX()); + build.field(FIELD_Y_KEY, point.getY()); + build.endObject(); + }); + } + + private String getDocumentWithGeoJsonValueForXYPoint(String fieldName, Point point) throws Exception { + return buildContentAsString(build -> { + build.startObject(fieldName); + build.field(FIELD_GEOJSON_TYPE_KEY, FIELD_GEOJSON_TYPE_VALUE); + build.array(FIELD_GEOJSON_COORDINATES_KEY, new double[] { point.getX(), point.getY() }); + build.endObject(); + }); + } + +} diff --git a/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperTests.java b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperTests.java new file mode 100644 index 00000000..6acc1848 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointFieldMapperTests.java @@ -0,0 +1,332 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; + +import org.apache.lucene.index.IndexableField; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.plugin.GeospatialPlugin; +import org.opensearch.index.mapper.AbstractPointGeometryFieldMapper; +import org.opensearch.index.mapper.DocumentMapper; +import 
org.opensearch.index.mapper.FieldMapperTestCase2; +import org.opensearch.index.mapper.Mapper; +import org.opensearch.index.mapper.MapperParsingException; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.ParsedDocument; +import org.opensearch.plugins.Plugin; + +public class XYPointFieldMapperTests extends FieldMapperTestCase2 { + + private static final String FIELD_TYPE_NAME = "type"; + private static final String FIELD_NAME = "field"; + private static final String FIELD_X_KEY = "x"; + private static final String FIELD_Y_KEY = "y"; + + private static final String FIELD_GEOJSON_TYPE_KEY = "type"; + private static final String FIELD_GEOJSON_TYPE_VALUE = "Point"; + private static final String FIELD_GEOJSON_COORDINATES_KEY = "coordinates"; + private final static Integer MIN_NUM_POINTS = 1; + private final static Integer MAX_NUM_POINTS = 10; + + @Override + protected XYPointFieldMapper.XYPointFieldMapperBuilder newBuilder() { + return new XYPointFieldMapper.XYPointFieldMapperBuilder(GeospatialTestHelper.randomLowerCaseString()); + } + + @Override + protected void minimalMapping(XContentBuilder xContentBuilder) throws IOException { + xContentBuilder.field(FIELD_TYPE_NAME, XYPointFieldMapper.CONTENT_TYPE); + } + + @Override + protected void writeFieldValue(XContentBuilder xContentBuilder) throws IOException { + xContentBuilder.value("POINT (14.0 15.0)"); + } + + @Override + protected void registerParameters(ParameterChecker parameterChecker) throws IOException { + parameterChecker.registerUpdateCheck( + b -> b.field(AbstractPointGeometryFieldMapper.Names.IGNORE_MALFORMED.getPreferredName(), true), + mapper -> { + assertTrue("invalid mapper retrieved", mapper instanceof XYPointFieldMapper); + XYPointFieldMapper xyPointFieldMapper = (XYPointFieldMapper) mapper; + assertTrue("param [ ignore_malformed ] is not updated", xyPointFieldMapper.ignoreMalformed().value()); + } + ); + parameterChecker.registerUpdateCheck( + b -> 
b.field(AbstractPointGeometryFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), false), + mapper -> { + assertTrue("invalid mapper retrieved", mapper instanceof XYPointFieldMapper); + XYPointFieldMapper xyPointFieldMapper = (XYPointFieldMapper) mapper; + assertFalse("param [ ignore_z_value ] is not updated", xyPointFieldMapper.ignoreZValue().value()); + } + ); + + XYPoint point = new XYPoint(); + String pointAsString = "23.35,-50.55"; + point.resetFromString(pointAsString, true); + parameterChecker.registerUpdateCheck( + b -> b.field(AbstractPointGeometryFieldMapper.Names.NULL_VALUE.getPreferredName(), pointAsString), + mapper -> { + assertTrue("invalid mapper retrieved", mapper instanceof XYPointFieldMapper); + XYPointFieldMapper xyPointFieldMapper = (XYPointFieldMapper) mapper; + assertEquals("param [ null_value ] is not updated", point, xyPointFieldMapper.nullValue()); + } + ); + } + + @Override + protected Set unsupportedProperties() { + return Set.of("analyzer", "similarity"); + } + + @Override + protected Collection getPlugins() { + return Collections.singletonList(new GeospatialPlugin()); + } + + @Override + protected boolean supportsMeta() { + return false; + } + + @Override + protected boolean supportsOrIgnoresBoost() { + return false; + } + + public final void testExistsQueryDocValuesDisabled() throws IOException { + MapperService mapperService = createMapperService(fieldMapping(builder -> { + minimalMapping(builder); + builder.field("doc_values", false); + })); + assertExistsQuery(mapperService); + assertParseMinimalWarnings(); + } + + public void testDefaultConfiguration() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + Mapper fieldMapper = mapper.mappers().getMapper(FIELD_NAME); + assertTrue("Invalid FieldMapper retrieved", fieldMapper instanceof XYPointFieldMapper); + XYPointFieldMapper xyPointFieldMapper = (XYPointFieldMapper) fieldMapper; + + assertTrue("param [ docs_value ] default value 
should be true", xyPointFieldMapper.fieldType().hasDocValues()); + assertEquals("param [ ignore_malformed ] default value should be false", xyPointFieldMapper.ignoreMalformed().value(), false); + assertEquals("param [ ignore_z_value ] default value should be true", xyPointFieldMapper.ignoreZValue().value(), true); + assertNull("param [ null_value ] default value should be null", xyPointFieldMapper.nullValue()); + + } + + public void testFieldTypeContentType() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + Mapper fieldMapper = mapper.mappers().getMapper(FIELD_NAME); + assertTrue("Invalid FieldMapper retrieved", fieldMapper instanceof XYPointFieldMapper); + final XYPointFieldMapper.XYPointFieldType fieldType = ((XYPointFieldMapper) fieldMapper).fieldType(); + assertEquals("invalid field type name", fieldType.typeName(), XYPointFieldMapper.CONTENT_TYPE); + } + + public void testIndexAsWKT() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse( + source(builder -> builder.field(FIELD_NAME, "POINT (" + randomDouble() + " " + randomDouble() + ")")) + ); + final IndexableField actualFieldValue = doc.rootDoc().getField(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValue); + } + + public void testIndexAsObject() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse( + source( + builder -> builder.startObject(FIELD_NAME).field(FIELD_X_KEY, randomDouble()).field(FIELD_Y_KEY, randomDouble()).endObject() + ) + ); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2, actualFieldValues.length); + } + + public void testIndexAsArray() throws IOException { + DocumentMapper mapper = 
createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse( + source(builder -> builder.startArray(FIELD_NAME).value(randomDouble()).value(randomDouble()).endArray()) + ); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2, actualFieldValues.length); + } + + public void testIndexAsString() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> builder.field(FIELD_NAME, randomDouble() + "," + randomDouble()))); + final IndexableField actualFieldValue = doc.rootDoc().getField(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValue); + } + + public void testIndexAsGeoJson() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse( + source( + builder -> builder.startObject(FIELD_NAME) + .field(FIELD_GEOJSON_TYPE_KEY, FIELD_GEOJSON_TYPE_VALUE) + .array(FIELD_GEOJSON_COORDINATES_KEY, new double[] { randomDouble(), randomDouble() }) + .endObject() + ) + ); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2, actualFieldValues.length); + } + + public void testIndexAsArrayMultiPoints() throws IOException { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> { + builder.startArray(FIELD_NAME); + for (int i = 0; i < numOfPoints; i++) { + builder.startArray().value(randomDouble()).value(randomDouble()).endArray(); + } + builder.endArray(); + })); + final IndexableField[] actualFieldValues = 
doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2 * numOfPoints, actualFieldValues.length); + } + + public void testIndexAsObjectMultiPoints() throws IOException { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> { + builder.startArray(FIELD_NAME); + for (int i = 0; i < numOfPoints; i++) { + builder.startObject().field(FIELD_X_KEY, randomDouble()).field(FIELD_Y_KEY, randomDouble()).endObject(); + } + builder.endArray(); + })); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2 * numOfPoints, actualFieldValues.length); + } + + public void testIndexAsStringMultiPoints() throws IOException { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> { + builder.startArray(FIELD_NAME); + for (int i = 0; i < numOfPoints; i++) { + builder.value(randomDouble() + "," + randomDouble()); + } + builder.endArray(); + })); + + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2 * numOfPoints, actualFieldValues.length); + } + + public void testIndexAsWKTMultiPoints() throws IOException { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> { + builder.startArray(FIELD_NAME); + for (int i = 0; i < numOfPoints; i++) { + builder.value("POINT (" + randomDouble() 
+ " " + randomDouble() + ")"); + } + builder.endArray(); + })); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2 * numOfPoints, actualFieldValues.length); + } + + public void testIndexAsGeoJsonMultiPoints() throws IOException { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + ParsedDocument doc = mapper.parse(source(builder -> { + builder.startArray(FIELD_NAME); + for (int i = 0; i < numOfPoints; i++) { + builder.startObject() + .field(FIELD_GEOJSON_TYPE_KEY, FIELD_GEOJSON_TYPE_VALUE) + .array(FIELD_GEOJSON_COORDINATES_KEY, new double[] { randomDouble(), randomDouble() }) + .endObject(); + } + builder.endArray(); + })); + final IndexableField[] actualFieldValues = doc.rootDoc().getFields(FIELD_NAME); + assertNotNull("FieldValue is null", actualFieldValues); + assertEquals("mismatch in field values count", 2 * numOfPoints, actualFieldValues.length); + } + + public void testIgnoreZValue() throws IOException { + boolean z_value = randomBoolean(); + DocumentMapper mapper = createDocumentMapper( + fieldMapping( + builder -> builder.field(FIELD_TYPE_NAME, XYPointFieldMapper.CONTENT_TYPE) + .field(AbstractPointGeometryFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), z_value) + ) + ); + if (z_value) { + ParsedDocument doc = mapper.parse( + source(builder -> builder.field(FIELD_NAME, randomDouble() + "," + randomDouble() + "," + randomDouble())) + ); + final IndexableField actualFieldValue = doc.rootDoc().getField(FIELD_NAME); + assertNotNull("failed to ignore z value even if [ignore_z_value] is [true]", actualFieldValue); + } else { + MapperParsingException exception = expectThrows( + MapperParsingException.class, + () -> mapper.parse( + source(builder -> builder.field(FIELD_NAME, randomDouble() + "," + randomDouble() + "," 
+ randomDouble())) + ) + ); + assertTrue( + "failed to throw exception even if [ignore_z_value] is false", + exception.getCause().getMessage().contains("but [ignore_z_value] parameter is [false]") + ); + } + } + + public void testIgnoreMalformed() throws IOException { + boolean ignore_malformed_value = randomBoolean(); + DocumentMapper mapper = createDocumentMapper( + fieldMapping( + builder -> builder.field(FIELD_TYPE_NAME, XYPointFieldMapper.CONTENT_TYPE) + .field(AbstractPointGeometryFieldMapper.Names.IGNORE_MALFORMED.getPreferredName(), ignore_malformed_value) + ) + ); + if (ignore_malformed_value) { + ParsedDocument doc = mapper.parse(source(builder -> builder.field(FIELD_NAME, "50.0,abcd"))); + assertNull("failed to ignore malformed point even if [ignore_malformed] is [true]", doc.rootDoc().getField(FIELD_NAME)); + } else { + MapperParsingException exception = expectThrows( + MapperParsingException.class, + () -> mapper.parse(source(builder -> builder.field(FIELD_NAME, "50.0,abcd"))) + ); + assertTrue( + "failed to throw exception even if [ignore_malformed] is [false]", + exception.getCause().getMessage().contains("[y] must be a number") + ); + } + } + + public void testNullValue() throws Exception { + DocumentMapper mapper = createDocumentMapper( + fieldMapping( + builder -> builder.field(FIELD_TYPE_NAME, XYPointFieldMapper.CONTENT_TYPE) + .field(AbstractPointGeometryFieldMapper.Names.NULL_VALUE.getPreferredName(), "91,181") + ) + ); + Mapper fieldMapper = mapper.mappers().getMapper(FIELD_NAME); + + AbstractPointGeometryFieldMapper.ParsedPoint nullValue = ((XYPointFieldMapper) fieldMapper).nullValue(); + assertEquals("assertion failed even if [null_value] parameter is set", nullValue, new XYPoint(91, 181)); + } + +} diff --git a/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexerTests.java b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexerTests.java new file mode 100644 index 00000000..90b1eb69 --- /dev/null 
+++ b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointIndexerTests.java @@ -0,0 +1,53 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import static org.mockito.Mockito.mock; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.getRandomXYPoints; + +import java.util.Collections; +import java.util.List; + +import org.apache.lucene.geo.XYPoint; +import org.apache.lucene.index.IndexableField; +import org.opensearch.index.mapper.ParseContext; +import org.opensearch.test.OpenSearchTestCase; + +public class XYPointIndexerTests extends OpenSearchTestCase { + private XYPointIndexer indexer; + private ParseContext parseContext; + private final static String fieldName = "geometry"; + private final static Integer MIN_NUM_POINTS = 1; + private final static Integer MAX_NUM_POINTS = 10; + + @Override + public void setUp() throws Exception { + super.setUp(); + indexer = new XYPointIndexer(fieldName); + parseContext = mock(ParseContext.class); + } + + public void testIndexingNullGeometry() { + expectThrows(NullPointerException.class, () -> indexer.prepareForIndexing(null)); + } + + public void testIndexingEmptyList() { + expectThrows(IllegalArgumentException.class, () -> indexer.prepareForIndexing(Collections.emptyList())); + } + + public void testPrepareIndexing() { + var point = mock(org.opensearch.geospatial.index.mapper.xypoint.XYPoint.class); + List points = List.of(point); + assertNotNull("failed to convert xypoints from opensearch to lucene type", indexer.prepareForIndexing(points)); + } + + public void testIndexShape() { + int numOfPoints = randomIntBetween(MIN_NUM_POINTS, MAX_NUM_POINTS); + List xyPoints = getRandomXYPoints(numOfPoints, randomBoolean()); + List indexableFields = indexer.indexShape(parseContext, xyPoints); + assertEquals("failed to index xypoints", numOfPoints, indexableFields.size()); + } +} diff --git 
a/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParserTests.java b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParserTests.java new file mode 100644 index 00000000..b7d8c97e --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/mapper/xypoint/XYPointParserTests.java @@ -0,0 +1,255 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.mapper.xypoint; + +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; + +import java.io.IOException; + +import org.opensearch.OpenSearchParseException; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.OpenSearchTestCase; + +public class XYPointParserTests extends OpenSearchTestCase { + private static final String FIELD_X_KEY = "x"; + private static final String FIELD_Y_KEY = "y"; + + public void testXYPointReset() { + double x = randomDouble(); + double y = randomDouble(); + + XYPoint point = new XYPoint(); + + assertEquals("Reset from WKT", point.resetFromString("POINT(" + y + " " + x + ")", randomBoolean()), point.reset(x, y)); + assertEquals("Reset from Coordinates", point.resetFromString(x + ", " + y, randomBoolean()), point.reset(x, y)); + + } + + public void testResetFromWKTInvalid() { + XYPoint point = new XYPoint(); + OpenSearchParseException e = expectThrows( + OpenSearchParseException.class, + () -> point.resetFromString("NOT A POINT(1 2)", randomBoolean()) + ); + assertEquals("Validation failed for Invalid WKT", "Invalid WKT format, [NOT A POINT(1 2)]", e.getMessage()); + + OpenSearchParseException e2 = expectThrows( + OpenSearchParseException.class, + () -> point.resetFromString("MULTIPOINT(1 2, 3 4)", randomBoolean()) + ); + assertEquals( + "Validation failed for 
invalid WKT primitive", + "[xy_point] supports only POINT among WKT primitives, but found [MULTIPOINT]", + e2.getMessage() + ); + } + + public void testResetFromCoordinatesInvalid() { + XYPoint point = new XYPoint(); + OpenSearchParseException e = expectThrows( + OpenSearchParseException.class, + () -> point.resetFromString("20.4, 50.6, 70.8, -200.6", randomBoolean()) + ); + assertEquals("Validation failed for checking count of coordinates", "expected 2 or 3 coordinates, but found: [4]", e.getMessage()); + + OpenSearchParseException e2 = expectThrows(OpenSearchParseException.class, () -> point.resetFromString("20.4, 50.6, 70.8", false)); + assertEquals( + "Validation failed for [ignore_z_value] parameter", + "Exception parsing coordinates: found Z value [70.8] but [ignore_z_value] parameter is [false]", + e2.getMessage() + ); + + OpenSearchParseException e3 = expectThrows( + OpenSearchParseException.class, + () -> point.resetFromString("abcd, 50.6", randomBoolean()) + ); + assertEquals("Validation failed even if x is not a number", "[x] must be a number", e3.getMessage()); + + OpenSearchParseException e4 = expectThrows( + OpenSearchParseException.class, + () -> point.resetFromString("50.6, abcd", randomBoolean()) + ); + assertEquals("Validation failed even if y is not a number", "[y] must be a number", e4.getMessage()); + } + + public void testXYPointParsing() throws IOException { + XYPoint randomXYPoint = new XYPoint(randomDouble(), randomDouble()); + + XYPoint point = XYPointParser.parseXYPoint(xyAsObject(randomXYPoint.getX(), randomXYPoint.getY()), randomBoolean()); + assertEquals("Parsing XYPoint as an object failed", randomXYPoint, point); + + XYPoint point2 = XYPointParser.parseXYPoint(xyAsString(randomXYPoint.getX(), randomXYPoint.getY()), randomBoolean()); + assertEquals("Parsing XYPoint as a string failed", randomXYPoint, point2); + + XYPoint point3 = XYPointParser.parseXYPoint(xyAsArray(randomXYPoint.getX(), randomXYPoint.getY()), randomBoolean()); + 
assertEquals("Parsing XYPoint as an array failed", randomXYPoint, point3); + + XYPoint point4 = XYPointParser.parseXYPoint(xyAsWKT(randomXYPoint.getX(), randomXYPoint.getY()), randomBoolean()); + assertEquals("Parsing XYPoint as a WKT failed", randomXYPoint, point4); + } + + public void testInvalidField() throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field(FIELD_Y_KEY, 0).field(FIELD_X_KEY, 0).field("test", 0); + content.endObject(); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + OpenSearchParseException e = expectThrows( + OpenSearchParseException.class, + () -> XYPointParser.parseXYPoint(parser, randomBoolean()) + ); + assertEquals("Validation for invalid fields failed", "field must be either [x|y], or [type|coordinates]", e.getMessage()); + } + } + + public void testParsingInvalidObject() throws IOException { + // Send empty string instead of double for y coordinate + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field(FIELD_X_KEY, randomDouble()).field(FIELD_Y_KEY, ""); + content.endObject(); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); + parser.nextToken(); + OpenSearchParseException e = expectThrows( + OpenSearchParseException.class, + () -> XYPointParser.parseXYPoint(parser, randomBoolean()) + ); + assertEquals("Validation failed for invalid x and y values", "[x] and [y] must be valid double values", e.getMessage()); + + // Skip the 'y' field and y coordinate + XContentBuilder content1 = JsonXContent.contentBuilder(); + content1.startObject(); + content1.field(FIELD_X_KEY, randomDouble()); + content1.endObject(); + XContentParser parser1 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content1)); + parser1.nextToken(); + OpenSearchParseException e1 = expectThrows( + 
OpenSearchParseException.class, + () -> XYPointParser.parseXYPoint(parser1, randomBoolean()) + ); + assertEquals("Validation failed even if field [y] is missing", "field [y] missing", e1.getMessage()); + + // Skip the 'x' field and x coordinate + XContentBuilder content2 = JsonXContent.contentBuilder(); + content2.startObject(); + content2.field(FIELD_Y_KEY, randomDouble()); + content2.endObject(); + XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content2)); + parser2.nextToken(); + OpenSearchParseException e2 = expectThrows( + OpenSearchParseException.class, + () -> XYPointParser.parseXYPoint(parser2, randomBoolean()) + ); + assertEquals("Validation failed even if field [x] is missing", "field [x] missing", e2.getMessage()); + + } + + private XContentParser xyAsObject(double x, double y) throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startObject(); + content.field(FIELD_X_KEY, x).field(FIELD_Y_KEY, y); + content.endObject(); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); + parser.nextToken(); + return parser; + } + + private XContentParser xyAsString(double x, double y) throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.value(x + ", " + y); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); + parser.nextToken(); + return parser; + } + + private XContentParser xyAsArray(double x, double y) throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.startArray().value(x).value(y).endArray(); + XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); + parser.nextToken(); + return parser; + } + + private XContentParser xyAsWKT(double x, double y) throws IOException { + XContentBuilder content = JsonXContent.contentBuilder(); + content.value("POINT (" + x + " " + y + ")"); + 
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); + parser.nextToken(); + return parser; + } + + public void testParserGeoPointGeoJson() throws IOException { + XYPoint xyPoint = new XYPoint(randomDouble(), randomDouble()); + double[] coordinates = { xyPoint.getX(), xyPoint.getY() }; + XContentBuilder json1 = jsonBuilder().startObject().field("type", "Point").array("coordinates", coordinates).endObject(); + try (XContentParser parser = createParser(json1)) { + parser.nextToken(); + XYPoint parsedPoint = XYPointParser.parseXYPoint(parser, randomBoolean()); + assertEquals(xyPoint, parsedPoint); + } + + XContentBuilder json2 = jsonBuilder().startObject().field("type", "PoInT").array("coordinates", coordinates).endObject(); + try (XContentParser parser = createParser(json2)) { + parser.nextToken(); + XYPoint parsedPoint = XYPointParser.parseXYPoint(parser, randomBoolean()); + assertEquals(xyPoint, parsedPoint); + } + } + + public void testParserGeoPointGeoJsonMissingField() throws IOException { + XYPoint xyPoint = new XYPoint(randomDouble(), randomDouble()); + double[] coordinates = { xyPoint.getX(), xyPoint.getY() }; + XContentBuilder missingType = jsonBuilder().startObject().array("coordinates", coordinates).endObject(); + expectParseException(missingType, "field [type] missing"); + + XContentBuilder missingCoordinates = jsonBuilder().startObject().field("type", "Point").endObject(); + expectParseException(missingCoordinates, "field [coordinates] missing"); + } + + public void testParserGeoPointGeoJsonUnknownField() throws IOException { + XYPoint xyPoint = new XYPoint(randomDouble(), randomDouble()); + double[] coordinates = { xyPoint.getX(), xyPoint.getY() }; + XContentBuilder unknownField = jsonBuilder().startObject() + .field("type", "Point") + .array("coordinates", coordinates) + .field("unknown", "value") + .endObject(); + expectParseException(unknownField, "field must be either [x|y], or [type|coordinates]"); + } + + 
public void testParserGeoPointGeoJsonInvalidValue() throws IOException { + XYPoint xyPoint = new XYPoint(randomDouble(), randomDouble()); + double[] coordinates = { xyPoint.getX(), xyPoint.getY() }; + XContentBuilder invalidGeoJsonType = jsonBuilder().startObject() + .field("type", "invalid") + .array("coordinates", coordinates) + .endObject(); + expectParseException(invalidGeoJsonType, "type must be Point"); + + String[] coordinatesInString = { String.valueOf(xyPoint.getX()), String.valueOf(xyPoint.getY()) }; + XContentBuilder invalideCoordinatesType = jsonBuilder().startObject() + .field("type", "Point") + .array("coordinates", coordinatesInString) + .endObject(); + expectParseException(invalideCoordinatesType, "numeric value expected"); + } + + private void expectParseException(XContentBuilder content, String errMsg) throws IOException { + try (XContentParser parser = createParser(content)) { + parser.nextToken(); + OpenSearchParseException ex = expectThrows( + OpenSearchParseException.class, + () -> XYPointParser.parseXYPoint(parser, randomBoolean()) + ); + assertEquals(errMsg, ex.getMessage()); + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeFieldMapperTests.java b/src/test/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeFieldMapperTests.java index 34bba616..f02c5268 100644 --- a/src/test/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeFieldMapperTests.java +++ b/src/test/java/org/opensearch/geospatial/index/mapper/xyshape/XYShapeFieldMapperTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.index.IndexableField; import org.opensearch.common.geo.GeoShapeType; import org.opensearch.common.geo.builders.ShapeBuilder; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.geospatial.plugin.GeospatialPlugin; import 
org.opensearch.index.mapper.AbstractShapeGeometryFieldMapper; diff --git a/src/test/java/org/opensearch/geospatial/index/query/AbstractXYShapeQueryTestCase.java b/src/test/java/org/opensearch/geospatial/index/query/AbstractXYShapeQueryTestCase.java new file mode 100644 index 00000000..12508ca6 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/query/AbstractXYShapeQueryTestCase.java @@ -0,0 +1,203 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query; + +import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.geo.ShapeRelation; +import org.opensearch.common.settings.Settings; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geospatial.GeospatialRestTestCase; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import org.opensearch.geospatial.index.mapper.xyshape.XYShapeFieldMapper; +import org.opensearch.search.SearchHit; + +public abstract class AbstractXYShapeQueryTestCase extends GeospatialRestTestCase { + + public abstract String getIndexName(); + + public abstract String getFieldName(); + + public abstract String getContentType(); + + public void testNullShape() throws Exception { + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), 
getContentType())); + String body = buildContentAsString(builder -> builder.field(getFieldName(), (String) null)); + String docID = indexDocument(getIndexName(), body); + + final Map document = getDocument(docID, getIndexName()); + assertTrue("failed to index document with type", document.containsKey(getFieldName())); + assertNull("failed to accept null value", document.get(getFieldName())); + + deleteIndex(getIndexName()); + } + + public void testIndexPointsFilterRectangleWithIntersectsRelation() throws Exception { + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), getContentType())); + final String firstDocumentID = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-30 -30)"); + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-45 -50)"); + + Rectangle rectangle = new Rectangle(-45, 45, 45, -45); + final SearchResponse searchResponse = searchUsingShapeRelation(getIndexName(), getFieldName(), rectangle, ShapeRelation.INTERSECTS); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, 1); + MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(firstDocumentID)); + + deleteIndex(getIndexName()); + } + + public void testIndexPointsIndexedRectangleMatches() throws Exception { + String secondDocumentID = ""; + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), getContentType())); + // Will index two points and search with envelope that will intersect only one point + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-30 -30)"); + if (XYPointFieldMapper.CONTENT_TYPE.equals(getContentType())) { + secondDocumentID = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-45 -50)"); + } else { + secondDocumentID = indexDocumentUsingGeoJSON(getIndexName(), getFieldName(), new Point(-45, -50)); + } + + // create an index to insert shape + String indexedShapeIndex = randomLowerCaseString(); + String indexedShapePath = randomLowerCaseString(); + 
createIndex(indexedShapeIndex, Settings.EMPTY, Map.of(indexedShapePath, XYShapeFieldMapper.CONTENT_TYPE)); + + final String shapeDocID = indexDocumentUsingWKT(indexedShapeIndex, indexedShapePath, "BBOX(-50, -40, -45, -55)"); + + final SearchResponse searchResponse = searchUsingIndexedShapeIndex( + getIndexName(), + indexedShapeIndex, + indexedShapePath, + shapeDocID, + getFieldName() + ); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, 1); + MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(secondDocumentID)); + + deleteIndex(getIndexName()); + deleteIndex(indexedShapeIndex); + } + + public void testIndexPointsCircle() throws Exception { + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), getContentType())); + + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-131 -30)"); + final String secondDocumentID = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-45 -50)"); + + Circle circle = new Circle(-30, -30, 100); + + final SearchResponse searchResponse = searchUsingShapeRelation(getIndexName(), getFieldName(), circle, ShapeRelation.INTERSECTS); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, 1); + MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(secondDocumentID)); + + deleteIndex(getIndexName()); + } + + public void testIndexPointsPolygon() throws Exception { + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), getContentType())); + + final String firstDocumentID = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-30 -30)"); + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-45 -50)"); + + double[] x = new double[] { -35, -35, -25, -25, -35 }; + double[] y = new double[] { -35, -25, -25, -35, -35 }; + LinearRing ring = new LinearRing(x, y); + Polygon polygon = new Polygon(ring); + + final SearchResponse searchResponse = searchUsingShapeRelation(getIndexName(), getFieldName(), 
polygon, ShapeRelation.INTERSECTS); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, 1); + MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(firstDocumentID)); + + deleteIndex(getIndexName()); + } + + public void testIndexPointsMultiPolygon() throws Exception { + String thirdDocumentId = ""; + createIndex(getIndexName(), Settings.EMPTY, Map.of(getFieldName(), getContentType())); + + final String firstDocumentID = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-30 -30)"); + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-40 -40)"); + if (XYPointFieldMapper.CONTENT_TYPE.equals(getContentType())) { + thirdDocumentId = indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-50 -50)"); + } else { + thirdDocumentId = indexDocumentUsingGeoJSON(getIndexName(), getFieldName(), new Point(-50, -50)); + } + + LinearRing ring1 = new LinearRing(new double[] { -35, -35, -25, -25, -35 }, new double[] { -35, -25, -25, -35, -35 }); + Polygon polygon1 = new Polygon(ring1); + + LinearRing ring2 = new LinearRing(new double[] { -55, -55, -45, -45, -55 }, new double[] { -55, -45, -45, -55, -55 }); + Polygon polygon2 = new Polygon(ring2); + + MultiPolygon multiPolygon = new MultiPolygon(List.of(polygon1, polygon2)); + List expectedDocIDs = List.of(firstDocumentID, thirdDocumentId); + + final SearchResponse searchResponse = searchUsingShapeRelation( + getIndexName(), + getFieldName(), + multiPolygon, + ShapeRelation.INTERSECTS + ); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, expectedDocIDs.size()); + List actualDocIDS = new ArrayList<>(); + for (SearchHit hit : searchResponse.getHits().getHits()) { + actualDocIDS.add(hit.getId()); + } + MatcherAssert.assertThat(expectedDocIDs, Matchers.containsInAnyOrder(actualDocIDS.toArray())); + + deleteIndex(getIndexName()); + } + + public void testIndexPointsIndexedRectangleNoMatch() throws Exception { + createIndex(getIndexName(), 
Settings.EMPTY, Map.of(getFieldName(), getContentType())); + + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-30 -30)"); + if (XYPointFieldMapper.CONTENT_TYPE.equals(getContentType())) { + indexDocumentUsingWKT(getIndexName(), getFieldName(), "POINT(-45 -50)"); + } else { + indexDocumentUsingGeoJSON(getIndexName(), getFieldName(), new Point(-45, -50)); + } + + // create an index to insert shape + String indexedShapeIndex = randomLowerCaseString(); + String indexedShapePath = randomLowerCaseString(); + createIndex(indexedShapeIndex, Settings.EMPTY, Map.of(indexedShapePath, XYShapeFieldMapper.CONTENT_TYPE)); + + final String shapeDocID = indexDocumentUsingWKT(indexedShapeIndex, indexedShapePath, "BBOX(-60, -50, -50, -60)"); + + final SearchResponse searchResponse = searchUsingIndexedShapeIndex( + getIndexName(), + indexedShapeIndex, + indexedShapePath, + shapeDocID, + getFieldName() + ); + assertSearchResponse(searchResponse); + assertHitCount(searchResponse, 0); + + deleteIndex(getIndexName()); + deleteIndex(indexedShapeIndex); + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryIT.java b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryIT.java new file mode 100644 index 00000000..05b7c8c7 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryIT.java @@ -0,0 +1,61 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query.xypoint; + +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; + +import java.util.Map; + +import org.opensearch.client.ResponseException; +import org.opensearch.common.geo.ShapeRelation; +import org.opensearch.common.settings.Settings; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import 
org.opensearch.geospatial.index.query.AbstractXYShapeQueryTestCase; + +public class XYPointQueryIT extends AbstractXYShapeQueryTestCase { + private String indexName; + private String xyPointFieldName; + + @Override + public void setUp() throws Exception { + super.setUp(); + indexName = randomLowerCaseString(); + xyPointFieldName = randomLowerCaseString(); + } + + @Override + public String getIndexName() { + return indexName; + } + + @Override + public String getFieldName() { + return xyPointFieldName; + } + + @Override + public String getContentType() { + return XYPointFieldMapper.CONTENT_TYPE; + } + + public void testIndexPointsFilterRectangleWithUnsupportedRelation() throws Exception { + createIndex(indexName, Settings.EMPTY, Map.of(xyPointFieldName, XYPointFieldMapper.CONTENT_TYPE)); + + final String firstDocument = buildDocumentWithWKT(xyPointFieldName, "POINT(-30 -30)"); + indexDocument(indexName, firstDocument); + + Rectangle rectangle = new Rectangle(-45, 45, 45, -45); + + ResponseException exception = expectThrows( + ResponseException.class, + () -> searchUsingShapeRelation(indexName, xyPointFieldName, rectangle, ShapeRelation.CONTAINS) + ); + assertTrue(exception.getMessage().contains("[CONTAINS] query relation not supported")); + + deleteIndex(indexName); + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessorTests.java b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessorTests.java new file mode 100644 index 00000000..c23c8d00 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryProcessorTests.java @@ -0,0 +1,209 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query.xypoint; + +import static java.util.Collections.emptyMap; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static 
org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomCircle; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomLine; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomLinearRing; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiLine; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiPoint; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiPolygon; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomPoint; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomPolygon; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomRectangle; + +import java.io.IOException; +import java.text.ParseException; +import java.util.Locale; + +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.opensearch.common.geo.ShapeRelation; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import org.opensearch.index.mapper.GeoPointFieldMapper; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.QueryShardException; +import org.opensearch.test.OpenSearchTestCase; + +public class XYPointQueryProcessorTests extends OpenSearchTestCase { + private 
XYPointQueryProcessor queryProcessor; + private String fieldName; + private ShapeRelation relation; + private QueryShardContext context; + private static final boolean VALID_FIELD_TYPE = true; + private static final boolean INVALID_FIELD_TYPE = false; + private final static Integer MAX_NUMBER_OF_VERTICES = 10; + private final static Integer MIN_NUMBER_OF_VERTICES = 2; + private final static Integer MIN_NUMBER_OF_GEOMETRY_OBJECTS = 10; + + @Override + public void setUp() throws Exception { + super.setUp(); + context = mock(QueryShardContext.class); + fieldName = GeospatialTestHelper.randomLowerCaseString(); + relation = ShapeRelation.INTERSECTS; + queryProcessor = new XYPointQueryProcessor(); + } + + public void testInvalidRelation() { + mockFieldType(VALID_FIELD_TYPE); + QueryShardException exception = expectThrows( + QueryShardException.class, + () -> queryProcessor.shapeQuery(randomPolygon(), fieldName, ShapeRelation.CONTAINS, context) + ); + assertTrue(exception.getMessage().contains("[CONTAINS] query relation not supported")); + } + + public void testQueryingNullFieldName() { + assertThrows(NullPointerException.class, () -> queryProcessor.shapeQuery(randomPolygon(), null, relation, context)); + } + + public void testQueryingNullQueryContext() { + assertThrows(NullPointerException.class, () -> queryProcessor.shapeQuery(randomPolygon(), fieldName, relation, null)); + } + + public void testQueryingNullShapeRelation() { + mockFieldType(VALID_FIELD_TYPE); + QueryShardException exception = expectThrows( + QueryShardException.class, + () -> queryProcessor.shapeQuery(randomPolygon(), fieldName, null, context) + ); + assertTrue(exception.getMessage().contains("[null] query relation not supported")); + } + + public void testQueryingNullGeometry() { + mockFieldType(VALID_FIELD_TYPE); + assertThrows(NullPointerException.class, () -> queryProcessor.shapeQuery(null, fieldName, relation, context)); + } + + public void testQueryingEmptyGeometry() { + 
mockFieldType(VALID_FIELD_TYPE); + final Query query = queryProcessor.shapeQuery(new GeometryCollection<>(), fieldName, relation, context); + assertEquals("No match found query should be returned", new MatchNoDocsQuery(), query); + } + + public void testQueryingInvalidFieldTypeGeometry() { + mockFieldType(INVALID_FIELD_TYPE); + final QueryShardException exception = expectThrows( + QueryShardException.class, + () -> queryProcessor.shapeQuery(randomPolygon(), fieldName, relation, context) + ); + assertEquals( + "wrong exception message", + String.format( + Locale.ROOT, + "Expected [%s] field type for Field [%s] but found [geo_point]", + XYPointFieldMapper.CONTENT_TYPE, + fieldName + ), + exception.getMessage() + ); + } + + public void testQueryingLinearRing() { + mockFieldType(VALID_FIELD_TYPE); + LinearRing ring = randomLinearRing(randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES), randomBoolean()); + expectThrows(QueryShardException.class, () -> queryProcessor.shapeQuery(ring, fieldName, relation, context)); + } + + public void testQueryingMultiLine() { + mockFieldType(VALID_FIELD_TYPE); + int verticesLimit = randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES); + final int linesLimit = atLeast(MIN_NUMBER_OF_GEOMETRY_OBJECTS); + MultiLine multiLine = randomMultiLine(verticesLimit, linesLimit, randomBoolean()); + expectThrows(QueryShardException.class, () -> queryProcessor.shapeQuery(multiLine, fieldName, relation, context)); + } + + public void testQueryingMultiPoint() { + mockFieldType(VALID_FIELD_TYPE); + int pointLimit = atLeast(MIN_NUMBER_OF_GEOMETRY_OBJECTS); + MultiPoint multiPoint = randomMultiPoint(pointLimit, randomBoolean()); + expectThrows(QueryShardException.class, () -> queryProcessor.shapeQuery(multiPoint, fieldName, relation, context)); + } + + public void testQueryingPoint() { + mockFieldType(VALID_FIELD_TYPE); + Point point = randomPoint(randomBoolean()); + expectThrows(QueryShardException.class, () -> 
queryProcessor.shapeQuery(point, fieldName, relation, context)); + } + + public void testQueryingLine() { + mockFieldType(VALID_FIELD_TYPE); + int verticesLimit = randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES); + Line line = randomLine(verticesLimit, randomBoolean()); + expectThrows(QueryShardException.class, () -> queryProcessor.shapeQuery(line, fieldName, relation, context)); + } + + public void testQueryingCircle() { + mockFieldType(VALID_FIELD_TYPE); + Circle circle = randomCircle(randomBoolean()); + assertNotNull("failed to convert to Query", queryProcessor.shapeQuery(circle, fieldName, relation, context)); + } + + public void testQueryingRectangle() { + mockFieldType(VALID_FIELD_TYPE); + Rectangle rectangle = randomRectangle(); + assertNotNull("failed to convert to Query", queryProcessor.shapeQuery(rectangle, fieldName, relation, context)); + } + + public void testQueryingPolygon() throws IOException, ParseException { + mockFieldType(VALID_FIELD_TYPE); + Polygon geometry = randomPolygon(); + assertNotNull("failed to convert to Query", queryProcessor.shapeQuery(geometry, fieldName, relation, context)); + } + + public void testQueryingMultiPolygon() throws IOException, ParseException { + mockFieldType(VALID_FIELD_TYPE); + MultiPolygon geometry = randomMultiPolygon(); + assertNotNull("failed to convert to Query", queryProcessor.shapeQuery(geometry, fieldName, relation, context)); + } + + public void testQueryingEmptyGeometryCollection() { + mockFieldType(VALID_FIELD_TYPE); + GeometryCollection geometry = new GeometryCollection<>(); + final Query actualQuery = queryProcessor.shapeQuery(geometry, fieldName, relation, context); + assertNotNull("failed to convert to Query", actualQuery); + assertEquals("MatchNoDocs query should be returned", new MatchNoDocsQuery(), actualQuery); + } + + public void testGetVectorQueryFromShape() { + mockFieldType(VALID_FIELD_TYPE); + Circle circle = randomCircle(randomBoolean()); + assertNotNull("failed to convert 
to Query", queryProcessor.shapeQuery(circle, fieldName, relation, context)); + } + + private void mockFieldType(boolean success) { + if (success) { + when(context.fieldMapper(fieldName)).thenReturn( + new XYPointFieldMapper.XYPointFieldType( + fieldName, + randomBoolean(), + randomBoolean(), + randomBoolean(), + emptyMap(), + mock(XYPointQueryProcessor.class) + ) + ); + return; + } + when(context.fieldMapper(fieldName)).thenReturn(new GeoPointFieldMapper.GeoPointFieldType(fieldName)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitorTests.java b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitorTests.java new file mode 100644 index 00000000..191f9620 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/index/query/xypoint/XYPointQueryVisitorTests.java @@ -0,0 +1,184 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.index.query.xypoint; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomCircle; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomGeometryCollection; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomLine; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomLinearRing; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiLine; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiPoint; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomMultiPolygon; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomPoint; +import static org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomPolygon; +import static 
org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder.randomRectangle; + +import java.io.IOException; +import java.text.ParseException; +import java.util.List; +import java.util.Locale; + +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.GeometryVisitor; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.index.mapper.xypoint.XYPointFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.QueryShardException; +import org.opensearch.test.OpenSearchTestCase; + +public class XYPointQueryVisitorTests extends OpenSearchTestCase { + private final static Integer MAX_NUMBER_OF_VERTICES = 10; + private final static Integer MIN_NUMBER_OF_VERTICES = 2; + private final static Integer MIN_NUMBER_OF_GEOMETRY_OBJECTS = 10; + private GeometryVisitor queryVisitor; + private String fieldName; + + private MappedFieldType fieldType; + + @Override + public void setUp() throws Exception { + super.setUp(); + QueryShardContext context = mock(QueryShardContext.class); + fieldType = mock(XYPointFieldMapper.XYPointFieldType.class); + fieldName = GeospatialTestHelper.randomLowerCaseString(); + queryVisitor = new XYPointQueryVisitor(fieldName, fieldType, context); + } + + public void testQueryingLinearRing() { + LinearRing ring = randomLinearRing(randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES), 
randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> ring.visit(queryVisitor)); + assertEquals( + String.format(Locale.ROOT, "Field [%s] found an unsupported shape [%s]", fieldName, ShapeType.LINEARRING.name()), + exception.getMessage() + ); + } + + public void testQueryingMultiLine() { + int verticesLimit = randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES); + final int linesLimit = atLeast(MIN_NUMBER_OF_GEOMETRY_OBJECTS); + MultiLine multiLine = randomMultiLine(verticesLimit, linesLimit, randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> multiLine.visit(queryVisitor)); + assertEquals( + String.format(Locale.ROOT, "Field [%s] found an unsupported shape [%s]", fieldName, ShapeType.MULTILINESTRING.name()), + exception.getMessage() + ); + } + + public void testQueryingMultiPoint() { + int pointLimit = atLeast(MIN_NUMBER_OF_GEOMETRY_OBJECTS); + MultiPoint multiPoint = randomMultiPoint(pointLimit, randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> multiPoint.visit(queryVisitor)); + assertEquals( + String.format(Locale.ROOT, "Field [%s] found an unsupported shape [%s]", fieldName, ShapeType.MULTIPOINT.name()), + exception.getMessage() + ); + } + + public void testQueryingPoint() { + Point point = randomPoint(randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> point.visit(queryVisitor)); + assertEquals( + String.format(Locale.ROOT, "Field [%s] found an unsupported shape [%s]", fieldName, ShapeType.POINT.name()), + exception.getMessage() + ); + } + + public void testQueryingLine() { + int verticesLimit = randomIntBetween(MIN_NUMBER_OF_VERTICES, MAX_NUMBER_OF_VERTICES); + Line line = randomLine(verticesLimit, randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> line.visit(queryVisitor)); + assertEquals( + String.format(Locale.ROOT, 
"Field [%s] found an unsupported shape [%s]", fieldName, ShapeType.LINESTRING.name()), + exception.getMessage() + ); + } + + public void testQueryingCircleAsNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> queryVisitor.visit((Circle) null)); + assertEquals("Circle cannot be null", nullPointerException.getMessage()); + } + + public void testQueryingRectangleAsNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> queryVisitor.visit((Rectangle) null)); + assertEquals("Rectangle cannot be null", nullPointerException.getMessage()); + } + + public void testQueryingPolygonAsNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> queryVisitor.visit((Polygon) null)); + assertEquals("Polygon cannot be null", nullPointerException.getMessage()); + } + + public void testQueryingMultiPolygonAsNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> queryVisitor.visit((MultiPolygon) null)); + assertEquals("Multi Polygon cannot be null", nullPointerException.getMessage()); + } + + public void testQueryingCircle() { + Circle circle = randomCircle(randomBoolean()); + when(fieldType.hasDocValues()).thenReturn(randomBoolean()); + Query query = circle.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + } + + public void testQueryingRectangle() { + Rectangle rectangle = randomRectangle(); + when(fieldType.hasDocValues()).thenReturn(randomBoolean()); + Query query = rectangle.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + } + + public void testQueryingPolygon() throws IOException, ParseException { + Polygon polygon = randomPolygon(); + when(fieldType.hasDocValues()).thenReturn(randomBoolean()); + Query query = polygon.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + } + + public void testQueryingMultiPolygon() throws 
IOException, ParseException { + MultiPolygon multiPolygon = randomMultiPolygon(); + when(fieldType.hasDocValues()).thenReturn(randomBoolean()); + Query query = multiPolygon.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + } + + public void testQueryingEmptyGeometryCollection() { + GeometryCollection collection = new GeometryCollection<>(); + Query query = collection.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + assertEquals("MatchNoDocs query should be returned", new MatchNoDocsQuery(), query); + } + + public void testQueryingUnsupportedGeometryCollection() { + GeometryCollection collection = randomGeometryCollection(MIN_NUMBER_OF_GEOMETRY_OBJECTS, randomBoolean()); + QueryShardException exception = expectThrows(QueryShardException.class, () -> collection.visit(queryVisitor)); + assertTrue( + "Validation failed for unsupported geometries", + exception.getMessage().contains(String.format(Locale.ROOT, "Field [%s] found an unsupported shape", fieldName)) + ); + } + + public void testQueryingGeometryCollection() throws IOException, ParseException { + GeometryCollection collection = new GeometryCollection<>(List.of(randomPolygon(), randomRectangle())); + when(fieldType.hasDocValues()).thenReturn(randomBoolean()); + Query query = collection.visit(queryVisitor); + assertNotNull("failed to convert to Query", query); + } +} diff --git a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilderTests.java b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilderTests.java index db3dfe49..4da130fc 100644 --- a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilderTests.java +++ b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryBuilderTests.java @@ -27,15 +27,14 @@ import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; 
-import org.opensearch.common.Strings; -import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.geo.GeoJson; import org.opensearch.common.geo.ShapeRelation; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.common.xcontent.XContentParser; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; import org.opensearch.geometry.Geometry; import org.opensearch.geometry.ShapeType; import org.opensearch.geospatial.index.common.xyshape.ShapeObjectBuilder; @@ -92,12 +91,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws mapperService.merge( DOC_TYPE, new CompressedXContent( - Strings.toString( - PutMappingRequest.simpleMapping( - XY_SHAPE_FIELD_NAME, - String.format(Locale.getDefault(), "%s=%s", MAPPING_FIELD_TYPE_KEY, XYShapeQueryBuilder.NAME) - ) - ) + PutMappingRequest.simpleMapping( + XY_SHAPE_FIELD_NAME, + String.format(Locale.ROOT, "%s=%s", MAPPING_FIELD_TYPE_KEY, XYShapeQueryBuilder.NAME) + ).toString() ), MapperService.MergeReason.MAPPING_UPDATE ); @@ -120,7 +117,7 @@ protected GetResponse executeGet(GetRequest getRequest) { builder.field(expectedShapePath, (contentBuilder, params) -> GeoJson.toXContent(indexedShapeToReturn, contentBuilder, params)); builder.field(randomAlphaOfLengthBetween(10, 20), randomLowerCaseString()); builder.endObject(); - json = Strings.toString(builder); + json = builder.toString(); } catch (IOException ex) { throw new OpenSearchException(ex); } @@ -241,7 +238,7 @@ private XYShapeQueryBuilder createQueryBuilderFromQueryShape() { Geometry geometry = ShapeObjectBuilder.randomGeometryWithXYCoordinates(); XYShapeQueryBuilder builder = new XYShapeQueryBuilder(XY_SHAPE_FIELD_NAME, geometry); 
builder.ignoreUnmapped(randomBoolean()); - return setRelationBasedonType(geometry.type(), builder); + return setRelationBasedOnType(geometry.type(), builder); } private XYShapeQueryBuilder createQueryBuilderFromIndexedShape() { @@ -262,10 +259,10 @@ private XYShapeQueryBuilder createQueryBuilderFromIndexedShape() { builder.indexedShapeRouting(indexedShapeRouting); } builder.ignoreUnmapped(randomBoolean()); - return setRelationBasedonType(indexedShapeToReturn.type(), builder); + return setRelationBasedOnType(indexedShapeToReturn.type(), builder); } - private XYShapeQueryBuilder setRelationBasedonType(ShapeType shapeType, XYShapeQueryBuilder builder) { + private XYShapeQueryBuilder setRelationBasedOnType(ShapeType shapeType, XYShapeQueryBuilder builder) { if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { return builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); } diff --git a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryIT.java b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryIT.java index d3cbfdcb..7d126707 100644 --- a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryIT.java +++ b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryIT.java @@ -6,38 +6,24 @@ package org.opensearch.geospatial.index.query.xyshape; import static org.hamcrest.Matchers.equalTo; -import static org.opensearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.opensearch.core.xcontent.ToXContent.EMPTY_PARAMS; import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; import static org.opensearch.index.query.AbstractGeometryQueryBuilder.DEFAULT_SHAPE_FIELD_NAME; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -import java.util.ArrayList; -import java.util.List; 
import java.util.Map; import org.hamcrest.MatcherAssert; -import org.hamcrest.Matchers; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoJson; -import org.opensearch.common.geo.ShapeRelation; import org.opensearch.common.settings.Settings; -import org.opensearch.geometry.Circle; -import org.opensearch.geometry.LinearRing; -import org.opensearch.geometry.MultiPolygon; import org.opensearch.geometry.Point; -import org.opensearch.geometry.Polygon; import org.opensearch.geometry.Rectangle; -import org.opensearch.geospatial.GeospatialRestTestCase; import org.opensearch.geospatial.index.mapper.xyshape.XYShapeFieldMapper; -import org.opensearch.search.SearchHit; +import org.opensearch.geospatial.index.query.AbstractXYShapeQueryTestCase; -public class XYShapeQueryIT extends GeospatialRestTestCase { - - private static final String INDEXED_SHAPE_FIELD = "indexed_shape"; - private static final String SHAPE_INDEX_FIELD = "index"; - private static final String SHAPE_ID_FIELD = "id"; - private static final String SHAPE_INDEX_PATH_FIELD = "path"; +public class XYShapeQueryIT extends AbstractXYShapeQueryTestCase { private String indexName; private String xyShapeFieldName; @@ -48,225 +34,38 @@ public void setUp() throws Exception { xyShapeFieldName = randomLowerCaseString(); } - public void testNullShape() throws Exception { - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - String body = buildContentAsString(builder -> builder.field(xyShapeFieldName, (String) null)); - String docID = indexDocument(indexName, body); - - final Map document = getDocument(docID, indexName); - assertTrue("failed to index document with type", document.containsKey(xyShapeFieldName)); - assertNull("failed to accept null value", document.get(xyShapeFieldName)); + @Override + public String getIndexName() { + return indexName; + } - deleteIndex(indexName); + @Override + public String getFieldName() { + return 
xyShapeFieldName; + } + @Override + public String getContentType() { + return XYShapeFieldMapper.CONTENT_TYPE; } public void testIndexPointsFilterRectangleWithDefaultRelation() throws Exception { createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); // Will index two points and search with envelope that will intersect only one point - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - final String firstDocumentID = indexDocument(indexName, firstDocument); - - Point point = new Point(-45, -50); - final String secondDocument = buildDocumentWithGeoJSON(xyShapeFieldName, point); - indexDocument(indexName, secondDocument); + final String firstDocumentID = indexDocumentUsingWKT(indexName, xyShapeFieldName, "POINT(-30 -30)"); + indexDocumentUsingGeoJSON(indexName, xyShapeFieldName, new Point(-45, -50)); Rectangle rectangle = new Rectangle(-45, 45, 45, -45); - String searchEntity = buildSearchBodyAsString(builder -> { + String searchEntity = buildSearchQueryBodyAsString(builder -> { builder.field(DEFAULT_SHAPE_FIELD_NAME); GeoJson.toXContent(rectangle, builder, EMPTY_PARAMS); }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, 1); - MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(firstDocumentID)); - - deleteIndex(indexName); - } - - public void testIndexPointsFilterRectangleWithIntersectsRelation() throws Exception { - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - final String firstDocumentID = indexDocument(indexName, firstDocument); - - final String secondDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-45 -50)"); - indexDocument(indexName, secondDocument); - - 
Rectangle rectangle = new Rectangle(-45, 45, 45, -45); - String searchEntity = buildSearchBodyAsString(builder -> { - builder.field(DEFAULT_SHAPE_FIELD_NAME); - GeoJson.toXContent(rectangle, builder, EMPTY_PARAMS); - builder.field("relation", ShapeRelation.INTERSECTS.getRelationName()); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, 1); - MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(firstDocumentID)); - - deleteIndex(indexName); - } - - public void testIndexPointsCircle() throws Exception { - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-131 -30)"); - indexDocument(indexName, firstDocument); - - final String secondDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-45 -50)"); - final String secondDocumentID = indexDocument(indexName, secondDocument); - - Circle circle = new Circle(-30, -30, 100); - String searchEntity = buildSearchBodyAsString(builder -> { - builder.field(DEFAULT_SHAPE_FIELD_NAME); - GeoJson.toXContent(circle, builder, EMPTY_PARAMS); - builder.field("relation", ShapeRelation.INTERSECTS.getRelationName()); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, 1); - MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(secondDocumentID)); - - deleteIndex(indexName); - } - - public void testIndexPointsPolygon() throws Exception { - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - final String firstDocumentID = 
indexDocument(indexName, firstDocument); - - final String secondDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-45 -50)"); - indexDocument(indexName, secondDocument); - - double[] x = new double[] { -35, -35, -25, -25, -35 }; - double[] y = new double[] { -35, -25, -25, -35, -35 }; - LinearRing ring = new LinearRing(x, y); - Polygon polygon = new Polygon(ring); - String searchEntity = buildSearchBodyAsString(builder -> { - builder.field(DEFAULT_SHAPE_FIELD_NAME); - GeoJson.toXContent(polygon, builder, EMPTY_PARAMS); - builder.field("relation", ShapeRelation.INTERSECTS.getRelationName()); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); + final SearchResponse searchResponse = searchIndex(indexName, searchEntity, false); assertSearchResponse(searchResponse); assertHitCount(searchResponse, 1); MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(firstDocumentID)); deleteIndex(indexName); } - - public void testIndexPointsMultiPolygon() throws Exception { - - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - final String firstDocumentID = indexDocument(indexName, firstDocument); - - final String secondDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-40 -40)"); - indexDocument(indexName, secondDocument); - - final String thirdDocument = buildDocumentWithGeoJSON(xyShapeFieldName, new Point(-50, -50)); - final String thirdDocumentId = indexDocument(indexName, thirdDocument); - - LinearRing ring1 = new LinearRing(new double[] { -35, -35, -25, -25, -35 }, new double[] { -35, -25, -25, -35, -35 }); - Polygon polygon1 = new Polygon(ring1); - - LinearRing ring2 = new LinearRing(new double[] { -55, -55, -45, -45, -55 }, new double[] { -55, -45, -45, -55, -55 }); - Polygon polygon2 = new Polygon(ring2); - - MultiPolygon 
multiPolygon = new MultiPolygon(List.of(polygon1, polygon2)); - String searchEntity = buildSearchBodyAsString(builder -> { - builder.field(DEFAULT_SHAPE_FIELD_NAME); - GeoJson.toXContent(multiPolygon, builder, EMPTY_PARAMS); - builder.field("relation", ShapeRelation.INTERSECTS.getRelationName()); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - List expectedDocIDs = List.of(firstDocumentID, thirdDocumentId); - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, expectedDocIDs.size()); - List actualDocIDS = new ArrayList<>(); - for (SearchHit hit : searchResponse.getHits().getHits()) { - actualDocIDS.add(hit.getId()); - } - MatcherAssert.assertThat(expectedDocIDs, Matchers.containsInAnyOrder(actualDocIDS.toArray())); - } - - public void testIndexPointsIndexedRectangleMatches() throws Exception { - - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - // Will index two points and search with envelope that will intersect only one point - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - indexDocument(indexName, firstDocument); - - Point point = new Point(-45, -50); - final String secondDocument = buildDocumentWithGeoJSON(xyShapeFieldName, point); - final String secondDocumentID = indexDocument(indexName, secondDocument); - - // create an index to insert shape - String indexedShapeIndex = randomLowerCaseString(); - String indexedShapePath = randomLowerCaseString(); - createIndex(indexedShapeIndex, Settings.EMPTY, Map.of(indexedShapePath, XYShapeFieldMapper.CONTENT_TYPE)); - - final String indexedRefDoc1 = buildDocumentWithWKT(indexedShapePath, "BBOX(-50, -40, -45, -55)"); - final String shape = indexDocument(indexedShapeIndex, indexedRefDoc1); - - String searchEntity = buildSearchBodyAsString(builder -> { - builder.startObject(INDEXED_SHAPE_FIELD); - builder.field(SHAPE_INDEX_FIELD, 
indexedShapeIndex); - builder.field(SHAPE_ID_FIELD, shape); - builder.field(SHAPE_INDEX_PATH_FIELD, indexedShapePath); - builder.endObject(); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, 1); - MatcherAssert.assertThat(searchResponse.getHits().getAt(0).getId(), equalTo(secondDocumentID)); - - deleteIndex(indexName); - deleteIndex(indexedShapeIndex); - } - - public void testIndexPointsIndexedRectangleNoMatch() throws Exception { - - createIndex(indexName, Settings.EMPTY, Map.of(xyShapeFieldName, XYShapeFieldMapper.CONTENT_TYPE)); - // Will index two points and search with envelope that will intersect only one point - final String firstDocument = buildDocumentWithWKT(xyShapeFieldName, "POINT(-30 -30)"); - indexDocument(indexName, firstDocument); - - Point point = new Point(-45, -50); - final String secondDocument = buildDocumentWithGeoJSON(xyShapeFieldName, point); - indexDocument(indexName, secondDocument); - - // create an index to insert shape - String indexedShapeIndex = randomLowerCaseString(); - String indexedShapePath = randomLowerCaseString(); - createIndex(indexedShapeIndex, Settings.EMPTY, Map.of(indexedShapePath, XYShapeFieldMapper.CONTENT_TYPE)); - - final String indexedRefDoc2 = buildDocumentWithWKT(indexedShapePath, "BBOX(-60, -50, -50, -60)"); - final String shape = indexDocument(indexedShapeIndex, indexedRefDoc2); - - String searchEntity = buildSearchBodyAsString(builder -> { - builder.startObject(INDEXED_SHAPE_FIELD); - builder.field(SHAPE_INDEX_FIELD, indexedShapeIndex); - builder.field(SHAPE_ID_FIELD, shape); - builder.field(SHAPE_INDEX_PATH_FIELD, indexedShapePath); - builder.endObject(); - }, XYShapeQueryBuilder.NAME, xyShapeFieldName); - - final SearchResponse searchResponse = searchIndex(indexName, searchEntity); - - assertSearchResponse(searchResponse); - assertHitCount(searchResponse, 
0); - - deleteIndex(indexName); - deleteIndex(indexedShapeIndex); - } - } diff --git a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitorTests.java b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitorTests.java index 541b01a1..806f1827 100644 --- a/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitorTests.java +++ b/src/test/java/org/opensearch/geospatial/index/query/xyshape/XYShapeQueryVisitorTests.java @@ -65,7 +65,7 @@ public void testQueryingCircle() { Circle circle = randomCircle(randomBoolean()); final List geometries = queryVisitor.visit(circle); assertNotNull("failed to convert to XYCircle", geometries); - assertEquals("Unexpected number of geomteries found", SIZE, geometries.size()); + assertEquals("Unexpected number of geometries found", SIZE, geometries.size()); assertTrue("invalid object found", geometries.get(FIRST_GEOMETRY) instanceof XYCircle); } @@ -80,7 +80,7 @@ public void testQueryingLine() { Line geometry = randomLine(verticesLimit, randomBoolean()); final List geometries = queryVisitor.visit(geometry); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", SIZE, geometries.size()); + assertEquals("Unexpected number of geometries found", SIZE, geometries.size()); assertTrue("invalid object found ", geometries.get(FIRST_GEOMETRY) instanceof XYLine); } @@ -90,7 +90,7 @@ public void testQueryingMultiLine() { MultiLine multiLine = randomMultiLine(verticesLimit, linesLimit, randomBoolean()); final List geometries = queryVisitor.visit(multiLine); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", geometries.size(), multiLine.size()); + assertEquals("Unexpected number of geometries found", geometries.size(), multiLine.size()); for (XYGeometry geometry : geometries) { assertTrue("invalid object found", geometry instanceof XYLine); } @@ -100,7 
+100,7 @@ public void testQueryingPoint() { Point geometry = randomPoint(randomBoolean()); final List geometries = queryVisitor.visit(geometry); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", SIZE, geometries.size()); + assertEquals("Unexpected number of geometries found", SIZE, geometries.size()); assertTrue("invalid object found", geometries.get(FIRST_GEOMETRY) instanceof XYPoint); } @@ -110,7 +110,7 @@ public void testQueryingMultiPoint() { MultiPoint multiPoint = randomMultiPoint(pointLimit, randomBoolean()); final List geometries = queryVisitor.visit(multiPoint); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", geometries.size(), multiPoint.size()); + assertEquals("Unexpected number of geometries found", geometries.size(), multiPoint.size()); for (XYGeometry geometry : geometries) { assertTrue("invalid object found", geometry instanceof XYPoint); } @@ -120,7 +120,7 @@ public void testQueryingPolygon() throws IOException, ParseException { Polygon geometry = randomPolygon(); final List geometries = queryVisitor.visit(geometry); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", SIZE, geometries.size()); + assertEquals("Unexpected number of geometries found", SIZE, geometries.size()); assertTrue("invalid object found", geometries.get(FIRST_GEOMETRY) instanceof XYPolygon); } @@ -128,7 +128,7 @@ public void testQueryingMultiPolygon() throws IOException, ParseException { MultiPolygon multiPolygon = randomMultiPolygon(); final List geometries = queryVisitor.visit(multiPolygon); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", geometries.size(), multiPolygon.size()); + assertEquals("Unexpected number of geometries found", geometries.size(), multiPolygon.size()); for (XYGeometry geometry : 
geometries) { assertTrue("invalid object found", geometry instanceof XYPolygon); } @@ -145,7 +145,7 @@ public void testQueryingRectangle() { Rectangle geometry = randomRectangle(); final List geometries = queryVisitor.visit(geometry); assertNotNull("Query geometries cannot be null", geometries); - assertEquals("Unexpected number of geomteries found", SIZE, geometries.size()); + assertEquals("Unexpected number of geometries found", SIZE, geometries.size()); assertTrue("invalid object found", geometries.get(FIRST_GEOMETRY) instanceof XYRectangle); } } diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoDataServer.java b/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoDataServer.java new file mode 100644 index 00000000..2058ff75 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoDataServer.java @@ -0,0 +1,127 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Paths; + +import org.opensearch.common.SuppressForbidden; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import lombok.SneakyThrows; +import lombok.extern.log4j.Log4j2; + +/** + * Simple http server to serve static files under test/java/resources/ip2geo/server for integration testing + */ +@Log4j2 +@SuppressForbidden(reason = "used only for testing") +public class Ip2GeoDataServer { + private static final String SYS_PROPERTY_KEY_CLUSTER_ENDPOINT = "tests.rest.cluster"; + private static final String LOCAL_CLUSTER_ENDPOINT = "127.0.0.1"; + private static final String ROOT = "ip2geo/server"; + private static final int PORT = 8001; + private static final String EXTERNAL_ENDPOINT_PREFIX = + 
"https://raw.githubusercontent.com/opensearch-project/geospatial/main/src/test/resources/ip2geo/server"; + + private static HttpServer server; + private static volatile int counter = 0; + private static String endpointPrefix = "http://localhost:" + PORT; + private static String cityFilePath = endpointPrefix + "/city/manifest_local.json"; + private static String countryFilePath = endpointPrefix + "/country/manifest_local.json"; + + /** + * Return an endpoint to a manifest file for a sample city data + * The sample data should contain three lines as follows + * + * cidr,city,country + * 10.0.0.0/8,Seattle,USA + * 127.0.0.0/12,Vancouver,Canada + * fd12:2345:6789:1::/64,Bengaluru,India + * + */ + public static String getEndpointCity() { + return cityFilePath; + } + + /** + * Return an endpoint to a manifest file for a sample country data + * The sample data should contain three lines as follows + * + * cidr,country + * 10.0.0.0/8,USA + * 127.0.0.0/12,Canada + * fd12:2345:6789:1::/64,India + * + */ + public static String getEndpointCountry() { + return countryFilePath; + } + + @SneakyThrows + synchronized public static void start() { + log.info("Start server is called"); + // If it is remote cluster test, use external endpoint and do not launch local server + if (System.getProperty(SYS_PROPERTY_KEY_CLUSTER_ENDPOINT).contains(LOCAL_CLUSTER_ENDPOINT) == false) { + log.info("Remote cluster[{}] testing. 
Skip launching local server", System.getProperty(SYS_PROPERTY_KEY_CLUSTER_ENDPOINT)); + cityFilePath = EXTERNAL_ENDPOINT_PREFIX + "/city/manifest.json"; + countryFilePath = EXTERNAL_ENDPOINT_PREFIX + "/country/manifest.json"; + return; + } + + counter++; + if (server != null) { + log.info("Server has started already"); + return; + } + server = HttpServer.create(new InetSocketAddress("localhost", PORT), 0); + server.createContext("/", new Ip2GeoHttpHandler()); + server.start(); + log.info("Local file server started on port {}", PORT); + } + + synchronized public static void stop() { + log.info("Stop server is called"); + if (server == null) { + log.info("Server has stopped already"); + return; + } + counter--; + if (counter > 0) { + log.info("[{}] processors are still using the server", counter); + return; + } + + server.stop(0); + server = null; + log.info("Server stopped"); + } + + @SuppressForbidden(reason = "used only for testing") + private static class Ip2GeoHttpHandler implements HttpHandler { + @Override + public void handle(final HttpExchange exchange) throws IOException { + try { + byte[] data = Files.readAllBytes( + Paths.get(this.getClass().getClassLoader().getResource(ROOT + exchange.getRequestURI().getPath()).toURI()) + ); + exchange.sendResponseHeaders(200, data.length); + OutputStream outputStream = exchange.getResponseBody(); + outputStream.write(data); + outputStream.flush(); + outputStream.close(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoTestCase.java b/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoTestCase.java new file mode 100644 index 00000000..5d7b8e1c --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/Ip2GeoTestCase.java @@ -0,0 +1,361 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo; + +import static 
org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.net.URL; +import java.nio.file.Paths; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Locale; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.stream.Collectors; + +import org.junit.After; +import org.junit.Before; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionType; +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.routing.RoutingTable; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Randomness; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoExecutor; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.dao.Ip2GeoCachedDao; +import 
org.opensearch.geospatial.ip2geo.dao.Ip2GeoProcessorDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceTask; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceUpdateService; +import org.opensearch.geospatial.ip2geo.processor.Ip2GeoProcessor; +import org.opensearch.ingest.IngestMetadata; +import org.opensearch.ingest.IngestService; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import org.opensearch.jobscheduler.spi.utils.LockService; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskListener; +import org.opensearch.test.client.NoOpNodeClient; +import org.opensearch.test.rest.RestActionTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import lombok.SneakyThrows; + +public abstract class Ip2GeoTestCase extends RestActionTestCase { + @Mock + protected ClusterService clusterService; + @Mock + protected DatasourceUpdateService datasourceUpdateService; + @Mock + protected DatasourceDao datasourceDao; + @Mock + protected Ip2GeoExecutor ip2GeoExecutor; + @Mock + protected GeoIpDataDao geoIpDataDao; + @Mock + protected Ip2GeoCachedDao ip2GeoCachedDao; + @Mock + protected ClusterState clusterState; + @Mock + protected Metadata metadata; + @Mock + protected IngestService ingestService; + @Mock + protected ActionFilters actionFilters; + @Mock + protected ThreadPool threadPool; + @Mock + protected TransportService transportService; + @Mock + protected Ip2GeoLockService ip2GeoLockService; + @Mock + protected Ip2GeoProcessorDao ip2GeoProcessorDao; + @Mock + protected RoutingTable routingTable; + @Mock + protected URLDenyListChecker urlDenyListChecker; + protected IngestMetadata ingestMetadata; + protected NoOpNodeClient client; + protected VerifyingClient verifyingClient; + protected LockService lockService; + protected ClusterSettings 
clusterSettings; + protected Settings settings; + private AutoCloseable openMocks; + + @Before + public void prepareIp2GeoTestCase() { + openMocks = MockitoAnnotations.openMocks(this); + settings = Settings.EMPTY; + client = new NoOpNodeClient(this.getTestName()); + verifyingClient = spy(new VerifyingClient(this.getTestName())); + clusterSettings = new ClusterSettings(settings, new HashSet<>(Ip2GeoSettings.settings())); + lockService = new LockService(client, clusterService); + ingestMetadata = new IngestMetadata(Collections.emptyMap()); + when(urlDenyListChecker.toUrlIfNotInDenyList(anyString())).thenAnswer(i -> new URL(i.getArgument(0))); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); + when(clusterService.getSettings()).thenReturn(Settings.EMPTY); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(clusterState); + when(clusterState.metadata()).thenReturn(metadata); + when(clusterState.getMetadata()).thenReturn(metadata); + when(clusterState.routingTable()).thenReturn(routingTable); + when(ip2GeoExecutor.forDatasourceUpdate()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); + when(ingestService.getClusterService()).thenReturn(clusterService); + when(threadPool.generic()).thenReturn(OpenSearchExecutors.newDirectExecutorService()); + } + + @After + public void clean() throws Exception { + openMocks.close(); + client.close(); + verifyingClient.close(); + } + + protected DatasourceState randomStateExcept(DatasourceState state) { + assertNotNull(state); + return Arrays.stream(DatasourceState.values()) + .sequential() + .filter(s -> !s.equals(state)) + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(DatasourceState.values().length - 2)); + } + + protected DatasourceState randomState() { + return Arrays.stream(DatasourceState.values()) + .sequential() + .collect(Collectors.toList()) + 
.get(Randomness.createSecure().nextInt(DatasourceState.values().length - 1)); + } + + protected DatasourceTask randomTask() { + return Arrays.stream(DatasourceTask.values()) + .sequential() + .collect(Collectors.toList()) + .get(Randomness.createSecure().nextInt(DatasourceTask.values().length - 1)); + } + + protected String randomIpAddress() { + return String.format( + Locale.ROOT, + "%d.%d.%d.%d", + Randomness.get().nextInt(255), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255) + ); + } + + @SneakyThrows + @SuppressForbidden(reason = "unit test") + protected String sampleManifestUrl() { + return Paths.get(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").toURI()).toUri().toURL().toExternalForm(); + } + + @SuppressForbidden(reason = "unit test") + protected String sampleManifestUrlWithInvalidUrl() throws Exception { + return Paths.get(this.getClass().getClassLoader().getResource("ip2geo/manifest_invalid_url.json").toURI()) + .toUri() + .toURL() + .toExternalForm(); + } + + @SuppressForbidden(reason = "unit test") + protected File sampleIp2GeoFile() { + return new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + } + + protected long randomPositiveLong() { + long value = Randomness.get().nextLong(); + return value < 0 ? -value : value; + } + + /** + * Update interval should be > 0 and < validForInDays. + * For an update test to work, there should be at least one eligible value other than current update interval. + * Therefore, the smallest value for validForInDays is 2. + * Update interval is random value from 1 to validForInDays - 2. + * The new update value will be validForInDays - 1. 
+ */ + protected Datasource randomDatasource(final Instant updateStartTime) { + int validForInDays = 3 + Randomness.get().nextInt(30); + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + Datasource datasource = new Datasource(); + datasource.setName(GeospatialTestHelper.randomLowerCaseString()); + datasource.setUserSchedule( + new IntervalSchedule( + updateStartTime.truncatedTo(ChronoUnit.MILLIS), + 1 + Randomness.get().nextInt(validForInDays - 2), + ChronoUnit.DAYS + ) + ); + datasource.setSystemSchedule(datasource.getUserSchedule()); + datasource.setTask(randomTask()); + datasource.setState(randomState()); + datasource.setCurrentIndex(datasource.newIndexName(UUID.randomUUID().toString())); + datasource.setIndices(Arrays.asList(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString())); + datasource.setEndpoint(String.format(Locale.ROOT, "https://%s.com/manifest.json", GeospatialTestHelper.randomLowerCaseString())); + datasource.getDatabase() + .setFields(Arrays.asList(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString())); + datasource.getDatabase().setProvider(GeospatialTestHelper.randomLowerCaseString()); + datasource.getDatabase().setUpdatedAt(now); + datasource.getDatabase().setSha256Hash(GeospatialTestHelper.randomLowerCaseString()); + datasource.getDatabase().setValidForInDays((long) validForInDays); + datasource.getUpdateStats().setLastSkippedAt(now); + datasource.getUpdateStats().setLastSucceededAt(now); + datasource.getUpdateStats().setLastFailedAt(now); + datasource.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + datasource.setLastUpdateTime(now); + if (Randomness.get().nextInt() % 2 == 0) { + datasource.enable(); + } else { + datasource.disable(); + } + return datasource; + } + + protected Datasource randomDatasource() { + return randomDatasource(Instant.now()); + } + + protected LockModel randomLockModel() { + LockModel lockModel = new 
LockModel( + GeospatialTestHelper.randomLowerCaseString(), + GeospatialTestHelper.randomLowerCaseString(), + Instant.now(), + randomPositiveLong(), + false + ); + return lockModel; + } + + protected Ip2GeoProcessor randomIp2GeoProcessor(String datasourceName) { + String tag = GeospatialTestHelper.randomLowerCaseString(); + String description = GeospatialTestHelper.randomLowerCaseString(); + String field = GeospatialTestHelper.randomLowerCaseString(); + String targetField = GeospatialTestHelper.randomLowerCaseString(); + Set properties = Set.of(GeospatialTestHelper.randomLowerCaseString()); + Ip2GeoProcessor ip2GeoProcessor = new Ip2GeoProcessor( + tag, + description, + field, + targetField, + datasourceName, + properties, + true, + clusterSettings, + datasourceDao, + geoIpDataDao, + ip2GeoCachedDao + ); + return ip2GeoProcessor; + } + + /** + * Temporary class of VerifyingClient until this PR(https://github.com/opensearch-project/OpenSearch/pull/7167) + * is merged in OpenSearch core + */ + public static class VerifyingClient extends NoOpNodeClient { + AtomicReference executeVerifier = new AtomicReference<>(); + AtomicReference executeLocallyVerifier = new AtomicReference<>(); + + public VerifyingClient(String testName) { + super(testName); + reset(); + } + + /** + * Clears any previously set verifier functions set by {@link #setExecuteVerifier(BiFunction)} and/or + * {@link #setExecuteLocallyVerifier(BiFunction)}. These functions are replaced with functions which will throw an + * {@link AssertionError} if called. + */ + public void reset() { + executeVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + executeLocallyVerifier.set((arg1, arg2) -> { throw new AssertionError(); }); + } + + /** + * Sets the function that will be called when {@link #doExecute(ActionType, ActionRequest, ActionListener)} is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. 
+ * @param verifier A function which is called in place of {@link #doExecute(ActionType, ActionRequest, ActionListener)} + */ + public void setExecuteVerifier( + BiFunction, Request, Response> verifier + ) { + executeVerifier.set(verifier); + } + + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + try { + listener.onResponse((Response) executeVerifier.get().apply(action, request)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + /** + * Sets the function that will be called when {@link #executeLocally(ActionType, ActionRequest, TaskListener)}is called. The given + * function should return either a subclass of {@link ActionResponse} or {@code null}. + * @param verifier A function which is called in place of {@link #executeLocally(ActionType, ActionRequest, TaskListener)} + */ + public void setExecuteLocallyVerifier( + BiFunction, Request, Response> verifier + ) { + executeLocallyVerifier.set(verifier); + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + ActionListener listener + ) { + listener.onResponse((Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + @Override + public Task executeLocally( + ActionType action, + Request request, + TaskListener listener + ) { + listener.onResponse(null, (Response) executeLocallyVerifier.get().apply(action, request)); + return null; + } + + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequestTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequestTests.java new file mode 100644 index 00000000..c2720fb9 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceRequestTests.java @@ -0,0 +1,65 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import 
org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; + +import lombok.SneakyThrows; + +public class DeleteDatasourceRequestTests extends Ip2GeoTestCase { + @SneakyThrows + public void testStreamInOut_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + DeleteDatasourceRequest request = new DeleteDatasourceRequest(datasourceName); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + DeleteDatasourceRequest copiedRequest = new DeleteDatasourceRequest(input); + + // Verify + assertEquals(request.getName(), copiedRequest.getName()); + } + + public void testValidate_whenNull_thenError() { + DeleteDatasourceRequest request = new DeleteDatasourceRequest((String) null); + + // Run + ActionRequestValidationException error = request.validate(); + + // Verify + assertNotNull(error.validationErrors()); + assertFalse(error.validationErrors().isEmpty()); + } + + public void testValidate_whenBlank_thenError() { + DeleteDatasourceRequest request = new DeleteDatasourceRequest(" "); + + // Run + ActionRequestValidationException error = request.validate(); + + // Verify + assertNotNull(error.validationErrors()); + assertFalse(error.validationErrors().isEmpty()); + } + + public void testValidate_whenInvalidDatasourceName_thenFails() { + String invalidName = "_" + GeospatialTestHelper.randomLowerCaseString(); + DeleteDatasourceRequest request = new DeleteDatasourceRequest(invalidName); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + 
assertTrue(exception.validationErrors().get(0).contains("no such datasource")); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportActionTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportActionTests.java new file mode 100644 index 00000000..76604f1c --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/DeleteDatasourceTransportActionTests.java @@ -0,0 +1,181 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.time.Instant; +import java.util.Arrays; +import java.util.Collections; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.InOrder; +import org.mockito.Mockito; +import org.opensearch.OpenSearchException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.tasks.Task; + +import lombok.SneakyThrows; + +public class DeleteDatasourceTransportActionTests extends Ip2GeoTestCase { + private DeleteDatasourceTransportAction action; + + @Before + public void init() { + action 
= new DeleteDatasourceTransportAction( + transportService, + actionFilters, + ip2GeoLockService, + ingestService, + datasourceDao, + geoIpDataDao, + ip2GeoProcessorDao, + threadPool + ); + } + + @SneakyThrows + public void testDoExecute_whenFailedToAcquireLock_thenError() { + validateDoExecute(null, null); + } + + @SneakyThrows + public void testDoExecute_whenValidInput_thenSucceed() { + String jobIndexName = GeospatialTestHelper.randomLowerCaseString(); + String jobId = GeospatialTestHelper.randomLowerCaseString(); + LockModel lockModel = new LockModel(jobIndexName, jobId, Instant.now(), randomPositiveLong(), false); + validateDoExecute(lockModel, null); + } + + @SneakyThrows + public void testDoExecute_whenException_thenError() { + validateDoExecute(null, new RuntimeException()); + } + + private void validateDoExecute(final LockModel lockModel, final Exception exception) throws IOException { + Task task = mock(Task.class); + Datasource datasource = randomDatasource(); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + DeleteDatasourceRequest request = new DeleteDatasourceRequest(datasource.getName()); + ActionListener listener = mock(ActionListener.class); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + if (exception == null) { + // Run + captor.getValue().onResponse(lockModel); + + // Verify + if (lockModel == null) { + verify(listener).onFailure(any(OpenSearchException.class)); + } else { + verify(listener).onResponse(new AcknowledgedResponse(true)); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + } else { + // Run + captor.getValue().onFailure(exception); + // Verify + verify(listener).onFailure(exception); + } + } + + @SneakyThrows + public void testDeleteDatasource_whenNull_thenThrowException() { + Datasource datasource 
= randomDatasource(); + expectThrows(ResourceNotFoundException.class, () -> action.deleteDatasource(datasource.getName())); + } + + @SneakyThrows + public void testDeleteDatasource_whenSafeToDelete_thenDelete() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + when(ip2GeoProcessorDao.getProcessors(datasource.getName())).thenReturn(Collections.emptyList()); + + // Run + action.deleteDatasource(datasource.getName()); + + // Verify + assertEquals(DatasourceState.DELETING, datasource.getState()); + verify(datasourceDao).updateDatasource(datasource); + InOrder inOrder = Mockito.inOrder(geoIpDataDao, datasourceDao); + inOrder.verify(geoIpDataDao).deleteIp2GeoDataIndex(datasource.getIndices()); + inOrder.verify(datasourceDao).deleteDatasource(datasource); + } + + @SneakyThrows + public void testDeleteDatasource_whenProcessorIsUsingDatasource_thenThrowException() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + when(ip2GeoProcessorDao.getProcessors(datasource.getName())).thenReturn(Arrays.asList(randomIp2GeoProcessor(datasource.getName()))); + + // Run + expectThrows(OpenSearchException.class, () -> action.deleteDatasource(datasource.getName())); + + // Verify + assertEquals(DatasourceState.AVAILABLE, datasource.getState()); + verify(datasourceDao, never()).updateDatasource(datasource); + verify(geoIpDataDao, never()).deleteIp2GeoDataIndex(datasource.getIndices()); + verify(datasourceDao, never()).deleteDatasource(datasource); + } + + @SneakyThrows + public void testDeleteDatasource_whenProcessorIsCreatedDuringDeletion_thenThrowException() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + 
when(ip2GeoProcessorDao.getProcessors(datasource.getName())).thenReturn( + Collections.emptyList(), + Arrays.asList(randomIp2GeoProcessor(datasource.getName())) + ); + + // Run + expectThrows(OpenSearchException.class, () -> action.deleteDatasource(datasource.getName())); + + // Verify + verify(datasourceDao, times(2)).updateDatasource(datasource); + verify(geoIpDataDao, never()).deleteIp2GeoDataIndex(datasource.getIndices()); + verify(datasourceDao, never()).deleteDatasource(datasource); + } + + @SneakyThrows + public void testDeleteDatasource_whenDeleteFailsAfterStateIsChanged_thenRevertState() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + doThrow(new RuntimeException()).when(geoIpDataDao).deleteIp2GeoDataIndex(datasource.getIndices()); + + // Run + expectThrows(RuntimeException.class, () -> action.deleteDatasource(datasource.getName())); + + // Verify + verify(datasourceDao, times(2)).updateDatasource(datasource); + assertEquals(DatasourceState.AVAILABLE, datasource.getState()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequestTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequestTests.java new file mode 100644 index 00000000..7ee19c63 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceRequestTests.java @@ -0,0 +1,55 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; + +public class GetDatasourceRequestTests extends Ip2GeoTestCase { 
+ public void testStreamInOut_whenEmptyNames_thenSucceed() throws Exception { + String[] names = new String[0]; + GetDatasourceRequest request = new GetDatasourceRequest(names); + assertNull(request.validate()); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + GetDatasourceRequest copiedRequest = new GetDatasourceRequest(input); + + // Verify + assertArrayEquals(request.getNames(), copiedRequest.getNames()); + } + + public void testStreamInOut_whenNames_thenSucceed() throws Exception { + String[] names = { GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString() }; + GetDatasourceRequest request = new GetDatasourceRequest(names); + assertNull(request.validate()); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + GetDatasourceRequest copiedRequest = new GetDatasourceRequest(input); + + // Verify + assertArrayEquals(request.getNames(), copiedRequest.getNames()); + } + + public void testValidate_whenNull_thenError() { + GetDatasourceRequest request = new GetDatasourceRequest((String[]) null); + + // Run + ActionRequestValidationException error = request.validate(); + + // Verify + assertNotNull(error.validationErrors()); + assertFalse(error.validationErrors().isEmpty()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponseTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponseTests.java new file mode 100644 index 00000000..00b6f801 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceResponseTests.java @@ -0,0 +1,97 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import 
java.util.Arrays; +import java.util.List; +import java.util.Locale; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; + +public class GetDatasourceResponseTests extends Ip2GeoTestCase { + + public void testStreamInOut_whenValidInput_thenSucceed() throws Exception { + List datasourceList = Arrays.asList(randomDatasource(), randomDatasource()); + GetDatasourceResponse response = new GetDatasourceResponse(datasourceList); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + response.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + GetDatasourceResponse copiedResponse = new GetDatasourceResponse(input); + + // Verify + assertArrayEquals(response.getDatasources().toArray(), copiedResponse.getDatasources().toArray()); + } + + public void testToXContent_whenValidInput_thenSucceed() throws Exception { + List datasourceList = Arrays.asList(randomDatasource(), randomDatasource()); + GetDatasourceResponse response = new GetDatasourceResponse(datasourceList); + String json = response.toXContent(JsonXContent.contentBuilder(), null).toString(); + for (Datasource datasource : datasourceList) { + assertTrue(json.contains(String.format(Locale.ROOT, "\"name\":\"%s\"", datasource.getName()))); + assertTrue(json.contains(String.format(Locale.ROOT, "\"state\":\"%s\"", datasource.getState()))); + assertTrue(json.contains(String.format(Locale.ROOT, "\"endpoint\":\"%s\"", datasource.getEndpoint()))); + assertTrue( + json.contains(String.format(Locale.ROOT, "\"update_interval_in_days\":%d", datasource.getUserSchedule().getInterval())) + ); + assertTrue(json.contains(String.format(Locale.ROOT, "\"next_update_at_in_epoch_millis\""))); + 
assertTrue(json.contains(String.format(Locale.ROOT, "\"provider\":\"%s\"", datasource.getDatabase().getProvider()))); + assertTrue(json.contains(String.format(Locale.ROOT, "\"sha256_hash\":\"%s\"", datasource.getDatabase().getSha256Hash()))); + assertTrue( + json.contains( + String.format(Locale.ROOT, "\"updated_at_in_epoch_millis\":%d", datasource.getDatabase().getUpdatedAt().toEpochMilli()) + ) + ); + assertTrue(json.contains(String.format(Locale.ROOT, "\"valid_for_in_days\":%d", datasource.getDatabase().getValidForInDays()))); + for (String field : datasource.getDatabase().getFields()) { + assertTrue(json.contains(field)); + } + assertTrue( + json.contains( + String.format( + Locale.ROOT, + "\"last_succeeded_at_in_epoch_millis\":%d", + datasource.getUpdateStats().getLastSucceededAt().toEpochMilli() + ) + ) + ); + assertTrue( + json.contains( + String.format( + Locale.ROOT, + "\"last_processing_time_in_millis\":%d", + datasource.getUpdateStats().getLastProcessingTimeInMillis() + ) + ) + ); + assertTrue( + json.contains( + String.format( + Locale.ROOT, + "\"last_failed_at_in_epoch_millis\":%d", + datasource.getUpdateStats().getLastFailedAt().toEpochMilli() + ) + ) + ); + assertTrue( + json.contains( + String.format( + Locale.ROOT, + "\"last_skipped_at_in_epoch_millis\":%d", + datasource.getUpdateStats().getLastSkippedAt().toEpochMilli() + ) + ) + ); + + } + } + +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportActionTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportActionTests.java new file mode 100644 index 00000000..08eb6e22 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/GetDatasourceTransportActionTests.java @@ -0,0 +1,102 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.junit.Before; +import org.opensearch.OpenSearchException; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.tasks.Task; + +public class GetDatasourceTransportActionTests extends Ip2GeoTestCase { + private GetDatasourceTransportAction action; + + @Before + public void init() { + action = new GetDatasourceTransportAction(transportService, actionFilters, datasourceDao); + } + + public void testDoExecute_whenAll_thenSucceed() { + Task task = mock(Task.class); + GetDatasourceRequest request = new GetDatasourceRequest(new String[] { "_all" }); + ActionListener listener = mock(ActionListener.class); + + // Run + action.doExecute(task, request, listener); + + // Verify + verify(datasourceDao).getAllDatasources(any(ActionListener.class)); + } + + public void testDoExecute_whenNames_thenSucceed() { + Task task = mock(Task.class); + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + String[] datasourceNames = datasources.stream().map(Datasource::getName).toArray(String[]::new); + + GetDatasourceRequest request = new GetDatasourceRequest(datasourceNames); + ActionListener listener = mock(ActionListener.class); + + // Run + action.doExecute(task, request, listener); + + // Verify + verify(datasourceDao).getDatasources(eq(datasourceNames), any(ActionListener.class)); + } + + public void testDoExecute_whenNull_thenException() { + Task task = mock(Task.class); + GetDatasourceRequest request = new GetDatasourceRequest((String[]) null); + ActionListener listener = mock(ActionListener.class); + + // Run + Exception exception = 
expectThrows(OpenSearchException.class, () -> action.doExecute(task, request, listener)); + + // Verify + assertTrue(exception.getMessage().contains("should not be null")); + } + + public void testNewActionListener_whenOnResponse_thenSucceed() { + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + ActionListener actionListener = mock(ActionListener.class); + + // Run + action.newActionListener(actionListener).onResponse(datasources); + + // Verify + verify(actionListener).onResponse(new GetDatasourceResponse(datasources)); + } + + public void testNewActionListener_whenOnFailureWithNoSuchIndexException_thenEmptyDatasource() { + ActionListener actionListener = mock(ActionListener.class); + + // Run + action.newActionListener(actionListener).onFailure(new IndexNotFoundException("no index")); + + // Verify + verify(actionListener).onResponse(new GetDatasourceResponse(Collections.emptyList())); + } + + public void testNewActionListener_whenOnFailure_thenFails() { + ActionListener actionListener = mock(ActionListener.class); + + // Run + action.newActionListener(actionListener).onFailure(new RuntimeException()); + + // Verify + verify(actionListener).onFailure(any(RuntimeException.class)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequestTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequestTests.java new file mode 100644 index 00000000..b06651f2 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceRequestTests.java @@ -0,0 +1,131 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.util.Locale; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.Randomness; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.unit.TimeValue; +import 
org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; + +public class PutDatasourceRequestTests extends Ip2GeoTestCase { + + public void testValidate_whenInvalidUrl_thenFails() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint("invalidUrl"); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + ActionRequestValidationException exception = request.validate(); + assertEquals(1, exception.validationErrors().size()); + assertEquals("Invalid URL format is provided", exception.validationErrors().get(0)); + } + + public void testValidate_whenInvalidManifestFile_thenFails() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String domain = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(String.format(Locale.ROOT, "https://%s.com", domain)); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("Error occurred while reading a file")); + } + + public void testValidate_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + assertNull(request.validate()); + } + + public void testValidate_whenInvalidDatasourceName_thenFails() { + String invalidName = "_" + GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(invalidName); + 
request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("must not")); + } + + public void testValidate_whenZeroUpdateInterval_thenFails() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(0)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertEquals( + String.format(Locale.ROOT, "Update interval should be equal to or larger than 1 day"), + exception.validationErrors().get(0) + ); + } + + public void testValidate_whenLargeUpdateInterval_thenFail() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(30)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("should be smaller")); + } + + public void testValidate_whenInvalidUrlInsideManifest_thenFail() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrlWithInvalidUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, 
exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("Invalid URL format")); + } + + public void testStreamInOut_whenValidInput_thenSucceed() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String domain = GeospatialTestHelper.randomLowerCaseString(); + PutDatasourceRequest request = new PutDatasourceRequest(datasourceName); + request.setEndpoint(String.format(Locale.ROOT, "https://%s.com", domain)); + request.setUpdateInterval(TimeValue.timeValueDays(Randomness.get().nextInt(29) + 1)); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + PutDatasourceRequest copiedRequest = new PutDatasourceRequest(input); + + // Verify + assertEquals(request.getName(), copiedRequest.getName()); + assertEquals(request.getUpdateInterval(), copiedRequest.getUpdateInterval()); + assertEquals(request.getEndpoint(), copiedRequest.getEndpoint()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportActionTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportActionTests.java new file mode 100644 index 00000000..c443615b --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/PutDatasourceTransportActionTests.java @@ -0,0 +1,195 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; + +import java.io.IOException; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import 
org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.action.StepListener; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.exceptions.ConcurrentModificationException; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.index.engine.VersionConflictEngineException; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.tasks.Task; + +import lombok.SneakyThrows; + +public class PutDatasourceTransportActionTests extends Ip2GeoTestCase { + private PutDatasourceTransportAction action; + + @Before + public void init() { + action = new PutDatasourceTransportAction( + transportService, + actionFilters, + threadPool, + datasourceDao, + datasourceUpdateService, + ip2GeoLockService + ); + } + + @SneakyThrows + public void testDoExecute_whenFailedToAcquireLock_thenError() { + validateDoExecute(null, null, null); + } + + @SneakyThrows + public void testDoExecute_whenAcquiredLock_thenSucceed() { + validateDoExecute(randomLockModel(), null, null); + } + + @SneakyThrows + public void testDoExecute_whenExceptionBeforeAcquiringLock_thenError() { + validateDoExecute(randomLockModel(), new RuntimeException(), null); + } + + @SneakyThrows + public void testDoExecute_whenExceptionAfterAcquiringLock_thenError() { + validateDoExecute(randomLockModel(), null, new RuntimeException()); + } + + private void validateDoExecute(final LockModel lockModel, final Exception before, final Exception after) throws IOException { + Task task = mock(Task.class); + Datasource datasource = randomDatasource(); + PutDatasourceRequest request = new PutDatasourceRequest(datasource.getName()); + ActionListener listener = 
mock(ActionListener.class); + if (after != null) { + doThrow(after).when(datasourceDao).createIndexIfNotExists(any(StepListener.class)); + } + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + if (before == null) { + // Run + captor.getValue().onResponse(lockModel); + + // Verify + if (lockModel == null) { + verify(listener).onFailure(any(ConcurrentModificationException.class)); + } + if (after != null) { + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + verify(listener).onFailure(after); + } else { + verify(ip2GeoLockService, never()).releaseLock(eq(lockModel)); + } + } else { + // Run + captor.getValue().onFailure(before); + // Verify + verify(listener).onFailure(before); + } + } + + @SneakyThrows + public void testInternalDoExecute_whenValidInput_thenSucceed() { + PutDatasourceRequest request = new PutDatasourceRequest(GeospatialTestHelper.randomLowerCaseString()); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + ActionListener listener = mock(ActionListener.class); + + // Run + action.internalDoExecute(request, randomLockModel(), listener); + + // Verify + ArgumentCaptor captor = ArgumentCaptor.forClass(StepListener.class); + verify(datasourceDao).createIndexIfNotExists(captor.capture()); + + // Run + captor.getValue().onResponse(null); + // Verify + ArgumentCaptor datasourceCaptor = ArgumentCaptor.forClass(Datasource.class); + ArgumentCaptor actionListenerCaptor = ArgumentCaptor.forClass(ActionListener.class); + verify(datasourceDao).putDatasource(datasourceCaptor.capture(), actionListenerCaptor.capture()); + assertEquals(request.getName(), datasourceCaptor.getValue().getName()); + assertEquals(request.getEndpoint(), datasourceCaptor.getValue().getEndpoint()); + assertEquals(request.getUpdateInterval().days(), 
datasourceCaptor.getValue().getUserSchedule().getInterval()); + + // Run next listener.onResponse + actionListenerCaptor.getValue().onResponse(null); + // Verify + verify(listener).onResponse(new AcknowledgedResponse(true)); + } + + public void testGetIndexResponseListener_whenVersionConflict_thenFailure() { + Datasource datasource = new Datasource(); + ActionListener listener = mock(ActionListener.class); + action.getIndexResponseListener(datasource, randomLockModel(), listener) + .onFailure( + new VersionConflictEngineException( + null, + GeospatialTestHelper.randomLowerCaseString(), + GeospatialTestHelper.randomLowerCaseString() + ) + ); + verify(listener).onFailure(any(ResourceAlreadyExistsException.class)); + } + + @SneakyThrows + public void testCreateDatasource_whenInvalidState_thenUpdateStateAsFailed() { + Datasource datasource = new Datasource(); + datasource.setState(randomStateExcept(DatasourceState.CREATING)); + datasource.getUpdateStats().setLastFailedAt(null); + + // Run + action.createDatasource(datasource, mock(Runnable.class)); + + // Verify + assertEquals(DatasourceState.CREATE_FAILED, datasource.getState()); + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + verify(datasourceDao).updateDatasource(datasource); + verify(datasourceUpdateService, never()).updateOrCreateGeoIpData(any(Datasource.class), any(Runnable.class)); + } + + @SneakyThrows + public void testCreateDatasource_whenExceptionHappens_thenUpdateStateAsFailed() { + Datasource datasource = new Datasource(); + doThrow(new RuntimeException()).when(datasourceUpdateService).updateOrCreateGeoIpData(any(Datasource.class), any(Runnable.class)); + + // Run + action.createDatasource(datasource, mock(Runnable.class)); + + // Verify + assertEquals(DatasourceState.CREATE_FAILED, datasource.getState()); + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + verify(datasourceDao).updateDatasource(datasource); + } + + @SneakyThrows + public void 
testCreateDatasource_whenValidInput_thenUpdateStateAsCreating() { + Datasource datasource = new Datasource(); + + Runnable renewLock = mock(Runnable.class); + // Run + action.createDatasource(datasource, renewLock); + + // Verify + verify(datasourceUpdateService).updateOrCreateGeoIpData(datasource, renewLock); + assertEquals(DatasourceState.CREATING, datasource.getState()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandlerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandlerTests.java new file mode 100644 index 00000000..937c5532 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestDeleteDatasourceHandlerTests.java @@ -0,0 +1,49 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; + +import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.junit.Before; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.rest.FakeRestRequest; +import org.opensearch.test.rest.RestActionTestCase; + +public class RestDeleteDatasourceHandlerTests extends RestActionTestCase { + private String path; + private RestDeleteDatasourceHandler action; + + @Before + public void setupAction() { + action = new RestDeleteDatasourceHandler(); + controller().registerHandler(action); + path = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/%s"); + } + + public void testPrepareRequest_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.DELETE) + 
.withPath(String.format(Locale.ROOT, path, datasourceName)) + .build(); + AtomicBoolean isExecuted = new AtomicBoolean(false); + + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof DeleteDatasourceRequest); + DeleteDatasourceRequest deleteDatasourceRequest = (DeleteDatasourceRequest) actionRequest; + assertEquals(datasourceName, deleteDatasourceRequest.getName()); + isExecuted.set(true); + return null; + }); + + dispatchRequest(request); + assertTrue(isExecuted.get()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandlerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandlerTests.java new file mode 100644 index 00000000..e1177da6 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestGetDatasourceHandlerTests.java @@ -0,0 +1,78 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; + +import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.commons.lang3.StringUtils; +import org.junit.Before; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.rest.FakeRestRequest; +import org.opensearch.test.rest.RestActionTestCase; + +public class RestGetDatasourceHandlerTests extends RestActionTestCase { + private String PATH_FOR_ALL = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource"); + private String path; + private RestGetDatasourceHandler action; + + @Before + public void setupAction() { + action = new RestGetDatasourceHandler(); + controller().registerHandler(action); + path = String.join(URL_DELIMITER, getPluginURLPrefix(), 
"ip2geo/datasource/%s"); + } + + public void testPrepareRequest_whenNames_thenSucceed() { + String dsName1 = GeospatialTestHelper.randomLowerCaseString(); + String dsName2 = GeospatialTestHelper.randomLowerCaseString(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath(String.format(Locale.ROOT, path, StringUtils.joinWith(",", dsName1, dsName2))) + .build(); + + AtomicBoolean isExecuted = new AtomicBoolean(false); + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof GetDatasourceRequest); + GetDatasourceRequest getDatasourceRequest = (GetDatasourceRequest) actionRequest; + assertArrayEquals(new String[] { dsName1, dsName2 }, getDatasourceRequest.getNames()); + isExecuted.set(true); + return null; + }); + + // Run + dispatchRequest(request); + + // Verify + assertTrue(isExecuted.get()); + } + + public void testPrepareRequest_whenAll_thenSucceed() { + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath(PATH_FOR_ALL) + .build(); + + AtomicBoolean isExecuted = new AtomicBoolean(false); + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof GetDatasourceRequest); + GetDatasourceRequest getDatasourceRequest = (GetDatasourceRequest) actionRequest; + assertArrayEquals(new String[] {}, getDatasourceRequest.getNames()); + isExecuted.set(true); + return null; + }); + + // Run + dispatchRequest(request); + + // Verify + assertTrue(isExecuted.get()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandlerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandlerTests.java new file mode 100644 index 00000000..fcf55108 --- /dev/null +++ 
b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestPutDatasourceHandlerTests.java @@ -0,0 +1,97 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; + +import java.util.HashSet; +import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.junit.Before; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.rest.FakeRestRequest; +import org.opensearch.test.rest.RestActionTestCase; + +import lombok.SneakyThrows; + +@SuppressForbidden(reason = "unit test") +public class RestPutDatasourceHandlerTests extends RestActionTestCase { + private String path; + private RestPutDatasourceHandler action; + private URLDenyListChecker urlDenyListChecker; + + @Before + public void setupAction() { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet(Ip2GeoSettings.settings())); + urlDenyListChecker = mock(URLDenyListChecker.class); + action = new RestPutDatasourceHandler(clusterSettings, urlDenyListChecker); + controller().registerHandler(action); + path = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/%s"); + } + + @SneakyThrows + public void testPrepareRequest() { + String 
endpoint = "https://test.com"; + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String content = String.format(Locale.ROOT, "{\"endpoint\":\"%s\", \"update_interval_in_days\":1}", endpoint); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) + .withPath(String.format(Locale.ROOT, path, datasourceName)) + .withContent(new BytesArray(content), XContentType.JSON) + .build(); + AtomicBoolean isExecuted = new AtomicBoolean(false); + + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof PutDatasourceRequest); + PutDatasourceRequest putDatasourceRequest = (PutDatasourceRequest) actionRequest; + assertEquals(endpoint, putDatasourceRequest.getEndpoint()); + assertEquals(TimeValue.timeValueDays(1), putDatasourceRequest.getUpdateInterval()); + assertEquals(datasourceName, putDatasourceRequest.getName()); + isExecuted.set(true); + return null; + }); + + dispatchRequest(request); + assertTrue(isExecuted.get()); + verify(urlDenyListChecker).toUrlIfNotInDenyList(endpoint); + } + + @SneakyThrows + public void testPrepareRequestDefaultValue() { + String endpoint = "https://geoip.maps.opensearch.org/v1/geolite2-city/manifest.json"; + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) + .withPath(String.format(Locale.ROOT, path, datasourceName)) + .withContent(new BytesArray("{}"), XContentType.JSON) + .build(); + AtomicBoolean isExecuted = new AtomicBoolean(false); + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof PutDatasourceRequest); + PutDatasourceRequest putDatasourceRequest = (PutDatasourceRequest) actionRequest; + assertEquals(endpoint, putDatasourceRequest.getEndpoint()); + assertEquals(TimeValue.timeValueDays(3), 
putDatasourceRequest.getUpdateInterval()); + assertEquals(datasourceName, putDatasourceRequest.getName()); + isExecuted.set(true); + return null; + }); + + dispatchRequest(request); + assertTrue(isExecuted.get()); + verify(urlDenyListChecker).toUrlIfNotInDenyList(endpoint); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandlerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandlerTests.java new file mode 100644 index 00000000..64e26904 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/RestUpdateDatasourceHandlerTests.java @@ -0,0 +1,93 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.opensearch.geospatial.shared.URLBuilder.URL_DELIMITER; +import static org.opensearch.geospatial.shared.URLBuilder.getPluginURLPrefix; + +import java.util.Locale; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.junit.Before; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesArray; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.rest.FakeRestRequest; +import org.opensearch.test.rest.RestActionTestCase; + +import lombok.SneakyThrows; + +public class RestUpdateDatasourceHandlerTests extends RestActionTestCase { + private String path; + private RestUpdateDatasourceHandler handler; + private URLDenyListChecker urlDenyListChecker; + + @Before + public void setupAction() { + urlDenyListChecker = mock(URLDenyListChecker.class); + handler = new 
RestUpdateDatasourceHandler(urlDenyListChecker); + controller().registerHandler(handler); + path = String.join(URL_DELIMITER, getPluginURLPrefix(), "ip2geo/datasource/%s/_settings"); + } + + @SneakyThrows + public void testPrepareRequest_whenValidInput_thenSucceed() { + String endpoint = "https://test.com"; + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String content = String.format(Locale.ROOT, "{\"endpoint\":\"%s\", \"update_interval_in_days\":1}", endpoint); + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) + .withPath(String.format(Locale.ROOT, path, datasourceName)) + .withContent(new BytesArray(content), XContentType.JSON) + .build(); + AtomicBoolean isExecuted = new AtomicBoolean(false); + + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof UpdateDatasourceRequest); + UpdateDatasourceRequest updateDatasourceRequest = (UpdateDatasourceRequest) actionRequest; + assertEquals("https://test.com", updateDatasourceRequest.getEndpoint()); + assertEquals(TimeValue.timeValueDays(1), updateDatasourceRequest.getUpdateInterval()); + assertEquals(datasourceName, updateDatasourceRequest.getName()); + isExecuted.set(true); + return null; + }); + + dispatchRequest(request); + assertTrue(isExecuted.get()); + verify(urlDenyListChecker).toUrlIfNotInDenyList(endpoint); + } + + @SneakyThrows + public void testPrepareRequest_whenNullInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String content = "{}"; + RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT) + .withPath(String.format(Locale.ROOT, path, datasourceName)) + .withContent(new BytesArray(content), XContentType.JSON) + .build(); + AtomicBoolean isExecuted = new AtomicBoolean(false); + + verifyingClient.setExecuteLocallyVerifier((actionResponse, actionRequest) -> { + 
assertTrue(actionRequest instanceof UpdateDatasourceRequest); + UpdateDatasourceRequest updateDatasourceRequest = (UpdateDatasourceRequest) actionRequest; + assertNull(updateDatasourceRequest.getEndpoint()); + assertNull(updateDatasourceRequest.getUpdateInterval()); + assertEquals(datasourceName, updateDatasourceRequest.getName()); + isExecuted.set(true); + return null; + }); + + dispatchRequest(request); + assertTrue(isExecuted.get()); + verify(urlDenyListChecker, never()).toUrlIfNotInDenyList(anyString()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceIT.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceIT.java new file mode 100644 index 00000000..483f0d30 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceIT.java @@ -0,0 +1,152 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.io.IOException; +import java.time.Duration; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.opensearch.client.ResponseException; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.geospatial.GeospatialRestTestCase; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoDataServer; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; + +import lombok.SneakyThrows; + +public class UpdateDatasourceIT extends GeospatialRestTestCase { + // Use this value in resource name to avoid name conflict among tests + private static final String PREFIX = UpdateDatasourceIT.class.getSimpleName().toLowerCase(Locale.ROOT); + + @BeforeClass + public static void start() { + Ip2GeoDataServer.start(); + } + + @AfterClass + public static void stop() 
{ + Ip2GeoDataServer.stop(); + } + + @SneakyThrows + public void testUpdateDatasource_whenPrivateNetwork_thenBlocked() { + // Reset deny list to allow private network access during test + updateClusterSetting(Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Collections.emptyList())); + + boolean isDatasourceCreated = false; + String datasourceName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + try { + Map datasourceProperties = Map.of( + PutDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + Ip2GeoDataServer.getEndpointCountry() + ); + + // Create datasource and wait for it to be available + createDatasource(datasourceName, datasourceProperties); + isDatasourceCreated = true; + waitForDatasourceToBeAvailable(datasourceName, Duration.ofSeconds(10)); + + // Revert deny list to its default value and private network ip should be blocked + updateClusterSetting( + Map.of( + Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), + Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.get(Settings.EMPTY) + ) + ); + int updateIntervalInDays = 1; + ResponseException exception = expectThrows( + ResponseException.class, + () -> updateDatasourceEndpoint(datasourceName, "http://127.0.0.1:9200/city/manifest_local.json", updateIntervalInDays) + ); + assertEquals(400, exception.getResponse().getStatusLine().getStatusCode()); + assertTrue(exception.getMessage().contains("blocked by deny list")); + } finally { + if (isDatasourceCreated) { + deleteDatasource(datasourceName, 3); + } + } + } + + @SneakyThrows + public void testUpdateDatasource_whenValidInput_thenUpdated() { + // Reset deny list to allow private network access during test + updateClusterSetting(Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Collections.emptyList())); + + boolean isDatasourceCreated = false; + String datasourceName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + try { + Map datasourceProperties = Map.of( + PutDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + 
Ip2GeoDataServer.getEndpointCountry() + ); + + // Create datasource and wait for it to be available + createDatasource(datasourceName, datasourceProperties); + isDatasourceCreated = true; + waitForDatasourceToBeAvailable(datasourceName, Duration.ofSeconds(10)); + + int updateIntervalInDays = 1; + updateDatasourceEndpoint(datasourceName, Ip2GeoDataServer.getEndpointCity(), updateIntervalInDays); + List> datasources = (List>) getDatasource(datasourceName).get("datasources"); + + assertEquals(Ip2GeoDataServer.getEndpointCity(), datasources.get(0).get("endpoint")); + assertEquals(updateIntervalInDays, datasources.get(0).get("update_interval_in_days")); + } finally { + if (isDatasourceCreated) { + deleteDatasource(datasourceName, 3); + } + } + } + + @SneakyThrows + public void testUpdateDatasource_whenIncompatibleFields_thenFails() { + // Reset deny list to allow private network access during test + updateClusterSetting(Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Collections.emptyList())); + + boolean isDatasourceCreated = false; + String datasourceName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + try { + Map datasourceProperties = Map.of( + PutDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + Ip2GeoDataServer.getEndpointCity() + ); + + // Create datasource and wait for it to be available + createDatasource(datasourceName, datasourceProperties); + isDatasourceCreated = true; + waitForDatasourceToBeAvailable(datasourceName, Duration.ofSeconds(10)); + + // Update should fail as country data does not have every fields that city data has + int updateIntervalInDays = 1; + ResponseException exception = expectThrows( + ResponseException.class, + () -> updateDatasourceEndpoint(datasourceName, Ip2GeoDataServer.getEndpointCountry(), updateIntervalInDays) + ); + assertEquals(RestStatus.BAD_REQUEST.getStatus(), exception.getResponse().getStatusLine().getStatusCode()); + } finally { + if (isDatasourceCreated) { + deleteDatasource(datasourceName, 
3); + } + } + } + + private void updateDatasourceEndpoint(final String datasourceName, final String endpoint, final int updateInterval) throws IOException { + Map properties = Map.of( + UpdateDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + endpoint, + UpdateDatasourceRequest.UPDATE_INTERVAL_IN_DAYS_FIELD.getPreferredName(), + updateInterval + ); + updateDatasource(datasourceName, properties); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequestTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequestTests.java new file mode 100644 index 00000000..4c346d4c --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceRequestTests.java @@ -0,0 +1,135 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import java.util.Locale; + +import org.opensearch.action.ActionRequestValidationException; +import org.opensearch.common.Randomness; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.common.io.stream.BytesStreamInput; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; + +import lombok.SneakyThrows; + +public class UpdateDatasourceRequestTests extends Ip2GeoTestCase { + + public void testValidate_whenNullValues_thenFails() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertEquals("no values to update", exception.validationErrors().get(0)); + } + + public void testValidate_whenInvalidUrl_thenFails() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); 
+ UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setEndpoint("invalidUrl"); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertEquals("Invalid URL format is provided", exception.validationErrors().get(0)); + } + + public void testValidate_whenInvalidManifestFile_thenFails() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String domain = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setEndpoint(String.format(Locale.ROOT, "https://%s.com", domain)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("Error occurred while reading a file")); + } + + @SneakyThrows + public void testValidate_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run and verify + assertNull(request.validate()); + } + + public void testValidate_whenInvalidDatasourceName_thenFails() { + String invalidName = "_" + GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(invalidName); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("no such datasource")); + } + + @SneakyThrows + public void testValidate_whenZeroUpdateInterval_thenFails() { + 
String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setUpdateInterval(TimeValue.timeValueDays(0)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertEquals( + String.format(Locale.ROOT, "Update interval should be equal to or larger than 1 day"), + exception.validationErrors().get(0) + ); + } + + @SneakyThrows + public void testValidate_whenInvalidUrlInsideManifest_thenFail() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setEndpoint(sampleManifestUrlWithInvalidUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + // Run + ActionRequestValidationException exception = request.validate(); + + // Verify + assertEquals(1, exception.validationErrors().size()); + assertTrue(exception.validationErrors().get(0).contains("Invalid URL format")); + } + + @SneakyThrows + public void testStreamInOut_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String domain = GeospatialTestHelper.randomLowerCaseString(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasourceName); + request.setEndpoint(String.format(Locale.ROOT, "https://%s.com", domain)); + request.setUpdateInterval(TimeValue.timeValueDays(Randomness.get().nextInt(29) + 1)); + + // Run + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + BytesStreamInput input = new BytesStreamInput(output.bytes().toBytesRef().bytes); + UpdateDatasourceRequest copiedRequest = new UpdateDatasourceRequest(input); + + // Verify + assertEquals(request, copiedRequest); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportActionTests.java 
b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportActionTests.java new file mode 100644 index 00000000..2b1c941d --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/action/UpdateDatasourceTransportActionTests.java @@ -0,0 +1,305 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.action; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyLong; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.security.InvalidParameterException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.List; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.OpenSearchException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.exceptions.IncompatibleDatasourceException; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceTask; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.tasks.Task; + +import lombok.SneakyThrows; + +public class UpdateDatasourceTransportActionTests extends Ip2GeoTestCase { + private UpdateDatasourceTransportAction action; + + @Before + public void init() { + action = new UpdateDatasourceTransportAction( + transportService, + actionFilters, + ip2GeoLockService, + datasourceDao, + 
datasourceUpdateService, + threadPool + ); + } + + public void testDoExecute_whenFailedToAcquireLock_thenError() { + validateDoExecuteWithLockError(null); + } + + public void testDoExecute_whenExceptionToAcquireLock_thenError() { + validateDoExecuteWithLockError(new RuntimeException()); + } + + private void validateDoExecuteWithLockError(final Exception exception) { + Task task = mock(Task.class); + Datasource datasource = randomDatasource(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + ActionListener listener = mock(ActionListener.class); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + if (exception == null) { + // Run + captor.getValue().onResponse(null); + // Verify + verify(listener).onFailure(any(OpenSearchException.class)); + } else { + // Run + captor.getValue().onFailure(exception); + // Verify + verify(listener).onFailure(exception); + } + } + + @SneakyThrows + public void testDoExecute_whenValidInput_thenUpdate() { + Datasource datasource = randomDatasource(Instant.now().minusSeconds(60)); + datasource.setState(DatasourceState.AVAILABLE); + datasource.setTask(DatasourceTask.DELETE_UNUSED_INDICES); + Instant originalStartTime = datasource.getSchedule().getStartTime(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + request.setEndpoint(sampleManifestUrl()); + request.setUpdateInterval(TimeValue.timeValueDays(datasource.getSchedule().getInterval())); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + when(datasourceUpdateService.getHeaderFields(request.getEndpoint())).thenReturn(datasource.getDatabase().getFields()); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + 
+ // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + verify(datasourceDao).getDatasource(datasource.getName()); + verify(datasourceDao).updateDatasource(datasource); + verify(datasourceUpdateService).getHeaderFields(request.getEndpoint()); + assertEquals(request.getEndpoint(), datasource.getEndpoint()); + assertEquals(request.getUpdateInterval().days(), datasource.getUserSchedule().getInterval()); + verify(listener).onResponse(new AcknowledgedResponse(true)); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + assertTrue(originalStartTime.isBefore(datasource.getSchedule().getStartTime())); + assertEquals(DatasourceTask.ALL, datasource.getTask()); + } + + @SneakyThrows + public void testDoExecute_whenNoChangesInValues_thenNoUpdate() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + request.setEndpoint(datasource.getEndpoint()); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + verify(datasourceDao).getDatasource(datasource.getName()); + verify(datasourceUpdateService, never()).getHeaderFields(anyString()); + verify(datasourceDao, never()).updateDatasource(datasource); + verify(listener).onResponse(new 
AcknowledgedResponse(true)); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + + @SneakyThrows + public void testDoExecute_whenNoDatasource_thenError() { + Datasource datasource = randomDatasource(); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + + Task task = mock(Task.class); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertEquals(ResourceNotFoundException.class, exceptionCaptor.getValue().getClass()); + exceptionCaptor.getValue().getMessage().contains("no such datasource exist"); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + + @SneakyThrows + public void testDoExecute_whenNotInAvailableState_thenError() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.CREATE_FAILED); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + request.setEndpoint(datasource.getEndpoint()); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + ArgumentCaptor exceptionCaptor = 
ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertEquals(IllegalArgumentException.class, exceptionCaptor.getValue().getClass()); + exceptionCaptor.getValue().getMessage().contains("not in an available"); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + + @SneakyThrows + public void testDoExecute_whenIncompatibleFields_thenError() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + request.setEndpoint(sampleManifestUrl()); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + List newFields = datasource.getDatabase().getFields().subList(0, 0); + when(datasourceUpdateService.getHeaderFields(request.getEndpoint())).thenReturn(newFields); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertEquals(IncompatibleDatasourceException.class, exceptionCaptor.getValue().getClass()); + exceptionCaptor.getValue().getMessage().contains("does not contain"); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + + @SneakyThrows + public void testDoExecute_whenLargeUpdateInterval_thenError() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + 
request.setUpdateInterval(TimeValue.timeValueDays(datasource.getDatabase().getValidForInDays())); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + ArgumentCaptor exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertEquals(InvalidParameterException.class, exceptionCaptor.getValue().getClass()); + exceptionCaptor.getValue().getMessage().contains("should be smaller"); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } + + @SneakyThrows + public void testDoExecute_whenExpireWithNewUpdateInterval_thenError() { + Datasource datasource = randomDatasource(); + datasource.getUpdateStats().setLastSkippedAt(null); + datasource.getUpdateStats().setLastSucceededAt(Instant.now().minus(datasource.getDatabase().getValidForInDays(), ChronoUnit.DAYS)); + UpdateDatasourceRequest request = new UpdateDatasourceRequest(datasource.getName()); + request.setUpdateInterval(TimeValue.timeValueDays(1)); + + Task task = mock(Task.class); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + ActionListener listener = mock(ActionListener.class); + LockModel lockModel = randomLockModel(); + + // Run + action.doExecute(task, request, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(ip2GeoLockService).acquireLock(eq(datasource.getName()), anyLong(), captor.capture()); + + // Run + captor.getValue().onResponse(lockModel); + + // Verify + ArgumentCaptor 
exceptionCaptor = ArgumentCaptor.forClass(Exception.class); + verify(listener).onFailure(exceptionCaptor.capture()); + assertEquals(IllegalArgumentException.class, exceptionCaptor.getValue().getClass()); + exceptionCaptor.getValue().getMessage().contains("will expire"); + verify(ip2GeoLockService).releaseLock(eq(lockModel)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifestTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifestTests.java new file mode 100644 index 00000000..c4cd3edb --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/common/DatasourceManifestTests.java @@ -0,0 +1,38 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.FileInputStream; +import java.net.URLConnection; + +import org.opensearch.common.SuppressForbidden; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.shared.Constants; + +import lombok.SneakyThrows; + +@SuppressForbidden(reason = "unit test") +public class DatasourceManifestTests extends Ip2GeoTestCase { + + @SneakyThrows + public void testInternalBuild_whenCalled_thenCorrectUserAgentValueIsSet() { + URLConnection connection = mock(URLConnection.class); + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + when(connection.getInputStream()).thenReturn(new FileInputStream(manifestFile)); + + // Run + DatasourceManifest manifest = DatasourceManifest.Builder.internalBuild(connection); + + // Verify + verify(connection).addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + assertEquals("https://test.com/db.zip", manifest.getUrl()); + } +} diff --git 
a/src/test/java/org/opensearch/geospatial/ip2geo/common/InputFormatValidatorTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/common/InputFormatValidatorTests.java new file mode 100644 index 00000000..247585d2 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/common/InputFormatValidatorTests.java @@ -0,0 +1,71 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.opensearch.common.Randomness; +import org.opensearch.core.common.Strings; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; + +public class InputFormatValidatorTests extends Ip2GeoTestCase { + public void testValidateDatasourceName_whenValidName_thenSucceed() { + ParameterValidator inputFormatValidator = new ParameterValidator(); + String validDatasourceName = GeospatialTestHelper.randomLowerCaseString(); + + // Run + List errorMsgs = inputFormatValidator.validateDatasourceName(validDatasourceName); + + // Verify + assertTrue(errorMsgs.isEmpty()); + } + + public void testValidate_whenInvalidDatasourceNames_thenFails() { + ParameterValidator inputFormatValidator = new ParameterValidator(); + String validDatasourceName = GeospatialTestHelper.randomLowerCaseString(); + String fileNameChar = validDatasourceName + Strings.INVALID_FILENAME_CHARS.stream() + .skip(Randomness.get().nextInt(Strings.INVALID_FILENAME_CHARS.size() - 1)) + .findFirst(); + String startsWith = Arrays.asList("_", "-", "+").get(Randomness.get().nextInt(3)) + validDatasourceName; + String empty = ""; + String hash = validDatasourceName + "#"; + String colon = validDatasourceName + ":"; + StringBuilder longName = new StringBuilder(); + while (longName.length() <= 127) { + longName.append(GeospatialTestHelper.randomLowerCaseString()); + } + String point = Arrays.asList(".", 
"..").get(Randomness.get().nextInt(2)); + Map nameToError = Map.of( + fileNameChar, + "not contain the following characters", + empty, + "must not be empty", + hash, + "must not contain '#'", + colon, + "must not contain ':'", + startsWith, + "must not start with", + longName.toString(), + "name is too long", + point, + "must not be '.' or '..'" + ); + + for (Map.Entry entry : nameToError.entrySet()) { + + // Run + List errorMsgs = inputFormatValidator.validateDatasourceName(entry.getKey()); + + // Verify + assertFalse(errorMsgs.isEmpty()); + assertTrue(errorMsgs.get(0).contains(entry.getValue())); + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockServiceTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockServiceTests.java new file mode 100644 index 00000000..74206f68 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoLockServiceTests.java @@ -0,0 +1,116 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import static org.mockito.Mockito.mock; +import static org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService.LOCK_DURATION_IN_SECONDS; +import static org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService.RENEW_AFTER_IN_SECONDS; + +import java.time.Instant; +import java.util.concurrent.atomic.AtomicReference; + +import org.junit.Before; +import org.opensearch.action.DocWriteResponse; +import org.opensearch.action.update.UpdateRequest; +import org.opensearch.action.update.UpdateResponse; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.jobscheduler.spi.LockModel; + +public class Ip2GeoLockServiceTests extends Ip2GeoTestCase { + private Ip2GeoLockService ip2GeoLockService; + private 
Ip2GeoLockService noOpsLockService; + + @Before + public void init() { + ip2GeoLockService = new Ip2GeoLockService(clusterService, verifyingClient); + noOpsLockService = new Ip2GeoLockService(clusterService, client); + } + + public void testAcquireLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + noOpsLockService.acquireLock(GeospatialTestHelper.randomLowerCaseString(), randomPositiveLong(), mock(ActionListener.class)); + } + + public void testAcquireLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertTrue(ip2GeoLockService.acquireLock(null, null).isEmpty()); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testReleaseLock_whenValidInput_thenSucceed() { + // Cannot test because LockService is final class + // Simply calling method to increase coverage + noOpsLockService.releaseLock(null); + } + + public void testRenewLock_whenCalled_thenNotBlocked() { + long expectedDurationInMillis = 1000; + Instant before = Instant.now(); + assertNull(ip2GeoLockService.renewLock(null)); + Instant after = Instant.now(); + assertTrue(after.toEpochMilli() - before.toEpochMilli() < expectedDurationInMillis); + } + + public void testGetRenewLockRunnable_whenLockIsFresh_thenDoNotRenew() { + LockModel lockModel = new LockModel( + GeospatialTestHelper.randomLowerCaseString(), + GeospatialTestHelper.randomLowerCaseString(), + Instant.now(), + LOCK_DURATION_IN_SECONDS, + false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + GeospatialTestHelper.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + 
AtomicReference reference = new AtomicReference<>(lockModel); + ip2GeoLockService.getRenewLockRunnable(reference).run(); + assertEquals(lockModel, reference.get()); + } + + public void testGetRenewLockRunnable_whenLockIsStale_thenRenew() { + LockModel lockModel = new LockModel( + GeospatialTestHelper.randomLowerCaseString(), + GeospatialTestHelper.randomLowerCaseString(), + Instant.now().minusSeconds(RENEW_AFTER_IN_SECONDS), + LOCK_DURATION_IN_SECONDS, + false + ); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verifying + assertTrue(actionRequest instanceof UpdateRequest); + return new UpdateResponse( + mock(ShardId.class), + GeospatialTestHelper.randomLowerCaseString(), + randomPositiveLong(), + randomPositiveLong(), + randomPositiveLong(), + DocWriteResponse.Result.UPDATED + ); + }); + + AtomicReference reference = new AtomicReference<>(lockModel); + ip2GeoLockService.getRenewLockRunnable(reference).run(); + assertNotEquals(lockModel, reference.get()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettingsTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettingsTests.java new file mode 100644 index 00000000..066c10c8 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/common/Ip2GeoSettingsTests.java @@ -0,0 +1,53 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.util.Arrays; +import java.util.List; + +import org.opensearch.common.settings.Settings; +import org.opensearch.test.OpenSearchTestCase; + +public class Ip2GeoSettingsTests extends OpenSearchTestCase { + public void testValidateInvalidUrl() { + Ip2GeoSettings.DatasourceEndpointValidator validator = new Ip2GeoSettings.DatasourceEndpointValidator(); + Exception e = expectThrows(IllegalArgumentException.class, () -> validator.validate("InvalidUrl")); + assertEquals("Invalid URL format is provided", 
e.getMessage()); + } + + public void testValidateValidUrl() { + Ip2GeoSettings.DatasourceEndpointValidator validator = new Ip2GeoSettings.DatasourceEndpointValidator(); + validator.validate("https://test.com"); + } + + public void testDenyListDefaultValue() { + List privateNetworks = Arrays.asList( + "127.0.0.0/8", + "169.254.0.0/16", + "10.0.0.0/8", + "172.16.0.0/12", + "192.168.0.0/16", + "0.0.0.0/8", + "100.64.0.0/10", + "192.0.0.0/24", + "192.0.2.0/24", + "198.18.0.0/15", + "192.88.99.0/24", + "198.51.100.0/24", + "203.0.113.0/24", + "224.0.0.0/4", + "240.0.0.0/4", + "255.255.255.255/32", + "::1/128", + "fe80::/10", + "fc00::/7", + "::/128", + "2001:db8::/32", + "ff00::/8" + ); + assertEquals(privateNetworks, Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.get(Settings.EMPTY)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/common/URLDenyListCheckerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/common/URLDenyListCheckerTests.java new file mode 100644 index 00000000..f1104ab8 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/common/URLDenyListCheckerTests.java @@ -0,0 +1,73 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.common; + +import java.util.Arrays; +import java.util.Locale; +import java.util.Map; + +import org.junit.Before; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.Randomness; +import org.opensearch.geospatial.ClusterSettingHelper; +import org.opensearch.node.Node; +import org.opensearch.test.OpenSearchTestCase; + +import lombok.SneakyThrows; + +public class URLDenyListCheckerTests extends OpenSearchTestCase { + private ClusterSettingHelper clusterSettingHelper; + + @Before + public void init() { + clusterSettingHelper = new ClusterSettingHelper(); + } + + @SneakyThrows + public void testToUrlIfNotInDenyListWithBlockedAddress() { + Node mockNode = 
clusterSettingHelper.createMockNode( + Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Arrays.asList("127.0.0.0/8")) + ); + mockNode.start(); + try { + ClusterService clusterService = mockNode.injector().getInstance(ClusterService.class); + URLDenyListChecker urlDenyListChecker = new URLDenyListChecker(clusterService.getClusterSettings()); + String endpoint = String.format( + Locale.ROOT, + "https://127.%d.%d.%d/v1/manifest.json", + Randomness.get().nextInt(256), + Randomness.get().nextInt(256), + Randomness.get().nextInt(256) + ); + expectThrows(IllegalArgumentException.class, () -> urlDenyListChecker.toUrlIfNotInDenyList(endpoint)); + } finally { + mockNode.close(); + } + } + + @SneakyThrows + public void testToUrlIfNotInDenyListWithNonBlockedAddress() { + Node mockNode = clusterSettingHelper.createMockNode( + Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Arrays.asList("127.0.0.0/8")) + ); + mockNode.start(); + try { + ClusterService clusterService = mockNode.injector().getInstance(ClusterService.class); + URLDenyListChecker urlDenyListChecker = new URLDenyListChecker(clusterService.getClusterSettings()); + String endpoint = String.format( + Locale.ROOT, + "https://128.%d.%d.%d/v1/manifest.json", + Randomness.get().nextInt(256), + Randomness.get().nextInt(256), + Randomness.get().nextInt(256) + ); + // Expect no exception + urlDenyListChecker.toUrlIfNotInDenyList(endpoint); + } finally { + mockNode.close(); + } + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDaoTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDaoTests.java new file mode 100644 index 00000000..500c3397 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/dao/DatasourceDaoTests.java @@ -0,0 +1,392 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import static org.mockito.ArgumentMatchers.eq; +import static 
org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.List; + +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.ResourceAlreadyExistsException; +import org.opensearch.ResourceNotFoundException; +import org.opensearch.action.DocWriteRequest; +import org.opensearch.action.StepListener; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.delete.DeleteRequest; +import org.opensearch.action.delete.DeleteResponse; +import org.opensearch.action.get.GetRequest; +import org.opensearch.action.get.GetResponse; +import org.opensearch.action.get.MultiGetItemResponse; +import org.opensearch.action.get.MultiGetRequest; +import org.opensearch.action.get.MultiGetResponse; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.WriteRequest; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.common.Randomness; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.action.ActionListener; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; +import 
org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; + +import lombok.SneakyThrows; + +public class DatasourceDaoTests extends Ip2GeoTestCase { + private DatasourceDao datasourceDao; + + @Before + public void init() { + datasourceDao = new DatasourceDao(verifyingClient, clusterService); + } + + public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsNotCalled() { + when(metadata.hasIndex(DatasourceExtension.JOB_INDEX_NAME)).thenReturn(true); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); + + // Run + StepListener stepListener = new StepListener<>(); + datasourceDao.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenIndexExist_thenCreateRequestIsCalled() { + when(metadata.hasIndex(DatasourceExtension.JOB_INDEX_NAME)).thenReturn(false); + + // Verify + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof CreateIndexRequest); + CreateIndexRequest request = (CreateIndexRequest) actionRequest; + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.index()); + assertEquals("1", request.settings().get("index.number_of_shards")); + assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); + assertEquals("true", request.settings().get("index.hidden")); + assertNotNull(request.mappings()); + return null; + }); + + // Run + StepListener stepListener = new StepListener<>(); + datasourceDao.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenIndexCreatedAlready_thenExceptionIsIgnored() { + when(metadata.hasIndex(DatasourceExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier( + (actionResponse, actionRequest) -> { throw new 
ResourceAlreadyExistsException(DatasourceExtension.JOB_INDEX_NAME); } + ); + + // Run + StepListener stepListener = new StepListener<>(); + datasourceDao.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + stepListener.result(); + } + + public void testCreateIndexIfNotExists_whenExceptionIsThrown_thenExceptionIsThrown() { + when(metadata.hasIndex(DatasourceExtension.JOB_INDEX_NAME)).thenReturn(false); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException(); }); + + // Run + StepListener stepListener = new StepListener<>(); + datasourceDao.createIndexIfNotExists(stepListener); + + // Verify stepListener is called + expectThrows(RuntimeException.class, () -> stepListener.result()); + } + + public void testUpdateDatasource_whenValidInput_thenSucceed() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Datasource datasource = new Datasource( + datasourceName, + new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS), + "https://test.com" + ); + Instant previousTime = Instant.now().minusMillis(1); + datasource.setLastUpdateTime(previousTime); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest request = (IndexRequest) actionRequest; + assertEquals(datasource.getName(), request.id()); + assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.index()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + return null; + }); + + datasourceDao.updateDatasource(datasource); + assertTrue(previousTime.isBefore(datasource.getLastUpdateTime())); + } + + @SneakyThrows + public void testPutDatasource_whenValidInpu_thenSucceed() { + Datasource datasource = randomDatasource(); + Instant previousTime = Instant.now().minusMillis(1); + 
datasource.setLastUpdateTime(previousTime); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof IndexRequest); + IndexRequest indexRequest = (IndexRequest) actionRequest; + assertEquals(DatasourceExtension.JOB_INDEX_NAME, indexRequest.index()); + assertEquals(datasource.getName(), indexRequest.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, indexRequest.getRefreshPolicy()); + assertEquals(DocWriteRequest.OpType.CREATE, indexRequest.opType()); + return null; + }); + + datasourceDao.putDatasource(datasource, mock(ActionListener.class)); + assertTrue(previousTime.isBefore(datasource.getLastUpdateTime())); + } + + public void testGetDatasource_whenException_thenNull() throws Exception { + Datasource datasource = setupClientForGetRequest(true, new IndexNotFoundException(DatasourceExtension.JOB_INDEX_NAME)); + assertNull(datasourceDao.getDatasource(datasource.getName())); + } + + public void testGetDatasource_whenExist_thenReturnDatasource() throws Exception { + Datasource datasource = setupClientForGetRequest(true, null); + assertEquals(datasource, datasourceDao.getDatasource(datasource.getName())); + } + + public void testGetDatasource_whenNotExist_thenNull() throws Exception { + Datasource datasource = setupClientForGetRequest(false, null); + assertNull(datasourceDao.getDatasource(datasource.getName())); + } + + public void testGetDatasource_whenExistWithListener_thenListenerIsCalledWithDatasource() { + Datasource datasource = setupClientForGetRequest(true, null); + ActionListener listener = mock(ActionListener.class); + datasourceDao.getDatasource(datasource.getName(), listener); + verify(listener).onResponse(eq(datasource)); + } + + public void testGetDatasource_whenNotExistWithListener_thenListenerIsCalledWithNull() { + Datasource datasource = setupClientForGetRequest(false, null); + ActionListener listener = mock(ActionListener.class); + datasourceDao.getDatasource(datasource.getName(), 
listener); + verify(listener).onResponse(null); + } + + private Datasource setupClientForGetRequest(final boolean isExist, final RuntimeException exception) { + Datasource datasource = randomDatasource(); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof GetRequest); + GetRequest request = (GetRequest) actionRequest; + assertEquals(datasource.getName(), request.id()); + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.index()); + GetResponse response = getMockedGetResponse(isExist ? datasource : null); + if (exception != null) { + throw exception; + } + return response; + }); + return datasource; + } + + public void testDeleteDatasource_whenValidInput_thenSucceed() { + Datasource datasource = randomDatasource(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof DeleteRequest); + DeleteRequest request = (DeleteRequest) actionRequest; + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.index()); + assertEquals(DocWriteRequest.OpType.DELETE, request.opType()); + assertEquals(datasource.getName(), request.id()); + assertEquals(WriteRequest.RefreshPolicy.IMMEDIATE, request.getRefreshPolicy()); + + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.OK); + return response; + }); + + // Run + datasourceDao.deleteDatasource(datasource); + } + + public void testDeleteDatasource_whenIndexNotFound_thenThrowException() { + Datasource datasource = randomDatasource(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + DeleteResponse response = mock(DeleteResponse.class); + when(response.status()).thenReturn(RestStatus.NOT_FOUND); + return response; + }); + + // Run + expectThrows(ResourceNotFoundException.class, () -> datasourceDao.deleteDatasource(datasource)); + } + + public void testGetDatasources_whenValidInput_thenSucceed() { + List datasources = 
Arrays.asList(randomDatasource(), randomDatasource()); + String[] names = datasources.stream().map(Datasource::getName).toArray(String[]::new); + ActionListener> listener = mock(ActionListener.class); + MultiGetItemResponse[] multiGetItemResponses = datasources.stream().map(datasource -> { + GetResponse getResponse = getMockedGetResponse(datasource); + MultiGetItemResponse multiGetItemResponse = mock(MultiGetItemResponse.class); + when(multiGetItemResponse.getResponse()).thenReturn(getResponse); + return multiGetItemResponse; + }).toArray(MultiGetItemResponse[]::new); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof MultiGetRequest); + MultiGetRequest request = (MultiGetRequest) actionRequest; + assertEquals(2, request.getItems().size()); + for (MultiGetRequest.Item item : request.getItems()) { + assertEquals(DatasourceExtension.JOB_INDEX_NAME, item.index()); + assertTrue(datasources.stream().filter(datasource -> datasource.getName().equals(item.id())).findAny().isPresent()); + } + + MultiGetResponse response = mock(MultiGetResponse.class); + when(response.getResponses()).thenReturn(multiGetItemResponses); + return response; + }); + + // Run + datasourceDao.getDatasources(names, listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + verify(listener).onResponse(captor.capture()); + assertEquals(datasources, captor.getValue()); + + } + + public void testGetAllDatasources_whenAsynchronous_thenSucceed() { + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + ActionListener> listener = mock(ActionListener.class); + SearchHits searchHits = getMockedSearchHits(datasources); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof SearchRequest); + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(1, request.indices().length); + 
assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.indices()[0]); + assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); + assertEquals(1000, request.source().size()); + assertEquals(Preference.PRIMARY.type(), request.preference()); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + return response; + }); + + // Run + datasourceDao.getAllDatasources(listener); + + // Verify + ArgumentCaptor> captor = ArgumentCaptor.forClass(List.class); + verify(listener).onResponse(captor.capture()); + assertEquals(datasources, captor.getValue()); + } + + public void testGetAllDatasources_whenSynchronous_thenSucceed() { + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + SearchHits searchHits = getMockedSearchHits(datasources); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof SearchRequest); + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.indices()[0]); + assertEquals(QueryBuilders.matchAllQuery(), request.source().query()); + assertEquals(1000, request.source().size()); + assertEquals(Preference.PRIMARY.type(), request.preference()); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + return response; + }); + + // Run + datasourceDao.getAllDatasources(); + + // Verify + assertEquals(datasources, datasourceDao.getAllDatasources()); + } + + public void testUpdateDatasource_whenValidInput_thenUpdate() { + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + // Verify + assertTrue(actionRequest instanceof BulkRequest); + BulkRequest bulkRequest = (BulkRequest) actionRequest; + assertEquals(2, bulkRequest.requests().size()); + for (int 
i = 0; i < bulkRequest.requests().size(); i++) { + IndexRequest request = (IndexRequest) bulkRequest.requests().get(i); + assertEquals(DatasourceExtension.JOB_INDEX_NAME, request.index()); + assertEquals(datasources.get(i).getName(), request.id()); + assertEquals(DocWriteRequest.OpType.INDEX, request.opType()); + assertTrue(request.source().utf8ToString().contains(datasources.get(i).getEndpoint())); + } + return null; + }); + + datasourceDao.updateDatasource(datasources, mock(ActionListener.class)); + } + + private SearchHits getMockedSearchHits(List datasources) { + SearchHit[] searchHitArray = datasources.stream().map(this::toBytesReference).map(this::toSearchHit).toArray(SearchHit[]::new); + + return new SearchHits(searchHitArray, new TotalHits(1l, TotalHits.Relation.EQUAL_TO), 1); + } + + private GetResponse getMockedGetResponse(Datasource datasource) { + GetResponse response = mock(GetResponse.class); + when(response.isExists()).thenReturn(datasource != null); + when(response.getSourceAsBytesRef()).thenReturn(toBytesReference(datasource)); + return response; + } + + private BytesReference toBytesReference(Datasource datasource) { + if (datasource == null) { + return null; + } + + try { + return BytesReference.bytes(datasource.toXContent(JsonXContent.contentBuilder(), null)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private SearchHit toSearchHit(BytesReference bytesReference) { + SearchHit searchHit = new SearchHit(Randomness.get().nextInt()); + searchHit.sourceRef(bytesReference); + return searchHit; + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDaoTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDaoTests.java new file mode 100644 index 00000000..b9cf5d1a --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/dao/GeoIpDataDaoTests.java @@ -0,0 +1,292 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package 
org.opensearch.geospatial.ip2geo.dao; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.io.File; +import java.io.FileInputStream; +import java.net.URLConnection; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.util.Iterator; +import java.util.Locale; +import java.util.Map; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.apache.commons.csv.CSVRecord; +import org.apache.lucene.search.TotalHits; +import org.junit.Before; +import org.opensearch.OpenSearchException; +import org.opensearch.action.admin.indices.create.CreateIndexRequest; +import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; +import org.opensearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.opensearch.action.admin.indices.refresh.RefreshRequest; +import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.bulk.BulkResponse; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.action.support.master.AcknowledgedResponse; +import org.opensearch.cluster.routing.Preference; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.shared.Constants; +import org.opensearch.index.query.QueryBuilders; +import 
org.opensearch.search.SearchHit; +import org.opensearch.search.SearchHits; + +import lombok.SneakyThrows; + +@SuppressForbidden(reason = "unit test") +public class GeoIpDataDaoTests extends Ip2GeoTestCase { + private static final String IP_RANGE_FIELD_NAME = "_cidr"; + private static final String DATA_FIELD_NAME = "_data"; + private GeoIpDataDao noOpsGeoIpDataDao; + private GeoIpDataDao verifyingGeoIpDataDao; + + @Before + public void init() { + noOpsGeoIpDataDao = new GeoIpDataDao(clusterService, client, urlDenyListChecker); + verifyingGeoIpDataDao = new GeoIpDataDao(clusterService, verifyingClient, urlDenyListChecker); + } + + public void testCreateIndexIfNotExistsWithExistingIndex() { + String index = GeospatialTestHelper.randomLowerCaseString(); + when(metadata.hasIndex(index)).thenReturn(true); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { throw new RuntimeException("Shouldn't get called"); }); + verifyingGeoIpDataDao.createIndexIfNotExists(index); + } + + public void testCreateIndexIfNotExistsWithoutExistingIndex() { + String index = GeospatialTestHelper.randomLowerCaseString(); + when(metadata.hasIndex(index)).thenReturn(false); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof CreateIndexRequest); + CreateIndexRequest request = (CreateIndexRequest) actionRequest; + assertEquals(index, request.index()); + assertEquals(1, (int) request.settings().getAsInt("index.number_of_shards", 0)); + assertNull(request.settings().get("index.auto_expand_replicas")); + assertEquals(0, (int) request.settings().getAsInt("index.number_of_replicas", 1)); + assertEquals(-1, (int) request.settings().getAsInt("index.refresh_interval", 0)); + assertEquals(true, request.settings().getAsBoolean("index.hidden", false)); + + assertEquals( + "{\"dynamic\": false,\"properties\": {\"_cidr\": {\"type\": \"ip_range\",\"doc_values\": false}}}", + request.mappings() + ); + return null; + }); + 
verifyingGeoIpDataDao.createIndexIfNotExists(index); + } + + @SneakyThrows + public void testCreateDocument_whenBlankValue_thenDoNotAdd() { + String[] names = { "ip", "country", "location", "city" }; + String[] values = { "1.0.0.0/25", "USA", " ", "Seattle" }; + assertEquals( + "{\"_cidr\":\"1.0.0.0/25\",\"_data\":{\"country\":\"USA\",\"city\":\"Seattle\"}}", + noOpsGeoIpDataDao.createDocument(names, values).toString() + ); + } + + @SneakyThrows + public void testCreateDocument_whenFieldsAndValuesLengthDoesNotMatch_thenThrowException() { + String[] names = { "ip", "country", "location", "city" }; + String[] values = { "1.0.0.0/25", "USA", " " }; + + // Run + Exception e = expectThrows(OpenSearchException.class, () -> noOpsGeoIpDataDao.createDocument(names, values)); + + // Verify + assertTrue(e.getMessage().contains("does not match")); + } + + public void testGetDatabaseReader() throws Exception { + File zipFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.zip").getFile()); + DatasourceManifest manifest = new DatasourceManifest( + zipFile.toURI().toURL().toExternalForm(), + "sample_valid.csv", + "fake_sha256", + 1l, + Instant.now().toEpochMilli(), + "tester" + ); + CSVParser parser = noOpsGeoIpDataDao.getDatabaseReader(manifest); + String[] expectedHeader = { "network", "country_name" }; + assertArrayEquals(expectedHeader, parser.iterator().next().values()); + String[] expectedValues = { "1.0.0.0/24", "Australia" }; + assertArrayEquals(expectedValues, parser.iterator().next().values()); + verify(urlDenyListChecker).toUrlIfNotInDenyList(manifest.getUrl()); + } + + public void testGetDatabaseReaderNoFile() throws Exception { + File zipFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.zip").getFile()); + DatasourceManifest manifest = new DatasourceManifest( + zipFile.toURI().toURL().toExternalForm(), + "no_file.csv", + "fake_sha256", + 1l, + Instant.now().toEpochMilli(), + "tester" + ); + Exception 
exception = expectThrows(IllegalArgumentException.class, () -> noOpsGeoIpDataDao.getDatabaseReader(manifest)); + assertTrue(exception.getMessage().contains("does not exist")); + verify(urlDenyListChecker).toUrlIfNotInDenyList(manifest.getUrl()); + } + + @SneakyThrows + public void testInternalGetDatabaseReader_whenCalled_thenSetUserAgent() { + File zipFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.zip").getFile()); + DatasourceManifest manifest = new DatasourceManifest( + zipFile.toURI().toURL().toExternalForm(), + "sample_valid.csv", + "fake_sha256", + 1l, + Instant.now().toEpochMilli(), + "tester" + ); + + URLConnection connection = mock(URLConnection.class); + when(connection.getInputStream()).thenReturn(new FileInputStream(zipFile)); + + // Run + noOpsGeoIpDataDao.internalGetDatabaseReader(manifest, connection); + + // Verify + verify(connection).addRequestProperty(Constants.USER_AGENT_KEY, Constants.USER_AGENT_VALUE); + } + + public void testDeleteIp2GeoDataIndex_whenCalled_thenDeleteIndex() { + String index = String.format(Locale.ROOT, "%s.%s", IP2GEO_DATA_INDEX_NAME_PREFIX, GeospatialTestHelper.randomLowerCaseString()); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assertTrue(actionRequest instanceof DeleteIndexRequest); + DeleteIndexRequest request = (DeleteIndexRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(index, request.indices()[0]); + return new AcknowledgedResponse(true); + }); + verifyingGeoIpDataDao.deleteIp2GeoDataIndex(index); + } + + public void testDeleteIp2GeoDataIndexWithNonIp2GeoDataIndex() { + String index = GeospatialTestHelper.randomLowerCaseString(); + Exception e = expectThrows(OpenSearchException.class, () -> verifyingGeoIpDataDao.deleteIp2GeoDataIndex(index)); + assertTrue(e.getMessage().contains("not ip2geo data index")); + verify(verifyingClient, never()).index(any()); + } + + @SneakyThrows + public void 
testPutGeoIpData_whenValidInput_thenSucceed() { + String index = GeospatialTestHelper.randomLowerCaseString(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + if (actionRequest instanceof BulkRequest) { + BulkRequest request = (BulkRequest) actionRequest; + assertEquals(2, request.numberOfActions()); + BulkResponse response = mock(BulkResponse.class); + when(response.hasFailures()).thenReturn(false); + return response; + } else if (actionRequest instanceof RefreshRequest) { + RefreshRequest request = (RefreshRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(index, request.indices()[0]); + return null; + } else if (actionRequest instanceof ForceMergeRequest) { + ForceMergeRequest request = (ForceMergeRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(index, request.indices()[0]); + assertEquals(1, request.maxNumSegments()); + return null; + } else if (actionRequest instanceof UpdateSettingsRequest) { + UpdateSettingsRequest request = (UpdateSettingsRequest) actionRequest; + assertEquals(1, request.indices().length); + assertEquals(index, request.indices()[0]); + assertEquals(true, request.settings().getAsBoolean("index.blocks.write", false)); + assertNull(request.settings().get("index.num_of_replica")); + assertEquals("0-all", request.settings().get("index.auto_expand_replicas")); + return null; + } else { + throw new RuntimeException("invalid request is called"); + } + }); + Runnable renewLock = mock(Runnable.class); + try (CSVParser csvParser = CSVParser.parse(sampleIp2GeoFile(), StandardCharsets.UTF_8, CSVFormat.RFC4180)) { + Iterator iterator = csvParser.iterator(); + String[] fields = iterator.next().values(); + verifyingGeoIpDataDao.putGeoIpData(index, fields, iterator, renewLock); + verify(renewLock, times(2)).run(); + } + } + + public void testGetGeoIpData_whenDataExist_thenReturnTheData() { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + 
String ip = randomIpAddress(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assert actionRequest instanceof SearchRequest; + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(Preference.LOCAL.type(), request.preference()); + assertEquals(1, request.source().size()); + assertEquals(QueryBuilders.termQuery(IP_RANGE_FIELD_NAME, ip), request.source().query()); + + String data = String.format( + Locale.ROOT, + "{\"%s\":\"1.0.0.1/16\",\"%s\":{\"city\":\"seattle\"}}", + IP_RANGE_FIELD_NAME, + DATA_FIELD_NAME + ); + SearchHit searchHit = new SearchHit(1); + searchHit.sourceRef(BytesReference.fromByteBuffer(ByteBuffer.wrap(data.getBytes(StandardCharsets.UTF_8)))); + SearchHit[] searchHitArray = { searchHit }; + SearchHits searchHits = new SearchHits(searchHitArray, new TotalHits(1l, TotalHits.Relation.EQUAL_TO), 1); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + return response; + }); + + // Run + Map geoData = verifyingGeoIpDataDao.getGeoIpData(indexName, ip); + + // Verify + assertEquals("seattle", geoData.get("city")); + } + + public void testGetGeoIpData_whenNoData_thenReturnEmpty() { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + String ip = randomIpAddress(); + verifyingClient.setExecuteVerifier((actionResponse, actionRequest) -> { + assert actionRequest instanceof SearchRequest; + SearchRequest request = (SearchRequest) actionRequest; + assertEquals(Preference.LOCAL.type(), request.preference()); + assertEquals(1, request.source().size()); + assertEquals(QueryBuilders.termQuery(IP_RANGE_FIELD_NAME, ip), request.source().query()); + + SearchHit[] searchHitArray = {}; + SearchHits searchHits = new SearchHits(searchHitArray, new TotalHits(0l, TotalHits.Relation.EQUAL_TO), 0); + + SearchResponse response = mock(SearchResponse.class); + when(response.getHits()).thenReturn(searchHits); + return response; + }); + + // Run + Map geoData = 
verifyingGeoIpDataDao.getGeoIpData(indexName, ip); + + // Verify + assertTrue(geoData.isEmpty()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDaoTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDaoTests.java new file mode 100644 index 00000000..d130d593 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoCachedDaoTests.java @@ -0,0 +1,265 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.time.Instant; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.junit.Before; +import org.opensearch.common.network.NetworkAddress; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.engine.Engine; + +import lombok.SneakyThrows; + +public class Ip2GeoCachedDaoTests extends Ip2GeoTestCase { + private Ip2GeoCachedDao ip2GeoCachedDao; + + @Before + public void init() { + ip2GeoCachedDao = new Ip2GeoCachedDao(clusterService, datasourceDao, geoIpDataDao); + } + + public void testGetIndexName_whenCalled_thenReturnIndexName() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + // Run + String indexName = ip2GeoCachedDao.getIndexName(datasource.getName()); + + // Verify + 
assertEquals(datasource.currentIndexName(), indexName); + } + + public void testGetIndexName_whenIndexNotFound_thenReturnNull() { + when(datasourceDao.getAllDatasources()).thenThrow(new IndexNotFoundException("not found")); + + // Run + String indexName = ip2GeoCachedDao.getIndexName(GeospatialTestHelper.randomLowerCaseString()); + + // Verify + assertNull(indexName); + } + + public void testIsExpired_whenExpired_thenReturnTrue() { + Datasource datasource = randomDatasource(); + datasource.getUpdateStats().setLastSucceededAt(Instant.MIN); + datasource.getUpdateStats().setLastSkippedAt(null); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + // Run + boolean isExpired = ip2GeoCachedDao.isExpired(datasource.getName()); + + // Verify + assertTrue(isExpired); + } + + public void testIsExpired_whenNotExpired_thenReturnFalse() { + Datasource datasource = randomDatasource(); + datasource.getUpdateStats().setLastSucceededAt(Instant.now()); + datasource.getUpdateStats().setLastSkippedAt(null); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + // Run + boolean isExpired = ip2GeoCachedDao.isExpired(datasource.getName()); + + // Verify + assertFalse(isExpired); + } + + public void testHas_whenHasDatasource_thenReturnTrue() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + // Run + boolean hasDatasource = ip2GeoCachedDao.has(datasource.getName()); + + // Verify + assertTrue(hasDatasource); + } + + public void testHas_whenNoDatasource_thenReturnFalse() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + // Run + boolean hasDatasource = ip2GeoCachedDao.has(datasourceName); + + // Verify + assertFalse(hasDatasource); + } + + public void testGetState_whenCalled_thenReturnState() { 
+ Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + // Run + DatasourceState state = ip2GeoCachedDao.getState(datasource.getName()); + + // Verify + assertEquals(datasource.getState(), state); + } + + public void testGetGeoData_whenCalled_thenReturnGeoData() { + Datasource datasource = randomDatasource(); + String ip = NetworkAddress.format(randomIp(false)); + Map expectedGeoData = Map.of("city", "Seattle"); + when(geoIpDataDao.getGeoIpData(datasource.currentIndexName(), ip)).thenReturn(expectedGeoData); + + // Run + Map geoData = ip2GeoCachedDao.getGeoData(datasource.currentIndexName(), ip); + + // Verify + assertEquals(expectedGeoData, geoData); + } + + @SneakyThrows + public void testPostIndex_whenFailed_thenNoUpdate() { + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList()); + Datasource datasource = randomDatasource(); + + ShardId shardId = mock(ShardId.class); + Engine.Index index = mock(Engine.Index.class); + BytesReference bytesReference = BytesReference.bytes(datasource.toXContent(XContentFactory.jsonBuilder(), null)); + when(index.source()).thenReturn(bytesReference); + Engine.IndexResult result = mock(Engine.IndexResult.class); + when(result.getResultType()).thenReturn(Engine.Result.Type.FAILURE); + + // Run + ip2GeoCachedDao.postIndex(shardId, index, result); + + // Verify + assertFalse(ip2GeoCachedDao.has(datasource.getName())); + assertTrue(ip2GeoCachedDao.isExpired(datasource.getName())); + assertNull(ip2GeoCachedDao.getIndexName(datasource.getName())); + assertNull(ip2GeoCachedDao.getState(datasource.getName())); + } + + @SneakyThrows + public void testPostIndex_whenSucceed_thenUpdate() { + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList()); + Datasource datasource = randomDatasource(); + + ShardId shardId = mock(ShardId.class); + Engine.Index index = mock(Engine.Index.class); + BytesReference bytesReference = 
BytesReference.bytes(datasource.toXContent(XContentFactory.jsonBuilder(), null)); + when(index.source()).thenReturn(bytesReference); + Engine.IndexResult result = mock(Engine.IndexResult.class); + when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS); + + // Run + ip2GeoCachedDao.postIndex(shardId, index, result); + + // Verify + assertTrue(ip2GeoCachedDao.has(datasource.getName())); + assertFalse(ip2GeoCachedDao.isExpired(datasource.getName())); + assertEquals(datasource.currentIndexName(), ip2GeoCachedDao.getIndexName(datasource.getName())); + assertEquals(datasource.getState(), ip2GeoCachedDao.getState(datasource.getName())); + } + + public void testPostDelete_whenFailed_thenNoUpdate() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + ShardId shardId = mock(ShardId.class); + Engine.Delete index = mock(Engine.Delete.class); + Engine.DeleteResult result = mock(Engine.DeleteResult.class); + when(result.getResultType()).thenReturn(Engine.Result.Type.FAILURE); + + // Run + ip2GeoCachedDao.postDelete(shardId, index, result); + + // Verify + assertTrue(ip2GeoCachedDao.has(datasource.getName())); + } + + public void testPostDelete_whenSucceed_thenUpdate() { + Datasource datasource = randomDatasource(); + when(datasourceDao.getAllDatasources()).thenReturn(Arrays.asList(datasource)); + + ShardId shardId = mock(ShardId.class); + Engine.Delete index = mock(Engine.Delete.class); + when(index.id()).thenReturn(datasource.getName()); + Engine.DeleteResult result = mock(Engine.DeleteResult.class); + when(result.getResultType()).thenReturn(Engine.Result.Type.SUCCESS); + + // Run + ip2GeoCachedDao.postDelete(shardId, index, result); + + // Verify + assertFalse(ip2GeoCachedDao.has(datasource.getName())); + } + + @SneakyThrows + public void testUpdateMaxSize_whenBiggerSize_thenContainsAllData() { + int cacheSize = 10; + String datasource = GeospatialTestHelper.randomLowerCaseString(); + 
Ip2GeoCachedDao.GeoDataCache geoDataCache = new Ip2GeoCachedDao.GeoDataCache(cacheSize); + List ips = new ArrayList<>(cacheSize); + for (int i = 0; i < cacheSize; i++) { + String ip = NetworkAddress.format(randomIp(false)); + ips.add(ip); + geoDataCache.putIfAbsent(datasource, ip, addr -> Collections.emptyMap()); + } + + // Verify all data exist in the cache + assertTrue(ips.stream().allMatch(ip -> geoDataCache.get(datasource, ip) != null)); + + // Update cache size + int newCacheSize = 15; + geoDataCache.updateMaxSize(newCacheSize); + + // Verify all data exist in the cache + assertTrue(ips.stream().allMatch(ip -> geoDataCache.get(datasource, ip) != null)); + + // Add (newCacheSize - cacheSize + 1) data and the first data should not be available in the cache + for (int i = 0; i < newCacheSize - cacheSize + 1; i++) { + geoDataCache.putIfAbsent(datasource, NetworkAddress.format(randomIp(false)), addr -> Collections.emptyMap()); + } + assertNull(geoDataCache.get(datasource, ips.get(0))); + } + + @SneakyThrows + public void testUpdateMaxSize_whenSmallerSize_thenContainsPartialData() { + int cacheSize = 10; + String datasource = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoCachedDao.GeoDataCache geoDataCache = new Ip2GeoCachedDao.GeoDataCache(cacheSize); + List ips = new ArrayList<>(cacheSize); + for (int i = 0; i < cacheSize; i++) { + String ip = NetworkAddress.format(randomIp(false)); + ips.add(ip); + geoDataCache.putIfAbsent(datasource, ip, addr -> Collections.emptyMap()); + } + + // Verify all data exist in the cache + assertTrue(ips.stream().allMatch(ip -> geoDataCache.get(datasource, ip) != null)); + + // Update cache size + int newCacheSize = 5; + geoDataCache.updateMaxSize(newCacheSize); + + // Verify the last (cacheSize - newCacheSize) data is available in the cache + List deleted = ips.subList(0, ips.size() - newCacheSize); + List retained = ips.subList(ips.size() - newCacheSize, ips.size()); + assertTrue(deleted.stream().allMatch(ip -> 
geoDataCache.get(datasource, ip) == null)); + assertTrue(retained.stream().allMatch(ip -> geoDataCache.get(datasource, ip) != null)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDaoTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDaoTests.java new file mode 100644 index 00000000..9088b0de --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/dao/Ip2GeoProcessorDaoTests.java @@ -0,0 +1,78 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.dao; + +import static org.mockito.Mockito.when; + +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.junit.Before; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.processor.Ip2GeoProcessor; +import org.opensearch.ingest.IngestMetadata; +import org.opensearch.ingest.PipelineConfiguration; + +public class Ip2GeoProcessorDaoTests extends Ip2GeoTestCase { + private Ip2GeoProcessorDao ip2GeoProcessorDao; + + @Before + public void init() { + ip2GeoProcessorDao = new Ip2GeoProcessorDao(ingestService); + } + + public void testGetProcessors_whenNullMetadata_thenReturnEmpty() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(null); + + List ip2GeoProcessorList = ip2GeoProcessorDao.getProcessors(datasourceName); + assertTrue(ip2GeoProcessorList.isEmpty()); + } + + public void testGetProcessors_whenNoProcessorForGivenDatasource_thenReturnEmpty() { + String datasourceBeingUsed = GeospatialTestHelper.randomLowerCaseString(); + String datasourceNotBeingUsed = 
GeospatialTestHelper.randomLowerCaseString(); + String pipelineId = GeospatialTestHelper.randomLowerCaseString(); + Map pipelines = new HashMap<>(); + pipelines.put(pipelineId, createPipelineConfiguration()); + IngestMetadata ingestMetadata = new IngestMetadata(pipelines); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); + Ip2GeoProcessor ip2GeoProcessor = randomIp2GeoProcessor(datasourceBeingUsed); + when(ingestService.getProcessorsInPipeline(pipelineId, Ip2GeoProcessor.class)).thenReturn(Arrays.asList(ip2GeoProcessor)); + + List ip2GeoProcessorList = ip2GeoProcessorDao.getProcessors(datasourceNotBeingUsed); + assertTrue(ip2GeoProcessorList.isEmpty()); + } + + public void testGetProcessors_whenProcessorsForGivenDatasource_thenReturnProcessors() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String pipelineId = GeospatialTestHelper.randomLowerCaseString(); + Map pipelines = new HashMap<>(); + pipelines.put(pipelineId, createPipelineConfiguration()); + IngestMetadata ingestMetadata = new IngestMetadata(pipelines); + when(metadata.custom(IngestMetadata.TYPE)).thenReturn(ingestMetadata); + Ip2GeoProcessor ip2GeoProcessor = randomIp2GeoProcessor(datasourceName); + when(ingestService.getProcessorsInPipeline(pipelineId, Ip2GeoProcessor.class)).thenReturn(Arrays.asList(ip2GeoProcessor)); + + List ip2GeoProcessorList = ip2GeoProcessorDao.getProcessors(datasourceName); + assertEquals(1, ip2GeoProcessorList.size()); + assertEquals(ip2GeoProcessor.getDatasourceName(), ip2GeoProcessorList.get(0).getDatasourceName()); + } + + private PipelineConfiguration createPipelineConfiguration() { + String id = GeospatialTestHelper.randomLowerCaseString(); + ByteBuffer byteBuffer = ByteBuffer.wrap(GeospatialTestHelper.randomLowerCaseString().getBytes(StandardCharsets.US_ASCII)); + BytesReference config = BytesReference.fromByteBuffer(byteBuffer); + return new PipelineConfiguration(id, config, XContentType.JSON); + } +} diff --git 
a/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtensionTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtensionTests.java new file mode 100644 index 00000000..0ea22117 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceExtensionTests.java @@ -0,0 +1,43 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import static org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension.JOB_INDEX_NAME; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; + +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +public class DatasourceExtensionTests extends Ip2GeoTestCase { + public void testBasic() { + DatasourceExtension extension = new DatasourceExtension(); + assertEquals("scheduler_geospatial_ip2geo_datasource", extension.getJobType()); + assertEquals(JOB_INDEX_NAME, extension.getJobIndex()); + assertEquals(DatasourceRunner.getJobRunnerInstance(), extension.getJobRunner()); + } + + public void testParser() throws Exception { + DatasourceExtension extension = new DatasourceExtension(); + String id = GeospatialTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + String endpoint = GeospatialTestHelper.randomLowerCaseString(); + Datasource datasource = new Datasource(id, schedule, endpoint); + + Datasource anotherDatasource = (Datasource) extension.getJobParser() + .parse( + createParser(datasource.toXContent(XContentFactory.jsonBuilder(), null)), + GeospatialTestHelper.randomLowerCaseString(), + new 
JobDocVersion(randomPositiveLong(), randomPositiveLong(), randomPositiveLong()) + ); + + assertTrue(datasource.equals(anotherDatasource)); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunnerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunnerTests.java new file mode 100644 index 00000000..9919e8b8 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceRunnerTests.java @@ -0,0 +1,230 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.mockito.internal.verification.VerificationModeFactory.times; +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; + +import java.time.Duration; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Optional; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.jobscheduler.spi.JobDocVersion; +import org.opensearch.jobscheduler.spi.JobExecutionContext; +import org.opensearch.jobscheduler.spi.LockModel; +import org.opensearch.jobscheduler.spi.ScheduledJobParameter; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +import lombok.SneakyThrows; + +public class DatasourceRunnerTests extends Ip2GeoTestCase { + @Before + public void init() { + 
DatasourceRunner.getJobRunnerInstance() + .initialize(clusterService, datasourceUpdateService, ip2GeoExecutor, datasourceDao, ip2GeoLockService); + } + + public void testGetJobRunnerInstance_whenCalledAgain_thenReturnSameInstance() { + assertTrue(DatasourceRunner.getJobRunnerInstance() == DatasourceRunner.getJobRunnerInstance()); + } + + public void testRunJob_whenInvalidClass_thenThrowException() { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = randomLowerCaseString(); + String jobId = randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + + // Run + expectThrows(IllegalStateException.class, () -> DatasourceRunner.getJobRunnerInstance().runJob(jobParameter, jobExecutionContext)); + } + + @SneakyThrows + public void testRunJob_whenValidInput_thenSucceed() { + JobDocVersion jobDocVersion = new JobDocVersion(randomInt(), randomInt(), randomInt()); + String jobIndexName = randomLowerCaseString(); + String jobId = randomLowerCaseString(); + JobExecutionContext jobExecutionContext = new JobExecutionContext(Instant.now(), jobDocVersion, lockService, jobIndexName, jobId); + Datasource datasource = randomDatasource(); + + LockModel lockModel = randomLockModel(); + when(ip2GeoLockService.acquireLock(datasource.getName(), Ip2GeoLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + + // Run + DatasourceRunner.getJobRunnerInstance().runJob(datasource, jobExecutionContext); + + // Verify + verify(ip2GeoLockService).acquireLock(datasource.getName(), Ip2GeoLockService.LOCK_DURATION_IN_SECONDS); + verify(datasourceDao).getDatasource(datasource.getName()); + verify(ip2GeoLockService).releaseLock(lockModel); + } + + @SneakyThrows + public void 
testUpdateDatasourceRunner_whenExceptionBeforeAcquiringLock_thenNoReleaseLock() { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(GeospatialTestHelper.randomLowerCaseString()); + when(ip2GeoLockService.acquireLock(jobParameter.getName(), Ip2GeoLockService.LOCK_DURATION_IN_SECONDS)).thenThrow( + new RuntimeException() + ); + + // Run + expectThrows(Exception.class, () -> DatasourceRunner.getJobRunnerInstance().updateDatasourceRunner(jobParameter).run()); + + // Verify + verify(ip2GeoLockService, never()).releaseLock(any()); + } + + @SneakyThrows + public void testUpdateDatasourceRunner_whenExceptionAfterAcquiringLock_thenReleaseLock() { + ScheduledJobParameter jobParameter = mock(ScheduledJobParameter.class); + when(jobParameter.getName()).thenReturn(GeospatialTestHelper.randomLowerCaseString()); + LockModel lockModel = randomLockModel(); + when(ip2GeoLockService.acquireLock(jobParameter.getName(), Ip2GeoLockService.LOCK_DURATION_IN_SECONDS)).thenReturn( + Optional.of(lockModel) + ); + when(datasourceDao.getDatasource(jobParameter.getName())).thenThrow(new RuntimeException()); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasourceRunner(jobParameter).run(); + + // Verify + verify(ip2GeoLockService).releaseLock(any()); + } + + @SneakyThrows + public void testUpdateDatasource_whenDatasourceDoesNotExist_thenDoNothing() { + Datasource datasource = new Datasource(); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, mock(Runnable.class)); + + // Verify + verify(datasourceUpdateService, never()).deleteUnusedIndices(any()); + } + + @SneakyThrows + public void testUpdateDatasource_whenInvalidState_thenUpdateLastFailedAt() { + Datasource datasource = new Datasource(); + datasource.enable(); + datasource.getUpdateStats().setLastFailedAt(null); + datasource.setState(randomStateExcept(DatasourceState.AVAILABLE)); + 
when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, mock(Runnable.class)); + + // Verify + assertFalse(datasource.isEnabled()); + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + verify(datasourceDao).updateDatasource(datasource); + } + + @SneakyThrows + public void testUpdateDatasource_whenValidInput_thenSucceed() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, renewLock); + + // Verify + verify(datasourceUpdateService, times(2)).deleteUnusedIndices(datasource); + verify(datasourceUpdateService).updateOrCreateGeoIpData(datasource, renewLock); + verify(datasourceUpdateService).updateDatasource(datasource, datasource.getUserSchedule(), DatasourceTask.ALL); + } + + @SneakyThrows + public void testUpdateDatasource_whenDeleteTask_thenDeleteOnly() { + Datasource datasource = randomDatasource(); + datasource.setState(DatasourceState.AVAILABLE); + datasource.setTask(DatasourceTask.DELETE_UNUSED_INDICES); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, renewLock); + + // Verify + verify(datasourceUpdateService, times(2)).deleteUnusedIndices(datasource); + verify(datasourceUpdateService, never()).updateOrCreateGeoIpData(datasource, renewLock); + verify(datasourceUpdateService).updateDatasource(datasource, datasource.getUserSchedule(), DatasourceTask.ALL); + } + + @SneakyThrows + public void testUpdateDatasource_whenExpired_thenDeleteIndicesAgain() { + Datasource datasource = randomDatasource(); + 
datasource.getUpdateStats().setLastSkippedAt(null); + datasource.getUpdateStats() + .setLastSucceededAt(Instant.now().minus(datasource.getDatabase().getValidForInDays() + 1, ChronoUnit.DAYS)); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, renewLock); + + // Verify + verify(datasourceUpdateService, times(3)).deleteUnusedIndices(datasource); + verify(datasourceUpdateService).updateOrCreateGeoIpData(datasource, renewLock); + verify(datasourceUpdateService).updateDatasource(datasource, datasource.getUserSchedule(), DatasourceTask.ALL); + } + + @SneakyThrows + public void testUpdateDatasource_whenWillExpire_thenScheduleDeleteTask() { + Datasource datasource = randomDatasource(); + datasource.getUpdateStats().setLastSkippedAt(null); + datasource.getUpdateStats() + .setLastSucceededAt(Instant.now().minus(datasource.getDatabase().getValidForInDays(), ChronoUnit.DAYS).plusSeconds(60)); + datasource.setState(DatasourceState.AVAILABLE); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + Runnable renewLock = mock(Runnable.class); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, renewLock); + + // Verify + verify(datasourceUpdateService, times(2)).deleteUnusedIndices(datasource); + verify(datasourceUpdateService).updateOrCreateGeoIpData(datasource, renewLock); + + ArgumentCaptor captor = ArgumentCaptor.forClass(IntervalSchedule.class); + verify(datasourceUpdateService).updateDatasource(eq(datasource), captor.capture(), eq(DatasourceTask.DELETE_UNUSED_INDICES)); + assertTrue(Duration.between(datasource.expirationDay(), captor.getValue().getNextExecutionTime(Instant.now())).getSeconds() < 30); + } + + @SneakyThrows + public void testUpdateDatasourceExceptionHandling() { + Datasource datasource = new 
Datasource(); + datasource.setName(randomLowerCaseString()); + datasource.getUpdateStats().setLastFailedAt(null); + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + doThrow(new RuntimeException("test failure")).when(datasourceUpdateService).deleteUnusedIndices(any()); + + // Run + DatasourceRunner.getJobRunnerInstance().updateDatasource(datasource, mock(Runnable.class)); + + // Verify + assertNotNull(datasource.getUpdateStats().getLastFailedAt()); + verify(datasourceDao).updateDatasource(datasource); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTests.java new file mode 100644 index 00000000..adb08297 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceTests.java @@ -0,0 +1,190 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Locale; + +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +import lombok.SneakyThrows; + +public class DatasourceTests extends Ip2GeoTestCase { + + @SneakyThrows + public void testParser_whenAllValueIsFilled_thenSucceed() { + String id = GeospatialTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + String endpoint = GeospatialTestHelper.randomLowerCaseString(); + Datasource datasource = new Datasource(id, schedule, 
endpoint); + datasource.enable(); + datasource.setCurrentIndex(GeospatialTestHelper.randomLowerCaseString()); + datasource.getDatabase().setFields(Arrays.asList("field1", "field2")); + datasource.getDatabase().setProvider("test_provider"); + datasource.getDatabase().setUpdatedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + datasource.getDatabase().setSha256Hash(GeospatialTestHelper.randomLowerCaseString()); + datasource.getDatabase().setValidForInDays(1l); + datasource.getUpdateStats().setLastProcessingTimeInMillis(randomPositiveLong()); + datasource.getUpdateStats().setLastSucceededAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + datasource.getUpdateStats().setLastSkippedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + datasource.getUpdateStats().setLastFailedAt(Instant.now().truncatedTo(ChronoUnit.MILLIS)); + + Datasource anotherDatasource = Datasource.PARSER.parse( + createParser(datasource.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + assertTrue(datasource.equals(anotherDatasource)); + } + + @SneakyThrows + public void testParser_whenNullForOptionalFields_thenSucceed() { + String id = GeospatialTestHelper.randomLowerCaseString(); + IntervalSchedule schedule = new IntervalSchedule(Instant.now().truncatedTo(ChronoUnit.MILLIS), 1, ChronoUnit.DAYS); + String endpoint = GeospatialTestHelper.randomLowerCaseString(); + Datasource datasource = new Datasource(id, schedule, endpoint); + Datasource anotherDatasource = Datasource.PARSER.parse( + createParser(datasource.toXContent(XContentFactory.jsonBuilder(), null)), + null + ); + assertTrue(datasource.equals(anotherDatasource)); + } + + public void testCurrentIndexName_whenNotExpired_thenReturnName() { + String id = GeospatialTestHelper.randomLowerCaseString(); + Instant now = Instant.now(); + Datasource datasource = new Datasource(); + datasource.setName(id); + datasource.setCurrentIndex(datasource.newIndexName(GeospatialTestHelper.randomLowerCaseString())); + 
datasource.getDatabase().setProvider("provider"); + datasource.getDatabase().setSha256Hash("sha256Hash"); + datasource.getDatabase().setUpdatedAt(now); + datasource.getDatabase().setFields(new ArrayList<>()); + + assertNotNull(datasource.currentIndexName()); + } + + public void testCurrentIndexName_whenExpired_thenReturnNull() { + String id = GeospatialTestHelper.randomLowerCaseString(); + Instant now = Instant.now(); + Datasource datasource = new Datasource(); + datasource.setName(id); + datasource.setCurrentIndex(datasource.newIndexName(GeospatialTestHelper.randomLowerCaseString())); + datasource.getDatabase().setProvider("provider"); + datasource.getDatabase().setSha256Hash("sha256Hash"); + datasource.getDatabase().setUpdatedAt(now); + datasource.getDatabase().setValidForInDays(1l); + datasource.getUpdateStats().setLastSucceededAt(Instant.now().minus(25, ChronoUnit.HOURS)); + datasource.getDatabase().setFields(new ArrayList<>()); + + assertTrue(datasource.isExpired()); + assertNull(datasource.currentIndexName()); + } + + @SneakyThrows + public void testNewIndexName_whenCalled_thenReturnedExpectedValue() { + String name = GeospatialTestHelper.randomLowerCaseString(); + String suffix = GeospatialTestHelper.randomLowerCaseString(); + Datasource datasource = new Datasource(); + datasource.setName(name); + assertEquals(String.format(Locale.ROOT, "%s.%s.%s", IP2GEO_DATA_INDEX_NAME_PREFIX, name, suffix), datasource.newIndexName(suffix)); + } + + public void testResetDatabase_whenCalled_thenNullifySomeFields() { + Datasource datasource = randomDatasource(); + assertNotNull(datasource.getDatabase().getSha256Hash()); + assertNotNull(datasource.getDatabase().getUpdatedAt()); + + // Run + datasource.resetDatabase(); + + // Verify + assertNull(datasource.getDatabase().getSha256Hash()); + assertNull(datasource.getDatabase().getUpdatedAt()); + } + + public void testIsExpired_whenCalled_thenExpectedValue() { + Datasource datasource = new Datasource(); + // never expire if 
validForInDays is null + assertFalse(datasource.isExpired()); + + datasource.getDatabase().setValidForInDays(1l); + + // if last skipped date is null, use only last succeeded date to determine + datasource.getUpdateStats().setLastSucceededAt(Instant.now().minus(25, ChronoUnit.HOURS)); + assertTrue(datasource.isExpired()); + + // use the latest date between last skipped date and last succeeded date to determine + datasource.getUpdateStats().setLastSkippedAt(Instant.now()); + assertFalse(datasource.isExpired()); + datasource.getUpdateStats().setLastSkippedAt(Instant.now().minus(25, ChronoUnit.HOURS)); + datasource.getUpdateStats().setLastSucceededAt(Instant.now()); + assertFalse(datasource.isExpired()); + } + + public void testWillExpired_whenCalled_thenExpectedValue() { + Datasource datasource = new Datasource(); + // never expire if validForInDays is null + assertFalse(datasource.willExpire(Instant.MAX)); + + long validForInDays = 1; + datasource.getDatabase().setValidForInDays(validForInDays); + + // if last skipped date is null, use only last succeeded date to determine + datasource.getUpdateStats().setLastSucceededAt(Instant.now().minus(1, ChronoUnit.DAYS)); + assertTrue( + datasource.willExpire(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS).plusSeconds(1)) + ); + assertFalse(datasource.willExpire(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS))); + + // use the latest date between last skipped date and last succeeded date to determine + datasource.getUpdateStats().setLastSkippedAt(Instant.now()); + assertTrue( + datasource.willExpire(datasource.getUpdateStats().getLastSkippedAt().plus(validForInDays, ChronoUnit.DAYS).plusSeconds(1)) + ); + assertFalse(datasource.willExpire(datasource.getUpdateStats().getLastSkippedAt().plus(validForInDays, ChronoUnit.DAYS))); + + datasource.getUpdateStats().setLastSkippedAt(Instant.now().minus(1, ChronoUnit.HOURS)); + 
datasource.getUpdateStats().setLastSucceededAt(Instant.now()); + assertTrue( + datasource.willExpire(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS).plusSeconds(1)) + ); + assertFalse(datasource.willExpire(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS))); + } + + public void testExpirationDay_whenCalled_thenExpectedValue() { + Datasource datasource = new Datasource(); + datasource.getDatabase().setValidForInDays(null); + assertEquals(Instant.MAX, datasource.expirationDay()); + + long validForInDays = 1; + datasource.getDatabase().setValidForInDays(validForInDays); + + // if last skipped date is null, use only last succeeded date to determine + datasource.getUpdateStats().setLastSucceededAt(Instant.now().minus(1, ChronoUnit.DAYS)); + assertEquals(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS), datasource.expirationDay()); + + // use the latest date between last skipped date and last succeeded date to determine + datasource.getUpdateStats().setLastSkippedAt(Instant.now()); + assertEquals(datasource.getUpdateStats().getLastSkippedAt().plus(validForInDays, ChronoUnit.DAYS), datasource.expirationDay()); + + datasource.getUpdateStats().setLastSkippedAt(Instant.now().minus(1, ChronoUnit.HOURS)); + datasource.getUpdateStats().setLastSucceededAt(Instant.now()); + assertEquals(datasource.getUpdateStats().getLastSucceededAt().plus(validForInDays, ChronoUnit.DAYS), datasource.expirationDay()); + } + + public void testLockDurationSeconds() { + Datasource datasource = new Datasource(); + assertNotNull(datasource.getLockDurationSeconds()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateServiceTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateServiceTests.java new file mode 100644 index 00000000..ba2baaed --- /dev/null +++ 
b/src/test/java/org/opensearch/geospatial/ip2geo/jobscheduler/DatasourceUpdateServiceTests.java @@ -0,0 +1,277 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.jobscheduler; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isA; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVParser; +import org.junit.Before; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceManifest; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; + +import lombok.SneakyThrows; + +@SuppressForbidden(reason = "unit test") +public class DatasourceUpdateServiceTests extends Ip2GeoTestCase { + private DatasourceUpdateService datasourceUpdateService; + + @Before + public void init() { + datasourceUpdateService = new DatasourceUpdateService(clusterService, datasourceDao, geoIpDataDao, urlDenyListChecker); + } + + @SneakyThrows + public void testUpdateOrCreateGeoIpData_whenHashValueIsSame_thenSkipUpdate() { + File manifestFile = new 
File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(manifestFile.toURI().toURL()); + + Datasource datasource = new Datasource(); + datasource.setState(DatasourceState.AVAILABLE); + datasource.getUpdateStats().setLastSkippedAt(null); + datasource.getDatabase().setUpdatedAt(Instant.ofEpochMilli(manifest.getUpdatedAt())); + datasource.getDatabase().setSha256Hash(manifest.getSha256Hash()); + datasource.setEndpoint(manifestFile.toURI().toURL().toExternalForm()); + + // Run + datasourceUpdateService.updateOrCreateGeoIpData(datasource, mock(Runnable.class)); + + // Verify + assertNotNull(datasource.getUpdateStats().getLastSkippedAt()); + verify(datasourceDao).updateDatasource(datasource); + verify(urlDenyListChecker).toUrlIfNotInDenyList(datasource.getEndpoint()); + } + + @SneakyThrows + public void testUpdateOrCreateGeoIpData_whenExpired_thenUpdate() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(manifestFile.toURI().toURL()); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + Datasource datasource = new Datasource(); + datasource.setState(DatasourceState.AVAILABLE); + datasource.getDatabase().setUpdatedAt(Instant.ofEpochMilli(manifest.getUpdatedAt())); + datasource.getDatabase().setSha256Hash(manifest.getSha256Hash()); + datasource.getDatabase().setValidForInDays(1l); + datasource.setEndpoint(manifestFile.toURI().toURL().toExternalForm()); + datasource.resetDatabase(); + + // Run + datasourceUpdateService.updateOrCreateGeoIpData(datasource, mock(Runnable.class)); + + // Verify + verify(geoIpDataDao).putGeoIpData(eq(datasource.currentIndexName()), 
isA(String[].class), any(Iterator.class), any(Runnable.class)); + verify(urlDenyListChecker).toUrlIfNotInDenyList(datasource.getEndpoint()); + } + + @SneakyThrows + public void testUpdateOrCreateGeoIpData_whenInvalidData_thenThrowException() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(manifestFile.toURI().toURL()); + + File sampleFile = new File( + this.getClass().getClassLoader().getResource("ip2geo/sample_invalid_less_than_two_fields.csv").getFile() + ); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + Datasource datasource = new Datasource(); + datasource.setState(DatasourceState.AVAILABLE); + datasource.getDatabase().setUpdatedAt(Instant.ofEpochMilli(manifest.getUpdatedAt() - 1)); + datasource.getDatabase().setSha256Hash(manifest.getSha256Hash().substring(1)); + datasource.getDatabase().setFields(Arrays.asList("country_name")); + datasource.setEndpoint(manifestFile.toURI().toURL().toExternalForm()); + + // Run + expectThrows(OpenSearchException.class, () -> datasourceUpdateService.updateOrCreateGeoIpData(datasource, mock(Runnable.class))); + verify(urlDenyListChecker).toUrlIfNotInDenyList(datasource.getEndpoint()); + } + + @SneakyThrows + public void testUpdateOrCreateGeoIpData_whenIncompatibleFields_thenThrowException() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(manifestFile.toURI().toURL()); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + Datasource datasource = new Datasource(); + 
datasource.setState(DatasourceState.AVAILABLE); + datasource.getDatabase().setUpdatedAt(Instant.ofEpochMilli(manifest.getUpdatedAt() - 1)); + datasource.getDatabase().setSha256Hash(manifest.getSha256Hash().substring(1)); + datasource.getDatabase().setFields(Arrays.asList("country_name", "additional_field")); + datasource.setEndpoint(manifestFile.toURI().toURL().toExternalForm()); + + // Run + expectThrows(OpenSearchException.class, () -> datasourceUpdateService.updateOrCreateGeoIpData(datasource, mock(Runnable.class))); + verify(urlDenyListChecker).toUrlIfNotInDenyList(datasource.getEndpoint()); + } + + @SneakyThrows + public void testUpdateOrCreateGeoIpData_whenValidInput_thenSucceed() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + DatasourceManifest manifest = DatasourceManifest.Builder.build(manifestFile.toURI().toURL()); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(true); + when(routingTable.allShards(anyString())).thenReturn(Arrays.asList(shardRouting)); + + Datasource datasource = new Datasource(); + datasource.setState(DatasourceState.AVAILABLE); + datasource.getDatabase().setUpdatedAt(Instant.ofEpochMilli(manifest.getUpdatedAt() - 1)); + datasource.getDatabase().setSha256Hash(manifest.getSha256Hash().substring(1)); + datasource.getDatabase().setFields(Arrays.asList("country_name")); + datasource.setEndpoint(manifestFile.toURI().toURL().toExternalForm()); + datasource.getUpdateStats().setLastSucceededAt(null); + datasource.getUpdateStats().setLastProcessingTimeInMillis(null); + + // Run + datasourceUpdateService.updateOrCreateGeoIpData(datasource, mock(Runnable.class)); + + // Verify + 
assertEquals(manifest.getProvider(), datasource.getDatabase().getProvider()); + assertEquals(manifest.getSha256Hash(), datasource.getDatabase().getSha256Hash()); + assertEquals(Instant.ofEpochMilli(manifest.getUpdatedAt()), datasource.getDatabase().getUpdatedAt()); + assertEquals(manifest.getValidForInDays(), datasource.getDatabase().getValidForInDays()); + assertNotNull(datasource.getUpdateStats().getLastSucceededAt()); + assertNotNull(datasource.getUpdateStats().getLastProcessingTimeInMillis()); + verify(datasourceDao, times(2)).updateDatasource(datasource); + verify(geoIpDataDao).putGeoIpData(eq(datasource.currentIndexName()), isA(String[].class), any(Iterator.class), any(Runnable.class)); + verify(urlDenyListChecker).toUrlIfNotInDenyList(datasource.getEndpoint()); + } + + public void testWaitUntilAllShardsStarted_whenTimedOut_thenThrowException() { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(false); + when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); + + // Run + Exception e = expectThrows(OpenSearchException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); + + // Verify + assertTrue(e.getMessage().contains("did not complete")); + } + + @SneakyThrows + public void testWaitUntilAllShardsStarted_whenInterrupted_thenThrowException() { + String indexName = GeospatialTestHelper.randomLowerCaseString(); + ShardRouting shardRouting = mock(ShardRouting.class); + when(shardRouting.started()).thenReturn(false); + when(routingTable.allShards(indexName)).thenReturn(Arrays.asList(shardRouting)); + + // Run + Thread.currentThread().interrupt(); + Exception e = expectThrows(RuntimeException.class, () -> datasourceUpdateService.waitUntilAllShardsStarted(indexName, 10)); + + // Verify + assertEquals(InterruptedException.class, e.getCause().getClass()); + } + + @SneakyThrows + public void 
testGetHeaderFields_whenValidInput_thenReturnCorrectValue() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + + File sampleFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + // Run + assertEquals(Arrays.asList("country_name"), datasourceUpdateService.getHeaderFields(manifestFile.toURI().toURL().toExternalForm())); + } + + @SneakyThrows + public void testDeleteUnusedIndices_whenValidInput_thenSucceed() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + String indexPrefix = String.format(".ip2geo-data.%s.", datasourceName); + Instant now = Instant.now(); + String currentIndex = indexPrefix + now.toEpochMilli(); + String oldIndex = indexPrefix + now.minusMillis(1).toEpochMilli(); + String lingeringIndex = indexPrefix + now.minusMillis(2).toEpochMilli(); + Datasource datasource = new Datasource(); + datasource.setName(datasourceName); + datasource.setCurrentIndex(currentIndex); + datasource.getIndices().add(currentIndex); + datasource.getIndices().add(oldIndex); + datasource.getIndices().add(lingeringIndex); + datasource.getDatabase().setUpdatedAt(now); + + when(metadata.hasIndex(currentIndex)).thenReturn(true); + when(metadata.hasIndex(oldIndex)).thenReturn(true); + when(metadata.hasIndex(lingeringIndex)).thenReturn(false); + + datasourceUpdateService.deleteUnusedIndices(datasource); + + assertEquals(1, datasource.getIndices().size()); + assertEquals(currentIndex, datasource.getIndices().get(0)); + verify(datasourceDao).updateDatasource(datasource); + verify(geoIpDataDao).deleteIp2GeoDataIndex(oldIndex); + } + + public void testUpdateDatasource_whenNoChange_thenNoUpdate() { + Datasource datasource = randomDatasource(); + + // Run + datasourceUpdateService.updateDatasource(datasource, 
datasource.getSystemSchedule(), datasource.getTask()); + + // Verify + verify(datasourceDao, never()).updateDatasource(any()); + } + + public void testUpdateDatasource_whenChange_thenUpdate() { + Datasource datasource = randomDatasource(); + datasource.setTask(DatasourceTask.ALL); + + // Run + datasourceUpdateService.updateDatasource( + datasource, + new IntervalSchedule(Instant.now(), datasource.getSystemSchedule().getInterval() + 1, ChronoUnit.DAYS), + datasource.getTask() + ); + datasourceUpdateService.updateDatasource(datasource, datasource.getSystemSchedule(), DatasourceTask.DELETE_UNUSED_INDICES); + + // Verify + verify(datasourceDao, times(2)).updateDatasource(any()); + } + + @SneakyThrows + public void testGetHeaderFields_whenValidInput_thenSucceed() { + File manifestFile = new File(this.getClass().getClassLoader().getResource("ip2geo/manifest.json").getFile()); + File sampleFile = new File(this.getClass().getClassLoader().getResource("ip2geo/sample_valid.csv").getFile()); + when(geoIpDataDao.getDatabaseReader(any())).thenReturn(CSVParser.parse(sampleFile, StandardCharsets.UTF_8, CSVFormat.RFC4180)); + + // Run + List fields = datasourceUpdateService.getHeaderFields(manifestFile.toURI().toURL().toExternalForm()); + + // Verify + List expectedFields = Arrays.asList("country_name"); + assertEquals(expectedFields, fields); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListenerTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListenerTests.java new file mode 100644 index 00000000..ccf3fbfb --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/listener/Ip2GeoListenerTests.java @@ -0,0 +1,199 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.listener; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.never; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.time.Instant; +import java.util.Arrays; +import java.util.List; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.cluster.ClusterChangedEvent; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.RestoreInProgress; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.ActionListener; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceExtension; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceTask; +import org.opensearch.snapshots.Snapshot; +import org.opensearch.snapshots.SnapshotId; + +public class Ip2GeoListenerTests extends Ip2GeoTestCase { + private Ip2GeoListener ip2GeoListener; + + @Before + public void init() { + ip2GeoListener = new Ip2GeoListener(clusterService, threadPool, datasourceDao, geoIpDataDao); + } + + public void testDoStart_whenClusterManagerNode_thenAddListener() { + Settings settings = Settings.builder().put("node.roles", "cluster_manager").build(); + when(clusterService.getSettings()).thenReturn(settings); + + // Run + ip2GeoListener.doStart(); + + // Verify + verify(clusterService).addListener(ip2GeoListener); + } + + public void testDoStart_whenNotClusterManagerNode_thenDoNotAddListener() { + Settings settings = Settings.builder().put("node.roles", "data").build(); + when(clusterService.getSettings()).thenReturn(settings); + + // Run + ip2GeoListener.doStart(); + + // Verify + verify(clusterService, never()).addListener(ip2GeoListener); + } + + public void testDoStop_whenCalled_thenRemoveListener() { + // Run + ip2GeoListener.doStop(); + + // Verify + verify(clusterService).removeListener(ip2GeoListener); + } + + public void 
testClusterChanged_whenNotClusterManagerNode_thenDoNothing() { + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + when(event.localNodeClusterManager()).thenReturn(false); + + // Run + ip2GeoListener.clusterChanged(event); + + // Verify + verify(threadPool, never()).generic(); + } + + public void testClusterChanged_whenNotComplete_thenDoNothing() { + SnapshotId snapshotId = new SnapshotId(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString()); + Snapshot snapshot = new Snapshot(GeospatialTestHelper.randomLowerCaseString(), snapshotId); + RestoreInProgress.Entry entry = new RestoreInProgress.Entry( + GeospatialTestHelper.randomLowerCaseString(), + snapshot, + RestoreInProgress.State.STARTED, + Arrays.asList(DatasourceExtension.JOB_INDEX_NAME), + null + ); + RestoreInProgress restoreInProgress = new RestoreInProgress.Builder().add(entry).build(); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)).thenReturn(restoreInProgress); + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + when(event.localNodeClusterManager()).thenReturn(true); + when(event.state()).thenReturn(clusterState); + + // Run + ip2GeoListener.clusterChanged(event); + + // Verify + verify(threadPool, never()).generic(); + } + + public void testClusterChanged_whenNotDatasourceIndex_thenDoNothing() { + SnapshotId snapshotId = new SnapshotId(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString()); + Snapshot snapshot = new Snapshot(GeospatialTestHelper.randomLowerCaseString(), snapshotId); + RestoreInProgress.Entry entry = new RestoreInProgress.Entry( + GeospatialTestHelper.randomLowerCaseString(), + snapshot, + RestoreInProgress.State.FAILURE, + Arrays.asList(GeospatialTestHelper.randomLowerCaseString()), + null + ); + RestoreInProgress restoreInProgress = new RestoreInProgress.Builder().add(entry).build(); + ClusterState 
clusterState = mock(ClusterState.class); + when(clusterState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)).thenReturn(restoreInProgress); + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + when(event.localNodeClusterManager()).thenReturn(true); + when(event.state()).thenReturn(clusterState); + + // Run + ip2GeoListener.clusterChanged(event); + + // Verify + verify(threadPool, never()).generic(); + } + + public void testClusterChanged_whenDatasourceIndexIsRestored_thenUpdate() { + SnapshotId snapshotId = new SnapshotId(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString()); + Snapshot snapshot = new Snapshot(GeospatialTestHelper.randomLowerCaseString(), snapshotId); + RestoreInProgress.Entry entry = new RestoreInProgress.Entry( + GeospatialTestHelper.randomLowerCaseString(), + snapshot, + RestoreInProgress.State.SUCCESS, + Arrays.asList(DatasourceExtension.JOB_INDEX_NAME), + null + ); + RestoreInProgress restoreInProgress = new RestoreInProgress.Builder().add(entry).build(); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)).thenReturn(restoreInProgress); + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + when(event.localNodeClusterManager()).thenReturn(true); + when(event.state()).thenReturn(clusterState); + + // Run + ip2GeoListener.clusterChanged(event); + + // Verify + verify(threadPool).generic(); + ArgumentCaptor>> captor = ArgumentCaptor.forClass(ActionListener.class); + verify(datasourceDao).getAllDatasources(captor.capture()); + + // Run + List datasources = Arrays.asList(randomDatasource(), randomDatasource()); + datasources.stream().forEach(datasource -> { datasource.setTask(DatasourceTask.DELETE_UNUSED_INDICES); }); + + captor.getValue().onResponse(datasources); + + // Verify + datasources.stream().forEach(datasource -> { + assertEquals(DatasourceTask.ALL, datasource.getTask()); + 
assertNull(datasource.getDatabase().getUpdatedAt()); + assertNull(datasource.getDatabase().getSha256Hash()); + assertTrue(datasource.getSystemSchedule().getNextExecutionTime(Instant.now()).isAfter(Instant.now())); + assertTrue(datasource.getSystemSchedule().getNextExecutionTime(Instant.now()).isBefore(Instant.now().plusSeconds(60))); + }); + verify(datasourceDao).updateDatasource(eq(datasources), any()); + } + + public void testClusterChanged_whenGeoIpDataIsRestored_thenDelete() { + Datasource datasource = randomDatasource(); + SnapshotId snapshotId = new SnapshotId(GeospatialTestHelper.randomLowerCaseString(), GeospatialTestHelper.randomLowerCaseString()); + Snapshot snapshot = new Snapshot(GeospatialTestHelper.randomLowerCaseString(), snapshotId); + RestoreInProgress.Entry entry = new RestoreInProgress.Entry( + GeospatialTestHelper.randomLowerCaseString(), + snapshot, + RestoreInProgress.State.SUCCESS, + Arrays.asList(datasource.currentIndexName()), + null + ); + RestoreInProgress restoreInProgress = new RestoreInProgress.Builder().add(entry).build(); + ClusterState clusterState = mock(ClusterState.class); + when(clusterState.custom(RestoreInProgress.TYPE, RestoreInProgress.EMPTY)).thenReturn(restoreInProgress); + ClusterChangedEvent event = mock(ClusterChangedEvent.class); + when(event.localNodeClusterManager()).thenReturn(true); + when(event.state()).thenReturn(clusterState); + + // Run + ip2GeoListener.clusterChanged(event); + + // Verify + verify(threadPool).generic(); + verify(geoIpDataDao).deleteIp2GeoDataIndex(Arrays.asList(datasource.currentIndexName())); + } + +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorIT.java b/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorIT.java new file mode 100644 index 00000000..e52cbabe --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorIT.java @@ -0,0 +1,254 @@ +/* + * Copyright OpenSearch Contributors + * 
SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.processor; + +import java.io.IOException; +import java.time.Duration; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.opensearch.client.Response; +import org.opensearch.client.ResponseException; +import org.opensearch.common.Randomness; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.geospatial.GeospatialRestTestCase; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoDataServer; +import org.opensearch.geospatial.ip2geo.action.PutDatasourceRequest; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; + +import lombok.SneakyThrows; + +public class Ip2GeoProcessorIT extends GeospatialRestTestCase { + // Use this value in resource name to avoid name conflict among tests + private static final String PREFIX = Ip2GeoProcessorIT.class.getSimpleName().toLowerCase(Locale.ROOT); + private static final String CITY = "city"; + private static final String COUNTRY = "country"; + private static final String IP = "ip"; + private static final String SOURCE = "_source"; + + @BeforeClass + public static void start() { + Ip2GeoDataServer.start(); + } + + @AfterClass + public static void stop() { + Ip2GeoDataServer.stop(); + } + + @SneakyThrows + public void testCreateIp2GeoProcessor_whenValidInput_thenAddData() { + // Reset deny list to allow private network access during test + updateClusterSetting(Map.of(Ip2GeoSettings.DATASOURCE_ENDPOINT_DENYLIST.getKey(), Collections.emptyList())); + + boolean isDatasourceCreated = false; + boolean isProcessorCreated = false; + String pipelineName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + String datasourceName = PREFIX + 
GeospatialTestHelper.randomLowerCaseString(); + try { + String targetField = GeospatialTestHelper.randomLowerCaseString(); + String field = GeospatialTestHelper.randomLowerCaseString(); + + Map datasourceProperties = Map.of( + PutDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + Ip2GeoDataServer.getEndpointCity() + ); + + // Create datasource and wait for it to be available + createDatasource(datasourceName, datasourceProperties); + isDatasourceCreated = true; + // Creation of datasource with same name should fail + ResponseException createException = expectThrows( + ResponseException.class, + () -> createDatasource(datasourceName, datasourceProperties) + ); + // Verify + assertEquals(RestStatus.BAD_REQUEST.getStatus(), createException.getResponse().getStatusLine().getStatusCode()); + waitForDatasourceToBeAvailable(datasourceName, Duration.ofSeconds(10)); + + Map processorProperties = Map.of( + Ip2GeoProcessor.CONFIG_FIELD, + field, + Ip2GeoProcessor.CONFIG_DATASOURCE, + datasourceName, + Ip2GeoProcessor.CONFIG_TARGET_FIELD, + targetField, + Ip2GeoProcessor.CONFIG_PROPERTIES, + Arrays.asList(CITY) + ); + + // Create ip2geo processor + createIp2GeoProcessorPipeline(pipelineName, processorProperties); + isProcessorCreated = true; + + Map> sampleData = getSampleData(); + List docs = sampleData.entrySet() + .stream() + .map(entry -> createDocument(field, entry.getKey())) + .collect(Collectors.toList()); + + // Simulate processor + Map response = simulatePipeline(pipelineName, docs); + + // Verify data added to document + List> sources = convertToListOfSources(response, targetField); + sources.stream().allMatch(source -> source.size() == 1); + List cities = sources.stream().map(value -> value.get(CITY)).collect(Collectors.toList()); + List expectedCities = sampleData.values().stream().map(value -> value.get(CITY)).collect(Collectors.toList()); + assertEquals(expectedCities, cities); + + // Delete datasource fails when there is a process using it + ResponseException 
deleteException = expectThrows(ResponseException.class, () -> deleteDatasource(datasourceName)); + // Verify + assertEquals(RestStatus.BAD_REQUEST.getStatus(), deleteException.getResponse().getStatusLine().getStatusCode()); + } finally { + Exception exception = null; + try { + if (isProcessorCreated) { + deletePipeline(pipelineName); + } + if (isDatasourceCreated) { + deleteDatasource(datasourceName, 3); + } + } catch (Exception e) { + exception = e; + } + if (exception != null) { + throw exception; + } + } + } + + @SneakyThrows + public void testCreateIp2GeoProcessor_whenPrivateAddress_thenBlocked() { + String datasourceName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + Map datasourceProperties = Map.of( + PutDatasourceRequest.ENDPOINT_FIELD.getPreferredName(), + "http://127.0.0.1:9200/city/manifest_local.json" + ); + + // Create datasource and wait for it to be available + ResponseException exception = expectThrows(ResponseException.class, () -> createDatasource(datasourceName, datasourceProperties)); + assertEquals(400, exception.getResponse().getStatusLine().getStatusCode()); + assertTrue(exception.getMessage().contains("blocked by deny list")); + } + + private Response createIp2GeoProcessorPipeline(final String pipelineName, final Map properties) throws IOException { + String field = GeospatialTestHelper.randomLowerCaseString(); + String datasourceName = PREFIX + GeospatialTestHelper.randomLowerCaseString(); + Map defaultProperties = Map.of( + Ip2GeoProcessor.CONFIG_FIELD, + field, + Ip2GeoProcessor.CONFIG_DATASOURCE, + datasourceName + ); + Map baseProperties = new HashMap<>(); + baseProperties.putAll(defaultProperties); + baseProperties.putAll(properties); + Map processorConfig = buildProcessorConfig(Ip2GeoProcessor.TYPE, baseProperties); + + return createPipeline(pipelineName, Optional.empty(), Arrays.asList(processorConfig)); + } + + private Map> getSampleData() { + Map> sampleData = new HashMap<>(); + sampleData.put( + String.format( + 
Locale.ROOT, + "10.%d.%d.%d", + Randomness.get().nextInt(255), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255) + ), + Map.of(CITY, "Seattle", COUNTRY, "USA") + ); + sampleData.put( + String.format( + Locale.ROOT, + "127.%d.%d.%d", + Randomness.get().nextInt(15), + Randomness.get().nextInt(255), + Randomness.get().nextInt(255) + ), + Map.of(CITY, "Vancouver", COUNTRY, "Canada") + ); + sampleData.put( + String.format( + Locale.ROOT, + "fd12:2345:6789:1:%x:%x:%x:%x", + Randomness.get().nextInt(65535), + Randomness.get().nextInt(65535), + Randomness.get().nextInt(65535), + Randomness.get().nextInt(65535) + ), + Map.of(CITY, "Bengaluru", COUNTRY, "India") + ); + return sampleData; + } + + private Map> createDocument(String... args) { + if (args.length % 2 == 1) { + throw new RuntimeException("Number of arguments should be even"); + } + Map source = new HashMap<>(); + for (int i = 0; i < args.length; i += 2) { + source.put(args[0], args[1]); + } + return Map.of(SOURCE, source); + } + + /** + * This method convert returned value of simulatePipeline method to a list of sources + * + * For example, + * Input: + * { + * "docs" : [ + * { + * "doc" : { + * "_index" : "_index", + * "_id" : "_id", + * "_source" : { + * "ip2geo" : { + * "ip" : "127.0.0.1", + * "city" : "Seattle" + * }, + * "_ip" : "127.0.0.1" + * }, + * "_ingest" : { + * "timestamp" : "2023-05-12T17:41:42.939703Z" + * } + * } + * } + * ] + * } + * + * Output: + * [ + * { + * "ip" : "127.0.0.1", + * "city" : "Seattle" + * } + * ] + * + */ + private List> convertToListOfSources(final Map data, final String targetField) { + List>> docs = (List>>) data.get("docs"); + return docs.stream() + .map(doc -> (Map>) doc.get("doc").get(SOURCE)) + .map(source -> source.get(targetField)) + .collect(Collectors.toList()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorTests.java b/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorTests.java new 
file mode 100644 index 00000000..b387ec97 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/ip2geo/processor/Ip2GeoProcessorTests.java @@ -0,0 +1,317 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.ip2geo.processor; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; + +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.opensearch.OpenSearchException; +import org.opensearch.common.Randomness; +import org.opensearch.geospatial.GeospatialTestHelper; +import org.opensearch.geospatial.ip2geo.Ip2GeoTestCase; +import org.opensearch.geospatial.ip2geo.common.DatasourceState; +import org.opensearch.geospatial.ip2geo.common.ParameterValidator; +import org.opensearch.geospatial.ip2geo.jobscheduler.Datasource; +import org.opensearch.ingest.IngestDocument; + +import lombok.SneakyThrows; + +public class Ip2GeoProcessorTests extends Ip2GeoTestCase { + private static final String DEFAULT_TARGET_FIELD = "ip2geo"; + private static final List SUPPORTED_FIELDS = Arrays.asList("city", "country"); + private Ip2GeoProcessor.Factory factory; + private ParameterValidator inputFormatValidator; + + @Before + public void init() { + factory = new Ip2GeoProcessor.Factory(ingestService, datasourceDao, geoIpDataDao, ip2GeoCachedDao); + } + + public void testExecuteWithNoIpAndIgnoreMissing() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Map config = new HashMap<>(); + config.put("ignore_missing", true); + 
Ip2GeoProcessor processor = createProcessor(datasourceName, config); + IngestDocument document = new IngestDocument(new HashMap<>(), new HashMap<>()); + BiConsumer handler = (doc, e) -> { + assertEquals(document, doc); + assertNull(e); + }; + processor.execute(document, handler); + } + + public void testExecute_whenNoIp_thenException() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Map config = new HashMap<>(); + Ip2GeoProcessor processor = createProcessor(datasourceName, config); + IngestDocument document = new IngestDocument(new HashMap<>(), new HashMap<>()); + BiConsumer handler = mock(BiConsumer.class); + + // Run + processor.execute(document, handler); + + // Verify + verify(handler).accept(isNull(), any(IllegalArgumentException.class)); + } + + public void testExecute_whenNonStringValue_thenException() throws Exception { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + Map source = new HashMap<>(); + source.put("ip", Randomness.get().nextInt()); + IngestDocument document = new IngestDocument(source, new HashMap<>()); + BiConsumer handler = mock(BiConsumer.class); + + // Run + processor.execute(document, handler); + + // Verify + verify(handler).accept(isNull(), any(IllegalArgumentException.class)); + } + + @SneakyThrows + public void testExecute_whenNoDatasource_thenNotExistError() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + + Map source = new HashMap<>(); + String ip = randomIpAddress(); + source.put("ip", ip); + IngestDocument document = new IngestDocument(source, new HashMap<>()); + + when(ip2GeoCachedDao.has(datasourceName)).thenReturn(false); + BiConsumer handler = mock(BiConsumer.class); + + // Run + processor.execute(document, handler); + + // Verify + ArgumentCaptor captor = 
ArgumentCaptor.forClass(Exception.class); + verify(handler).accept(isNull(), captor.capture()); + captor.getValue().getMessage().contains("not exist"); + } + + @SneakyThrows + public void testExecute_whenExpired_thenExpiredMsg() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + BiConsumer handler = mock(BiConsumer.class); + + String indexName = GeospatialTestHelper.randomLowerCaseString(); + when(ip2GeoCachedDao.getIndexName(datasourceName)).thenReturn(indexName); + when(ip2GeoCachedDao.has(datasourceName)).thenReturn(true); + when(ip2GeoCachedDao.getState(datasourceName)).thenReturn(DatasourceState.AVAILABLE); + when(ip2GeoCachedDao.isExpired(datasourceName)).thenReturn(true); + Map geoData = Map.of("city", "Seattle", "country", "USA"); + when(ip2GeoCachedDao.getGeoData(eq(indexName), any())).thenReturn(geoData); + + // Run for single ip + String ip = randomIpAddress(); + IngestDocument documentWithIp = createDocument(ip); + processor.execute(documentWithIp, handler); + + // Verify + verify(handler).accept(documentWithIp, null); + assertEquals("ip2geo_data_expired", documentWithIp.getFieldValue(DEFAULT_TARGET_FIELD + ".error", String.class)); + + // Run for multi ips + List ips = Arrays.asList(randomIpAddress(), randomIpAddress()); + IngestDocument documentWithIps = createDocument(ips); + processor.execute(documentWithIps, handler); + + // Verify + verify(handler).accept(documentWithIps, null); + assertEquals("ip2geo_data_expired", documentWithIp.getFieldValue(DEFAULT_TARGET_FIELD + ".error", String.class)); + } + + @SneakyThrows + public void testExecute_whenNotAvailable_thenException() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + BiConsumer handler = mock(BiConsumer.class); + + String indexName = 
GeospatialTestHelper.randomLowerCaseString(); + when(ip2GeoCachedDao.getIndexName(datasourceName)).thenReturn(indexName); + when(ip2GeoCachedDao.has(datasourceName)).thenReturn(true); + when(ip2GeoCachedDao.getState(datasourceName)).thenReturn(DatasourceState.CREATE_FAILED); + when(ip2GeoCachedDao.isExpired(datasourceName)).thenReturn(false); + Map geoData = Map.of("city", "Seattle", "country", "USA"); + when(ip2GeoCachedDao.getGeoData(eq(indexName), any())).thenReturn(geoData); + + // Run for single ip + String ip = randomIpAddress(); + IngestDocument documentWithIp = createDocument(ip); + processor.execute(documentWithIp, handler); + + // Run for multi ips + List ips = Arrays.asList(randomIpAddress(), randomIpAddress()); + IngestDocument documentWithIps = createDocument(ips); + processor.execute(documentWithIps, handler); + + // Verify + ArgumentCaptor captor = ArgumentCaptor.forClass(IllegalStateException.class); + verify(handler, times(2)).accept(isNull(), captor.capture()); + assertTrue(captor.getAllValues().stream().allMatch(e -> e.getMessage().contains("not in an available state"))); + } + + @SneakyThrows + public void testExecute_whenCalled_thenGeoIpDataIsAdded() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + BiConsumer handler = mock(BiConsumer.class); + + String indexName = GeospatialTestHelper.randomLowerCaseString(); + when(ip2GeoCachedDao.getIndexName(datasourceName)).thenReturn(indexName); + when(ip2GeoCachedDao.has(datasourceName)).thenReturn(true); + when(ip2GeoCachedDao.getState(datasourceName)).thenReturn(DatasourceState.AVAILABLE); + when(ip2GeoCachedDao.isExpired(datasourceName)).thenReturn(false); + Map geoData = Map.of("city", "Seattle", "country", "USA"); + when(ip2GeoCachedDao.getGeoData(eq(indexName), any())).thenReturn(geoData); + + // Run for single ip + String ip = randomIpAddress(); + IngestDocument documentWithIp = 
createDocument(ip); + processor.execute(documentWithIp, handler); + + // Verify + assertEquals(geoData.get("city"), documentWithIp.getFieldValue("ip2geo.city", String.class)); + assertEquals(geoData.get("country"), documentWithIp.getFieldValue("ip2geo.country", String.class)); + + // Run for multi ips + List ips = Arrays.asList(randomIpAddress(), randomIpAddress()); + IngestDocument documentWithIps = createDocument(ips); + processor.execute(documentWithIps, handler); + + // Verify + assertEquals(2, documentWithIps.getFieldValue("ip2geo", List.class).size()); + Map addedValue = (Map) documentWithIps.getFieldValue("ip2geo", List.class).get(0); + assertEquals(geoData.get("city"), addedValue.get("city")); + assertEquals(geoData.get("country"), addedValue.get("country")); + } + + @SneakyThrows + public void testExecute_whenPropertiesSet_thenFilteredGeoIpDataIsAdded() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Map.of(Ip2GeoProcessor.CONFIG_PROPERTIES, Arrays.asList("country"))); + BiConsumer handler = mock(BiConsumer.class); + + String indexName = GeospatialTestHelper.randomLowerCaseString(); + when(ip2GeoCachedDao.getIndexName(datasourceName)).thenReturn(indexName); + when(ip2GeoCachedDao.has(datasourceName)).thenReturn(true); + when(ip2GeoCachedDao.getState(datasourceName)).thenReturn(DatasourceState.AVAILABLE); + when(ip2GeoCachedDao.isExpired(datasourceName)).thenReturn(false); + Map geoData = Map.of("city", "Seattle", "country", "USA"); + when(ip2GeoCachedDao.getGeoData(eq(indexName), any())).thenReturn(geoData); + + // Run for single ip + String ip = randomIpAddress(); + IngestDocument documentWithIp = createDocument(ip); + processor.execute(documentWithIp, handler); + + // Verify + assertFalse(documentWithIp.hasField("ip2geo.city")); + assertEquals(geoData.get("country"), documentWithIp.getFieldValue("ip2geo.country", String.class)); + + // Run for multi ips + List ips = 
Arrays.asList(randomIpAddress(), randomIpAddress()); + IngestDocument documentWithIps = createDocument(ips); + processor.execute(documentWithIps, handler); + + // Verify + assertEquals(2, documentWithIps.getFieldValue("ip2geo", List.class).size()); + Map addedValue = (Map) documentWithIps.getFieldValue("ip2geo", List.class).get(0); + assertFalse(addedValue.containsKey("city")); + assertEquals(geoData.get("country"), addedValue.get("country")); + } + + @SneakyThrows + public void testExecute_whenNoHandler_thenException() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + IngestDocument document = new IngestDocument(Collections.emptyMap(), Collections.emptyMap()); + Exception e = expectThrows(IllegalStateException.class, () -> processor.execute(document)); + assertTrue(e.getMessage().contains("Not implemented")); + } + + @SneakyThrows + public void testExecute_whenContainsNonString_thenException() { + String datasourceName = GeospatialTestHelper.randomLowerCaseString(); + Ip2GeoProcessor processor = createProcessor(datasourceName, Collections.emptyMap()); + List ips = Arrays.asList(randomIpAddress(), 1); + Map source = new HashMap<>(); + source.put("ip", ips); + IngestDocument document = new IngestDocument(source, new HashMap<>()); + BiConsumer handler = mock(BiConsumer.class); + + // Run + processor.execute(document, handler); + + // Verify + ArgumentCaptor captor = ArgumentCaptor.forClass(IllegalArgumentException.class); + verify(handler).accept(isNull(), captor.capture()); + assertTrue(captor.getValue().getMessage().contains("should only contain strings")); + } + + @SneakyThrows + public void testCreate_whenInvalidDatasourceName_thenFails() { + String invalidName = "_" + GeospatialTestHelper.randomLowerCaseString(); + + // Run + Exception e = expectThrows(OpenSearchException.class, () -> createProcessor(invalidName, Collections.emptyMap())); + + // 
Verify + assertTrue(e.getMessage().contains("must not")); + } + + private Ip2GeoProcessor createProcessor(final String datasourceName, final Map config) throws Exception { + Datasource datasource = new Datasource(); + datasource.setName(datasourceName); + datasource.setState(DatasourceState.AVAILABLE); + datasource.getDatabase().setFields(SUPPORTED_FIELDS); + return createProcessor(datasource, config); + } + + private Ip2GeoProcessor createProcessor(final Datasource datasource, final Map config) throws Exception { + when(datasourceDao.getDatasource(datasource.getName())).thenReturn(datasource); + Map baseConfig = new HashMap<>(); + baseConfig.put(Ip2GeoProcessor.CONFIG_FIELD, "ip"); + baseConfig.put(Ip2GeoProcessor.CONFIG_DATASOURCE, datasource.getName()); + baseConfig.putAll(config); + + return factory.create( + Collections.emptyMap(), + GeospatialTestHelper.randomLowerCaseString(), + GeospatialTestHelper.randomLowerCaseString(), + baseConfig + ); + } + + private IngestDocument createDocument(String ip) { + Map source = new HashMap<>(); + source.put("ip", ip); + return new IngestDocument(source, new HashMap<>()); + } + + private IngestDocument createDocument(List ips) { + Map source = new HashMap<>(); + source.put("ip", ips); + return new IngestDocument(source, new HashMap<>()); + } +} diff --git a/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginIT.java b/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginIT.java index 70197266..0aca1c1c 100644 --- a/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginIT.java +++ b/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginIT.java @@ -10,10 +10,10 @@ import org.apache.http.util.EntityUtils; import org.opensearch.client.Request; import org.opensearch.client.Response; -import org.opensearch.rest.RestStatus; -import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.core.rest.RestStatus; +import org.opensearch.geospatial.GeospatialRestTestCase; -public 
class GeospatialPluginIT extends OpenSearchRestTestCase { +public class GeospatialPluginIT extends GeospatialRestTestCase { /** * Tests whether plugin is installed or not diff --git a/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginTests.java b/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginTests.java index 1a775955..5bfb489d 100644 --- a/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginTests.java +++ b/src/test/java/org/opensearch/geospatial/plugin/GeospatialPluginTests.java @@ -5,46 +5,206 @@ package org.opensearch.geospatial.plugin; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.opensearch.geospatial.ip2geo.jobscheduler.Datasource.IP2GEO_DATA_INDEX_NAME_PREFIX; + +import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionResponse; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.lifecycle.LifecycleComponent; +import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.env.NodeEnvironment; import org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONAction; +import org.opensearch.geospatial.ip2geo.action.RestDeleteDatasourceHandler; +import org.opensearch.geospatial.ip2geo.action.RestGetDatasourceHandler; +import 
org.opensearch.geospatial.ip2geo.action.RestPutDatasourceHandler; +import org.opensearch.geospatial.ip2geo.action.RestUpdateDatasourceHandler; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoExecutor; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoLockService; +import org.opensearch.geospatial.ip2geo.common.Ip2GeoSettings; +import org.opensearch.geospatial.ip2geo.common.URLDenyListChecker; +import org.opensearch.geospatial.ip2geo.dao.DatasourceDao; +import org.opensearch.geospatial.ip2geo.dao.GeoIpDataDao; +import org.opensearch.geospatial.ip2geo.dao.Ip2GeoCachedDao; +import org.opensearch.geospatial.ip2geo.jobscheduler.DatasourceUpdateService; +import org.opensearch.geospatial.ip2geo.listener.Ip2GeoListener; import org.opensearch.geospatial.processor.FeatureProcessor; import org.opensearch.geospatial.rest.action.upload.geojson.RestUploadGeoJSONAction; import org.opensearch.geospatial.stats.upload.RestUploadStatsAction; +import org.opensearch.geospatial.stats.upload.UploadStats; +import org.opensearch.indices.SystemIndexDescriptor; +import org.opensearch.ingest.IngestService; import org.opensearch.ingest.Processor; import org.opensearch.plugins.ActionPlugin; import org.opensearch.plugins.IngestPlugin; +import org.opensearch.repositories.RepositoriesService; import org.opensearch.rest.RestHandler; +import org.opensearch.script.ScriptService; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.watcher.ResourceWatcherService; public class GeospatialPluginTests extends OpenSearchTestCase { + private final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet(Ip2GeoSettings.settings())); + private final URLDenyListChecker urlDenyListChecker = new URLDenyListChecker(clusterSettings); + private final List SUPPORTED_REST_HANDLERS = List.of( + new RestUploadGeoJSONAction(), + new RestUploadStatsAction(), + new RestPutDatasourceHandler(clusterSettings, 
urlDenyListChecker), + new RestGetDatasourceHandler(), + new RestUpdateDatasourceHandler(urlDenyListChecker), + new RestDeleteDatasourceHandler() + ); + + private final Set SUPPORTED_SYSTEM_INDEX_PATTERN = Set.of(IP2GEO_DATA_INDEX_NAME_PREFIX); + + private final Set SUPPORTED_COMPONENTS = Set.of( + UploadStats.class, + DatasourceUpdateService.class, + DatasourceDao.class, + Ip2GeoExecutor.class, + GeoIpDataDao.class, + Ip2GeoLockService.class, + Ip2GeoCachedDao.class + ); + + @Mock + private Client client; + @Mock + private ClusterService clusterService; + @Mock + private IngestService ingestService; + @Mock + private ThreadPool threadPool; + @Mock + private ResourceWatcherService resourceWatcherService; + @Mock + private ScriptService scriptService; + @Mock + private NamedXContentRegistry xContentRegistry; + @Mock + private Environment environment; + @Mock + private NamedWriteableRegistry namedWriteableRegistry; + @Mock + private IndexNameExpressionResolver indexNameExpressionResolver; + @Mock + private Supplier repositoriesServiceSupplier; + private NodeEnvironment nodeEnvironment; + private Settings settings; + private AutoCloseable openMocks; + private GeospatialPlugin plugin; + + @Before + public void init() { + openMocks = MockitoAnnotations.openMocks(this); + settings = Settings.EMPTY; + when(client.settings()).thenReturn(settings); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.getSettings()).thenReturn(settings); + when(ingestService.getClusterService()).thenReturn(clusterService); + nodeEnvironment = null; + plugin = new GeospatialPlugin(); + // Need to call getProcessors to initialize few instances in plugin class + plugin.getProcessors(getProcessorParameter()); + } + + @After + public void close() throws Exception { + openMocks.close(); + } + + public void testSystemIndexDescriptors() { + Set registeredSystemIndexPatterns = new HashSet<>(); + for (SystemIndexDescriptor descriptor : 
plugin.getSystemIndexDescriptors(Settings.EMPTY)) { + registeredSystemIndexPatterns.add(descriptor.getIndexPattern()); + } + assertEquals(SUPPORTED_SYSTEM_INDEX_PATTERN, registeredSystemIndexPatterns); - private final List SUPPORTED_REST_HANDLERS = List.of(new RestUploadGeoJSONAction(), new RestUploadStatsAction()); + } + + public void testExecutorBuilders() { + assertEquals(1, plugin.getExecutorBuilders(Settings.EMPTY).size()); + } + + public void testCreateComponents() { + Set registeredComponents = new HashSet<>(); + Collection components = plugin.createComponents( + client, + clusterService, + threadPool, + resourceWatcherService, + scriptService, + xContentRegistry, + environment, + nodeEnvironment, + namedWriteableRegistry, + indexNameExpressionResolver, + repositoriesServiceSupplier + ); + for (Object component : components) { + registeredComponents.add(component.getClass()); + } + assertEquals(SUPPORTED_COMPONENTS, registeredComponents); + } + + public void testGetGuiceServiceClasses() { + Collection> classes = List.of(Ip2GeoListener.class); + assertEquals(classes, plugin.getGuiceServiceClasses()); + } public void testIsAnIngestPlugin() { - GeospatialPlugin plugin = new GeospatialPlugin(); assertTrue(plugin instanceof IngestPlugin); } public void testFeatureProcessorIsAdded() { - GeospatialPlugin plugin = new GeospatialPlugin(); - Map processors = plugin.getProcessors(null); + Map processors = plugin.getProcessors(getProcessorParameter()); assertTrue(processors.containsKey(FeatureProcessor.TYPE)); assertTrue(processors.get(FeatureProcessor.TYPE) instanceof FeatureProcessor.Factory); } public void testTotalRestHandlers() { - GeospatialPlugin plugin = new GeospatialPlugin(); - assertEquals(SUPPORTED_REST_HANDLERS.size(), plugin.getRestHandlers(Settings.EMPTY, null, null, null, null, null, null).size()); + assertEquals( + SUPPORTED_REST_HANDLERS.size(), + plugin.getRestHandlers(Settings.EMPTY, null, clusterSettings, null, null, null, null).size() + ); } public 
void testUploadGeoJSONTransportIsAdded() { - GeospatialPlugin plugin = new GeospatialPlugin(); final List> actions = plugin.getActions(); assertEquals(1, actions.stream().filter(actionHandler -> actionHandler.getAction() instanceof UploadGeoJSONAction).count()); } + + private Processor.Parameters getProcessorParameter() { + return new Processor.Parameters( + mock(Environment.class), + mock(ScriptService.class), + null, + null, + null, + null, + ingestService, + client, + null, + null + ); + } } diff --git a/src/test/java/org/opensearch/geospatial/processor/FeatureProcessorIT.java b/src/test/java/org/opensearch/geospatial/processor/FeatureProcessorIT.java index afe4a2e4..14fbd129 100644 --- a/src/test/java/org/opensearch/geospatial/processor/FeatureProcessorIT.java +++ b/src/test/java/org/opensearch/geospatial/processor/FeatureProcessorIT.java @@ -22,8 +22,8 @@ import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.geospatial.GeospatialRestTestCase; -import org.opensearch.rest.RestStatus; public class FeatureProcessorIT extends GeospatialRestTestCase { @@ -48,9 +48,9 @@ public void testIndexGeoJSONSuccess() throws IOException { Map geoFields = new HashMap<>(); geoFields.put(geoShapeField, "geo_shape"); - Map processorProperties = new HashMap<>(); + Map processorProperties = new HashMap<>(); processorProperties.put(FeatureProcessor.FIELD_KEY, geoShapeField); - Map geoJSONProcessorConfig = buildGeoJSONFeatureProcessorConfig(processorProperties); + Map geoJSONProcessorConfig = buildProcessorConfig(FeatureProcessor.TYPE, processorProperties); List> configs = new ArrayList<>(); configs.add(geoJSONProcessorConfig); diff --git a/src/test/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONActionIT.java b/src/test/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONActionIT.java index 
4c42cbb7..5cde05ae 100644 --- a/src/test/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONActionIT.java +++ b/src/test/java/org/opensearch/geospatial/rest/action/upload/geojson/RestUploadGeoJSONActionIT.java @@ -12,7 +12,6 @@ package org.opensearch.geospatial.rest.action.upload.geojson; import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; -import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseStringWithSuffix; import static org.opensearch.geospatial.action.upload.geojson.UploadGeoJSONRequestContent.*; import java.io.IOException; @@ -22,8 +21,8 @@ import org.opensearch.client.Response; import org.opensearch.client.ResponseException; import org.opensearch.common.settings.Settings; +import org.opensearch.core.rest.RestStatus; import org.opensearch.geospatial.GeospatialRestTestCase; -import org.opensearch.rest.RestStatus; public class RestUploadGeoJSONActionIT extends GeospatialRestTestCase { @@ -31,7 +30,7 @@ public class RestUploadGeoJSONActionIT extends GeospatialRestTestCase { public void testGeoJSONUploadSuccessPostMethod() throws IOException { - final String index = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); + final String index = randomLowerCaseString(); assertIndexNotExists(index); Response response = uploadGeoJSONFeatures(NUMBER_OF_FEATURES_TO_ADD, index, null); assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); @@ -41,8 +40,7 @@ public void testGeoJSONUploadSuccessPostMethod() throws IOException { public void testGeoJSONUploadFailIndexExists() throws IOException { - String index = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); - ; + String index = randomLowerCaseString(); String geoFieldName = randomLowerCaseString(); Map geoFields = new HashMap<>(); geoFields.put(geoFieldName, "geo_shape"); @@ -57,7 +55,7 @@ public void testGeoJSONUploadFailIndexExists() throws IOException { public void 
testGeoJSONUploadSuccessPutMethod() throws IOException { - String index = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); + String index = randomLowerCaseString(); Response response = uploadGeoJSONFeaturesIntoExistingIndex(NUMBER_OF_FEATURES_TO_ADD, index, null); assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); assertIndexExists(index); @@ -66,7 +64,7 @@ public void testGeoJSONUploadSuccessPutMethod() throws IOException { public void testGeoJSONPutMethodUploadIndexExists() throws IOException { - String index = randomLowerCaseStringWithSuffix(ACCEPTED_INDEX_SUFFIX_PATH); + String index = randomLowerCaseString(); String geoFieldName = randomLowerCaseString(); Response response = uploadGeoJSONFeaturesIntoExistingIndex(NUMBER_OF_FEATURES_TO_ADD, index, geoFieldName); assertEquals(RestStatus.OK, RestStatus.fromCode(response.getStatusLine().getStatusCode())); diff --git a/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexAggregationIT.java b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexAggregationIT.java new file mode 100644 index 00000000..9926d049 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexAggregationIT.java @@ -0,0 +1,172 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static java.util.function.Function.identity; +import static java.util.stream.Collectors.toMap; +import static org.hamcrest.Matchers.containsString; +import static org.opensearch.geo.GeometryTestUtils.randomPoint; +import static org.opensearch.geospatial.GeospatialTestHelper.randomHexGridPrecision; +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; +import static org.opensearch.geospatial.h3.H3.geoToH3Address; +import static 
org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; + +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.hamcrest.MatcherAssert; +import org.opensearch.client.ResponseException; +import org.opensearch.cluster.ClusterModule; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ContextParser; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.geometry.Point; +import org.opensearch.geospatial.GeospatialRestTestCase; +import org.opensearch.geospatial.h3.H3; +import org.opensearch.index.mapper.GeoPointFieldMapper; +import org.opensearch.search.aggregations.Aggregation; +import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; + +public class GeoHexAggregationIT extends GeospatialRestTestCase { + + private static final String FIELD = "field"; + private static final String FIELD_PRECISION = "precision"; + private static final String FIELD_SIZE = "size"; + private static int MAX_DOCUMENTS = 15; + private static int MIN_DOCUMENTS = 2; + private String indexName; + private String geospatialFieldName; + + @Override + public void setUp() throws Exception { + super.setUp(); + indexName = randomLowerCaseString(); + geospatialFieldName = randomLowerCaseString(); + } + + public void testGeoHexGridBucket() throws Exception { + // Step 1: Create an index + createIndex(indexName, Settings.EMPTY, Map.of(geospatialFieldName, GeoPointFieldMapper.CONTENT_TYPE)); + + // Generate metadata for Test data + final var randomDocumentsForTesting = randomIntBetween(MIN_DOCUMENTS, MAX_DOCUMENTS); + final var randomPrecision = randomHexGridPrecision(); + + // Generate Test data + final Map pointStringMap = generateRandomPointH3CellMap(randomDocumentsForTesting, randomPrecision); + for (var point : pointStringMap.keySet()) { 
+ indexDocumentUsingWKT(indexName, geospatialFieldName, point.toString()); + } + + // do in-memory aggregation for comparison + final Map expectedAggregationMap = pointStringMap.values() + .stream() + .collect(Collectors.groupingBy(identity(), Collectors.counting())); + + // build test aggregation search query + var context = randomLowerCaseString(); + var content = buildSearchAggregationsBodyAsString(builder -> { + builder.startObject(context) + .startObject(GeoHexGridAggregationBuilder.NAME) + .field(FIELD, geospatialFieldName) + .field(FIELD_PRECISION, randomPrecision) + .field(FIELD_SIZE, expectedAggregationMap.size()) + .endObject() + .endObject(); + }); + + // execute Aggregation + final var searchResponse = searchIndex(indexName, content, true); + // Assert Search succeeded + assertSearchResponse(searchResponse); + // Fetch Aggregation + final var aggregation = searchResponse.getAggregations().asMap().get(context); + assertNotNull(aggregation); + + // Assert Results + assertTrue(aggregation instanceof MultiBucketsAggregation); + final var multiBucketsAggregation = (MultiBucketsAggregation) aggregation; + + // Assert size before checking contents + assertEquals(expectedAggregationMap.size(), multiBucketsAggregation.getBuckets().size()); + final Map actualAggregationMap = multiBucketsAggregation.getBuckets() + .stream() + .collect(toMap(MultiBucketsAggregation.Bucket::getKeyAsString, MultiBucketsAggregation.Bucket::getDocCount)); + + // compare in-memory aggregation with cluster aggregation + assertEquals(expectedAggregationMap, actualAggregationMap); + + } + + public void testSizeIsZero() throws Exception { + + // build test aggregation search query + var context = randomLowerCaseString(); + var content = buildSearchAggregationsBodyAsString(builder -> { + builder.startObject(context) + .startObject(GeoHexGridAggregationBuilder.NAME) + .field(FIELD, geospatialFieldName) + .field(FIELD_PRECISION, randomHexGridPrecision()) + .field(FIELD_SIZE, 0) + .endObject() 
+ .endObject(); + }); + + // execute Aggregation + ResponseException exception = expectThrows(ResponseException.class, () -> searchIndex(indexName, content, true)); + MatcherAssert.assertThat(exception.getMessage(), containsString("[size] must be greater than 0.")); + } + + public void testInvalidPrecision() throws Exception { + + // build test aggregation search query + var invalidPrecision = H3.MAX_H3_RES + 1; + var content = buildSearchAggregationsBodyAsString(builder -> { + builder.startObject(randomLowerCaseString()) + .startObject(GeoHexGridAggregationBuilder.NAME) + .field(FIELD, geospatialFieldName) + .field(FIELD_PRECISION, invalidPrecision) + .endObject() + .endObject(); + }); + + // execute Aggregation + ResponseException exception = expectThrows(ResponseException.class, () -> searchIndex(indexName, content, true)); + MatcherAssert.assertThat( + exception.getMessage(), + containsString( + String.format( + Locale.ROOT, + "Invalid precision of %d . Must be between %d and %d", + invalidPrecision, + H3.MIN_H3_RES, + H3.MAX_H3_RES + ) + ) + ); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + final List namedXContents = new ArrayList<>(ClusterModule.getNamedXWriteables()); + final ContextParser hexGridParser = (p, c) -> ParsedGeoHexGrid.fromXContent(p, (String) c); + namedXContents.add( + new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(GeoHexGridAggregationBuilder.NAME), hexGridParser) + ); + return new NamedXContentRegistry(namedXContents); + } + + private Map generateRandomPointH3CellMap(int size, int randomPrecision) { + return IntStream.range(0, size) + .mapToObj(unUsed -> randomPoint()) + .collect(toMap(identity(), point -> geoToH3Address(point.getLat(), point.getLon(), randomPrecision))); + } +} diff --git a/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorTests.java 
b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorTests.java new file mode 100644 index 00000000..dcf30c57 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridAggregatorTests.java @@ -0,0 +1,334 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.apache.lucene.document.LatLonDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.geo.GeoEncodingUtils; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; +import org.hamcrest.MatcherAssert; +import org.opensearch.common.CheckedConsumer; +import org.opensearch.common.geo.GeoBoundingBox; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.common.geo.GeoUtils; +import org.opensearch.geo.GeometryTestUtils; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder; +import 
org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGridBucket; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geospatial.h3.H3; +import org.opensearch.geospatial.plugin.GeospatialPlugin; +import org.opensearch.index.mapper.GeoPointFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.plugins.SearchPlugin; +import org.opensearch.search.aggregations.Aggregation; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.search.aggregations.MultiBucketConsumerService; +import org.opensearch.search.aggregations.bucket.terms.StringTerms; +import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; + +/** + * This class is modified from https://github.com/opensearch-project/OpenSearch/blob/main/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java + * to keep relevant test case required for GeoHex Grid Aggregation. 
+ */ +public class GeoHexGridAggregatorTests extends AggregatorTestCase { + + private static final String GEO_POINT_FIELD_NAME = "location"; + private static final double TOLERANCE = 1E-5D; + + public void testNoDocs() throws IOException { + testCase(new MatchAllDocsQuery(), GEO_POINT_FIELD_NAME, randomPrecision(), null, geoGrid -> { + assertEquals(0, geoGrid.getBuckets().size()); + }, iw -> { + // Intentionally not writing any docs + }); + } + + public void testUnmapped() throws IOException { + testCase(new MatchAllDocsQuery(), randomLowerCaseString(), randomPrecision(), null, geoGrid -> { + assertEquals(0, geoGrid.getBuckets().size()); + }, iw -> { iw.addDocument(Collections.singleton(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, 10D, 10D))); }); + } + + public void testUnmappedMissing() throws IOException { + GeoGridAggregationBuilder builder = createBuilder(randomLowerCaseString()).field(randomLowerCaseString()) + .missing("53.69437,6.475031"); + testCase( + new MatchAllDocsQuery(), + randomPrecision(), + null, + geoGrid -> assertEquals(1, geoGrid.getBuckets().size()), + iw -> iw.addDocument(Collections.singleton(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, 10D, 10D))), + builder + ); + + } + + public void testWithSeveralDocs() throws IOException { + int precision = randomPrecision(); + int numPoints = randomIntBetween(8, 128); + Map expectedCountPerGeoHex = new HashMap<>(); + testCase(new MatchAllDocsQuery(), GEO_POINT_FIELD_NAME, precision, null, geoHexGrid -> { + assertEquals(expectedCountPerGeoHex.size(), geoHexGrid.getBuckets().size()); + for (GeoGrid.Bucket bucket : geoHexGrid.getBuckets()) { + assertEquals((long) expectedCountPerGeoHex.get(bucket.getKeyAsString()), bucket.getDocCount()); + } + assertTrue(hasValue(geoHexGrid)); + }, iw -> { + List points = new ArrayList<>(); + Set distinctAddressPerDoc = new HashSet<>(); + for (int pointId = 0; pointId < numPoints; pointId++) { + double[] latLng = randomLatLng(); + points.add(new 
LatLonDocValuesField(GEO_POINT_FIELD_NAME, latLng[0], latLng[1])); + String address = h3AddressAsString(latLng[1], latLng[0], precision); + if (distinctAddressPerDoc.contains(address) == false) { + expectedCountPerGeoHex.put(address, expectedCountPerGeoHex.getOrDefault(address, 0) + 1); + } + distinctAddressPerDoc.add(address); + if (usually()) { + iw.addDocument(points); + points.clear(); + distinctAddressPerDoc.clear(); + } + } + if (points.size() != 0) { + iw.addDocument(points); + } + }); + } + + public void testAsSubAgg() throws IOException { + int precision = randomPrecision(); + Map> expectedCountPerTPerGeoHex = new TreeMap<>(); + List> docs = new ArrayList<>(); + for (int i = 0; i < 30; i++) { + String t = randomAlphaOfLength(1); + double[] latLng = randomLatLng(); + + List doc = new ArrayList<>(); + docs.add(doc); + doc.add(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, latLng[0], latLng[1])); + doc.add(new SortedSetDocValuesField("t", new BytesRef(t))); + + String address = h3AddressAsString(latLng[1], latLng[0], precision); + Map expectedCountPerGeoHex = expectedCountPerTPerGeoHex.get(t); + if (expectedCountPerGeoHex == null) { + expectedCountPerGeoHex = new TreeMap<>(); + expectedCountPerTPerGeoHex.put(t, expectedCountPerGeoHex); + } + expectedCountPerGeoHex.put(address, expectedCountPerGeoHex.getOrDefault(address, 0L) + 1); + } + CheckedConsumer buildIndex = iw -> iw.addDocuments(docs); + String aggregation = randomLowerCaseString(); + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("t").field("t") + .size(expectedCountPerTPerGeoHex.size()) + .subAggregation(createBuilder(aggregation).field(GEO_POINT_FIELD_NAME).precision(precision)); + Consumer verify = (terms) -> { + Map> actual = new TreeMap<>(); + for (StringTerms.Bucket tb : terms.getBuckets()) { + GeoHexGrid gg = tb.getAggregations().get(aggregation); + Map sub = new TreeMap<>(); + for (InternalGeoGridBucket ggb : gg.getBuckets()) { + sub.put(ggb.getKeyAsString(), 
ggb.getDocCount()); + } + actual.put(tb.getKeyAsString(), sub); + } + MatcherAssert.assertThat(actual, equalTo(expectedCountPerTPerGeoHex)); + }; + testCase(aggregationBuilder, new MatchAllDocsQuery(), buildIndex, verify, keywordField("t"), geoPointField(GEO_POINT_FIELD_NAME)); + } + + public void testBounds() throws IOException { + final int numDocs = randomIntBetween(64, 256); + final GeoHexGridAggregationBuilder builder = createBuilder("_name"); + + expectThrows(IllegalArgumentException.class, () -> builder.precision(-1)); + expectThrows(IllegalArgumentException.class, () -> builder.precision(30)); + + // only consider bounding boxes that are at least TOLERANCE wide and have quantized coordinates + GeoBoundingBox bbox = randomValueOtherThanMany( + (b) -> Math.abs(GeoUtils.normalizeLon(b.right()) - GeoUtils.normalizeLon(b.left())) < TOLERANCE, + GeoHexGridAggregatorTests::randomBBox + ); + Function encodeDecodeLat = (lat) -> GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); + Function encodeDecodeLon = (lon) -> GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon)); + bbox.topLeft().reset(encodeDecodeLat.apply(bbox.top()), encodeDecodeLon.apply(bbox.left())); + bbox.bottomRight().reset(encodeDecodeLat.apply(bbox.bottom()), encodeDecodeLon.apply(bbox.right())); + + int in = 0, out = 0; + List docs = new ArrayList<>(); + while (in + out < numDocs) { + if (bbox.left() > bbox.right()) { + if (randomBoolean()) { + double lonWithin = randomBoolean() + ? 
randomDoubleBetween(bbox.left(), 180.0, true) + : randomDoubleBetween(-180.0, bbox.right(), true); + double latWithin = randomDoubleBetween(bbox.bottom(), bbox.top(), true); + in++; + docs.add(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, latWithin, lonWithin)); + } else { + double lonOutside = randomDoubleBetween(bbox.left(), bbox.right(), true); + double latOutside = randomDoubleBetween(bbox.top(), -90, false); + out++; + docs.add(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, latOutside, lonOutside)); + } + } else { + if (randomBoolean()) { + double lonWithin = randomDoubleBetween(bbox.left(), bbox.right(), true); + double latWithin = randomDoubleBetween(bbox.bottom(), bbox.top(), true); + in++; + docs.add(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, latWithin, lonWithin)); + } else { + double lonOutside = GeoUtils.normalizeLon(randomDoubleBetween(bbox.right(), 180.001, false)); + double latOutside = GeoUtils.normalizeLat(randomDoubleBetween(bbox.top(), 90.001, false)); + out++; + docs.add(new LatLonDocValuesField(GEO_POINT_FIELD_NAME, latOutside, lonOutside)); + } + } + + } + + final long numDocsInBucket = in; + final int precision = randomPrecision(); + + testCase(new MatchAllDocsQuery(), GEO_POINT_FIELD_NAME, precision, bbox, geoGrid -> { + assertTrue(hasValue(geoGrid)); + long docCount = 0; + for (int i = 0; i < geoGrid.getBuckets().size(); i++) { + docCount += geoGrid.getBuckets().get(i).getDocCount(); + } + MatcherAssert.assertThat(docCount, equalTo(numDocsInBucket)); + }, iw -> { + for (LatLonDocValuesField docField : docs) { + iw.addDocument(Collections.singletonList(docField)); + } + }); + } + + @Override + public void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) { + /* + * No-op. + */ + } + + /** + * Overriding the Search Plugins list with {@link GeospatialPlugin} so that the testcase will know that this plugin is + * to be loaded during the tests. 
+ * @return List of {@link SearchPlugin} + */ + @Override + protected List getSearchPlugins() { + return Collections.singletonList(new GeospatialPlugin()); + } + + private double[] randomLatLng() { + double lat = (180d * randomDouble()) - 90d; + double lng = (360d * randomDouble()) - 180d; + + // Precision-adjust longitude/latitude to avoid wrong bucket placement + // Internally, lat/lng get converted to 32 bit integers, loosing some precision. + // This does not affect geo hex because it also uses the same algorithm, + // but it does affect other bucketing algos, thus we need to do the same steps here. + lng = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lng)); + lat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); + + return new double[] { lat, lng }; + } + + private void testCase( + Query query, + String field, + int precision, + GeoBoundingBox geoBoundingBox, + Consumer verify, + CheckedConsumer buildIndex + ) throws IOException { + testCase(query, precision, geoBoundingBox, verify, buildIndex, createBuilder("_name").field(field)); + } + + private void testCase( + Query query, + int precision, + GeoBoundingBox geoBoundingBox, + Consumer verify, + CheckedConsumer buildIndex, + GeoGridAggregationBuilder aggregationBuilder + ) throws IOException { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + buildIndex.accept(indexWriter); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + aggregationBuilder.precision(precision); + if (geoBoundingBox != null) { + aggregationBuilder.setGeoBoundingBox(geoBoundingBox); + MatcherAssert.assertThat(aggregationBuilder.geoBoundingBox(), equalTo(geoBoundingBox)); + } + + MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType(GEO_POINT_FIELD_NAME); + + Aggregator aggregator = 
createAggregator(aggregationBuilder, indexSearcher, fieldType); + aggregator.preCollection(); + indexSearcher.search(query, aggregator); + aggregator.postCollection(); + verify.accept((GeoHexGrid) aggregator.buildTopLevel()); + + indexReader.close(); + directory.close(); + } + + private int randomPrecision() { + return randomIntBetween(H3.MIN_H3_RES, H3.MAX_H3_RES); + } + + private static boolean hasValue(GeoHexGrid agg) { + return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0); + } + + private static GeoBoundingBox randomBBox() { + Rectangle rectangle = GeometryTestUtils.randomRectangle(); + return new GeoBoundingBox( + new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), + new GeoPoint(rectangle.getMinLat(), rectangle.getMaxLon()) + ); + } + + private String h3AddressAsString(double lng, double lat, int precision) { + return H3.geoToH3Address(lat, lng, precision); + } + + private GeoHexGridAggregationBuilder createBuilder(String name) { + return new GeoHexGridAggregationBuilder(name); + } +} diff --git a/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridParserTests.java b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridParserTests.java new file mode 100644 index 00000000..109455a9 --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridParserTests.java @@ -0,0 +1,143 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.opensearch.geospatial.GeospatialTestHelper.randomHexGridPrecision; +import static org.opensearch.geospatial.GeospatialTestHelper.randomLowerCaseString; +import static org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexGridAggregationBuilder.NAME; +import 
static org.opensearch.geospatial.search.aggregations.bucket.geogrid.GeoHexGridAggregationBuilder.PARSER; + +import java.util.Locale; + +import org.hamcrest.MatcherAssert; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.XContentParseException; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.geo.GeometryTestUtils; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geospatial.h3.H3; +import org.opensearch.test.OpenSearchTestCase; + +public class GeoHexGridParserTests extends OpenSearchTestCase { + + private final static int MAX_SIZE = 100; + private final static int MIN_SIZE = 1; + private final static int MAX_SHARD_SIZE = 100; + private final static int MIN_SHARD_SIZE = 1; + + public void testParseValidFromInts() throws Exception { + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + buildAggregation( + randomLowerCaseString(), + randomHexGridPrecision(), + randomIntBetween(MIN_SIZE, MAX_SIZE), + randomIntBetween(MIN_SHARD_SIZE, MAX_SHARD_SIZE) + ) + ); + XContentParser.Token token = stParser.nextToken(); + assertSame(XContentParser.Token.START_OBJECT, token); + // can create a factory + assertNotNull(PARSER.parse(stParser, NAME)); + } + + public void testParseValidFromStrings() throws Exception { + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + buildAggregation( + randomLowerCaseString(), + randomHexGridPrecision(), + randomIntBetween(MIN_SIZE, MAX_SIZE), + randomIntBetween(MIN_SHARD_SIZE, MAX_SHARD_SIZE) + ) + ); + XContentParser.Token token = stParser.nextToken(); + assertSame(XContentParser.Token.START_OBJECT, token); + // can create a factory + assertNotNull(PARSER.parse(stParser, NAME)); + } + + public void testParseInvalidUnitPrecision() throws Exception { + XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\": \"10kg\"}"); + XContentParser.Token token = stParser.nextToken(); + 
assertSame(XContentParser.Token.START_OBJECT, token); + XContentParseException ex = expectThrows(XContentParseException.class, () -> PARSER.parse(stParser, NAME)); + MatcherAssert.assertThat(ex.getMessage(), containsString("failed to parse field [precision]")); + MatcherAssert.assertThat(ex.getCause(), instanceOf(NumberFormatException.class)); + assertEquals("For input string: \"10kg\"", ex.getCause().getMessage()); + } + + public void testParseErrorOnBooleanPrecision() throws Exception { + XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"my_loc\", \"precision\":false}"); + XContentParser.Token token = stParser.nextToken(); + assertSame(XContentParser.Token.START_OBJECT, token); + XContentParseException e = expectThrows(XContentParseException.class, () -> PARSER.parse(stParser, NAME)); + MatcherAssert.assertThat(e.getMessage(), containsString("precision doesn't support values of type: VALUE_BOOLEAN")); + } + + public void testParseErrorOnPrecisionOutOfRange() throws Exception { + int invalidPrecision = H3.MAX_H3_RES + 1; + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + buildAggregation( + randomLowerCaseString(), + invalidPrecision, + randomIntBetween(MIN_SIZE, MAX_SIZE), + randomIntBetween(MIN_SHARD_SIZE, MAX_SHARD_SIZE) + ) + ); + XContentParser.Token token = stParser.nextToken(); + assertSame(XContentParser.Token.START_OBJECT, token); + try { + PARSER.parse(stParser, NAME); + fail(); + } catch (XContentParseException ex) { + MatcherAssert.assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); + assertEquals( + String.format( + Locale.ROOT, + "Invalid precision of %d . 
Must be between %d and %d.", + invalidPrecision, + H3.MIN_H3_RES, + H3.MAX_H3_RES + ), + ex.getCause().getMessage() + ); + } + } + + public void testParseValidBounds() throws Exception { + Rectangle bbox = GeometryTestUtils.randomRectangle(); + XContentParser stParser = createParser( + JsonXContent.jsonXContent, + String.format( + Locale.ROOT, + "{\"field\":\"my_loc\", \"precision\": 5, \"size\": 500, \"shard_size\": 550,\"bounds\": { \"top\": %s,\"bottom\": %s,\"left\": %s,\"right\": %s}}", + bbox.getMaxY(), + bbox.getMinY(), + bbox.getMinX(), + bbox.getMaxX() + ) + ); + XContentParser.Token token = stParser.nextToken(); + assertSame(XContentParser.Token.START_OBJECT, token); + // can create a factory + assertNotNull(PARSER.parse(stParser, NAME)); + } + + private String buildAggregation(String fieldName, int precision, int size, int shardSize) { + return String.format( + Locale.ROOT, + "{\"field\":\"%s\", \"precision\":%d, \"size\": %d, \"shard_size\": %d}", + fieldName, + precision, + size, + shardSize + ); + } +} diff --git a/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java new file mode 100644 index 00000000..db91180d --- /dev/null +++ b/src/test/java/org/opensearch/geospatial/search/aggregations/bucket/geogrid/GeoHexGridTests.java @@ -0,0 +1,186 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.geospatial.search.aggregations.bucket.geogrid; + +import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.geospatial.GeospatialTestHelper.randomHexGridPrecision; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.lucene.index.IndexWriter; +import org.hamcrest.MatcherAssert; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.ContextParser; 
+import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGridBucket; +import org.opensearch.geospatial.h3.H3; +import org.opensearch.geospatial.plugin.GeospatialPlugin; +import org.opensearch.plugins.SearchPlugin; +import org.opensearch.search.aggregations.Aggregation; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.aggregations.ParsedMultiBucketAggregation; +import org.opensearch.test.InternalMultiBucketAggregationTestCase; + +/** + * This class is modified from https://github.com/opensearch-project/OpenSearch/blob/main/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridTestCase.java + * to keep relevant test case required for GeoHex Grid. + */ +public class GeoHexGridTests extends InternalMultiBucketAggregationTestCase { + + private static final double LATITUDE_MIN = -90.0; + private static final double LATITUDE_MAX = 90.0; + private static final double LONGITUDE_MIN = -180.0; + private static final double LONGITUDE_MAX = 180.0; + private static final int MIN_BUCKET_SIZE = 1; + private static final int MAX_BUCKET_SIZE = 3; + + public void testCreateFromBuckets() { + InternalGeoGrid original = createTestInstance(); + MatcherAssert.assertThat(original, equalTo(original.create(original.getBuckets()))); + } + + @Override + protected int minNumberOfBuckets() { + return MIN_BUCKET_SIZE; + } + + @Override + protected int maxNumberOfBuckets() { + return MAX_BUCKET_SIZE; + } + + /** + * Overriding the method so that tests can get the aggregation specs for namedWriteable. 
+ * + * @return GeoSpatialPlugin + */ + @Override + protected SearchPlugin registerPlugin() { + return new GeospatialPlugin(); + } + + /** + * Overriding with the {@link ParsedGeoHexGrid} so that it can be parsed. We need to do this as {@link GeospatialPlugin} + * is registering this Aggregation. + * + * @return a List of {@link NamedXContentRegistry.Entry} + */ + @Override + protected List getNamedXContents() { + final List namedXContents = new ArrayList<>(getDefaultNamedXContents()); + final ContextParser hexGridParser = (p, c) -> ParsedGeoHexGrid.fromXContent(p, (String) c); + namedXContents.add( + new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(GeoHexGridAggregationBuilder.NAME), hexGridParser) + ); + return namedXContents; + } + + @Override + protected GeoHexGrid createTestInstance(String name, Map metadata, InternalAggregations aggregations) { + final int precision = randomHexGridPrecision(); + int size = randomNumberOfBuckets(); + List buckets = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + double latitude = randomDoubleBetween(LATITUDE_MIN, LATITUDE_MAX, false); + double longitude = randomDoubleBetween(LONGITUDE_MIN, LONGITUDE_MAX, false); + + long addressAsLong = longEncode(longitude, latitude, precision); + buckets.add(createInternalGeoGridBucket(addressAsLong, randomInt(IndexWriter.MAX_DOCS), aggregations)); + } + return createInternalGeoGrid(name, size, buckets, metadata); + } + + @Override + protected void assertReduced(GeoHexGrid reduced, List inputs) { + Map> map = new HashMap<>(); + for (GeoHexGrid input : inputs) { + for (GeoGrid.Bucket bucketBase : input.getBuckets()) { + GeoHexGridBucket bucket = (GeoHexGridBucket) bucketBase; + List buckets = map.computeIfAbsent(bucket.hashAsLong(), k -> new ArrayList<>()); + buckets.add(bucket); + } + } + List expectedBuckets = new ArrayList<>(); + for (Map.Entry> entry : map.entrySet()) { + long docCount = 0; + for (GeoHexGridBucket bucket : entry.getValue()) { + docCount += 
bucket.getDocCount(); + } + expectedBuckets.add(createInternalGeoGridBucket(entry.getKey(), docCount, InternalAggregations.EMPTY)); + } + expectedBuckets.sort((first, second) -> { + int cmp = Long.compare(second.getDocCount(), first.getDocCount()); + if (cmp == 0) { + return second.compareTo(first); + } + return cmp; + }); + int requestedSize = inputs.get(0).getRequiredSize(); + expectedBuckets = expectedBuckets.subList(0, Math.min(requestedSize, expectedBuckets.size())); + assertEquals(expectedBuckets.size(), reduced.getBuckets().size()); + for (int i = 0; i < reduced.getBuckets().size(); i++) { + GeoGrid.Bucket expected = expectedBuckets.get(i); + GeoGrid.Bucket actual = reduced.getBuckets().get(i); + assertEquals(expected.getDocCount(), actual.getDocCount()); + assertEquals(expected.getKey(), actual.getKey()); + } + } + + @Override + protected Class implementationClass() { + return ParsedGeoHexGrid.class; + } + + @Override + protected GeoHexGrid mutateInstance(GeoHexGrid instance) { + String name = instance.getName(); + int size = instance.getRequiredSize(); + List buckets = instance.getBuckets(); + Map metadata = instance.getMetadata(); + switch (between(0, 3)) { + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add( + createInternalGeoGridBucket(randomNonNegativeLong(), randomInt(IndexWriter.MAX_DOCS), InternalAggregations.EMPTY) + ); + break; + case 2: + size = size + between(1, 10); + break; + case 3: + if (metadata == null) { + metadata = new HashMap<>(1); + } else { + metadata = new HashMap<>(instance.getMetadata()); + } + metadata.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return createInternalGeoGrid(name, size, buckets, metadata); + } + + private GeoHexGrid createInternalGeoGrid(String name, int size, List buckets, Map metadata) { + return new GeoHexGrid(name, size, buckets, metadata); + } + + private 
GeoHexGridBucket createInternalGeoGridBucket(Long key, long docCount, InternalAggregations aggregations) { + return new GeoHexGridBucket(key, docCount, aggregations); + } + + private long longEncode(double lng, double lat, int precision) { + return H3.geoToH3(lng, lat, precision); + } +} diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/RestUploadStatsActionIT.java b/src/test/java/org/opensearch/geospatial/stats/upload/RestUploadStatsActionIT.java index e702d232..1081f43b 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/RestUploadStatsActionIT.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/RestUploadStatsActionIT.java @@ -14,8 +14,8 @@ import org.apache.http.util.EntityUtils; import org.opensearch.client.Request; import org.opensearch.client.Response; +import org.opensearch.core.rest.RestStatus; import org.opensearch.geospatial.GeospatialRestTestCase; -import org.opensearch.rest.RestStatus; public class RestUploadStatsActionIT extends GeospatialRestTestCase { diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/TotalUploadStatsTests.java b/src/test/java/org/opensearch/geospatial/stats/upload/TotalUploadStatsTests.java index fe51feb0..2c26df9b 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/TotalUploadStatsTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/TotalUploadStatsTests.java @@ -12,11 +12,10 @@ import java.util.Collections; import java.util.List; -import org.opensearch.common.Strings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; public class TotalUploadStatsTests extends OpenSearchTestCase { 
@@ -43,11 +42,11 @@ public void testEmptyUploadStats() { public void testToXContentWithEmptyUploadStats() throws IOException { TotalUploadStats totalUploadStats = new TotalUploadStats(Collections.emptyList()); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); String expectedEmptyContent = "{\"total\":{}}"; assertEquals(expectedEmptyContent, summary); @@ -58,11 +57,11 @@ public void testToXContentWithRequestAPICount() throws IOException { long expectedSum = INIT; expectedSum += randomUploadStats.stream().mapToLong(UploadStats::getTotalAPICount).sum(); TotalUploadStats totalUploadStats = new TotalUploadStats(randomUploadStats); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); assertTrue(summary.contains(buildFieldNameValuePair(TotalUploadStats.FIELDS.REQUEST_COUNT.toString(), expectedSum))); } @@ -74,11 +73,11 @@ public void testToXContentWithUploadCount() throws IOException { expectedSum += stats.getMetrics().stream().mapToLong(UploadMetric::getUploadCount).sum(); } TotalUploadStats totalUploadStats = new TotalUploadStats(randomUploadStats); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); 
contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); assertTrue(summary.contains(buildFieldNameValuePair(TotalUploadStats.FIELDS.UPLOAD.toString(), expectedSum))); } @@ -90,11 +89,11 @@ public void testToXContentWithSuccessCount() throws IOException { expectedSum += stats.getMetrics().stream().mapToLong(UploadMetric::getSuccessCount).sum(); } TotalUploadStats totalUploadStats = new TotalUploadStats(randomUploadStats); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); assertTrue(summary.contains(buildFieldNameValuePair(TotalUploadStats.FIELDS.SUCCESS.toString(), expectedSum))); } @@ -106,11 +105,11 @@ public void testToXContentWithFailedCount() throws IOException { expectedSum += stats.getMetrics().stream().mapToLong(UploadMetric::getFailedCount).sum(); } TotalUploadStats totalUploadStats = new TotalUploadStats(randomUploadStats); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); assertTrue(summary.contains(buildFieldNameValuePair(TotalUploadStats.FIELDS.FAILED.toString(), expectedSum))); } @@ -122,11 +121,11 @@ public void 
testToXContentWithDuration() throws IOException { expectedSum += stats.getMetrics().stream().mapToLong(UploadMetric::getDuration).sum(); } TotalUploadStats totalUploadStats = new TotalUploadStats(randomUploadStats); - XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON); + XContentBuilder contentBuilder = MediaTypeRegistry.contentBuilder(XContentType.JSON); contentBuilder.startObject(); totalUploadStats.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - String summary = Strings.toString(contentBuilder); + String summary = contentBuilder.toString(); assertNotNull(summary); assertTrue(summary.contains(buildFieldNameValuePair(TotalUploadStats.FIELDS.DURATION.toString(), expectedSum))); } diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/UploadMetricTests.java b/src/test/java/org/opensearch/geospatial/stats/upload/UploadMetricTests.java index 1729491c..f9b0df14 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/UploadMetricTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/UploadMetricTests.java @@ -11,9 +11,10 @@ import java.io.IOException; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.Strings; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.test.OpenSearchTestCase; @@ -68,7 +69,7 @@ public void testInstanceFailsDueToEmptyType() { public void testToXContent() { UploadMetric actualMetric = GeospatialTestHelper.generateRandomUploadMetric(); - String metricAsString = Strings.toString(actualMetric); + String metricAsString = Strings.toString(XContentType.JSON, actualMetric); assertNotNull(metricAsString); assertTrue(metricAsString.contains(buildFieldNameValuePair(UploadMetric.FIELDS.ID, 
actualMetric.getMetricID()))); assertTrue(metricAsString.contains(buildFieldNameValuePair(UploadMetric.FIELDS.TYPE, GEOJSON))); diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponseTests.java b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponseTests.java index e453f03b..76b9587e 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponseTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsNodeResponseTests.java @@ -8,7 +8,7 @@ import java.io.IOException; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.test.OpenSearchTestCase; diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsResponseTests.java b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsResponseTests.java index 34077149..3b43f7f2 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsResponseTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsResponseTests.java @@ -18,12 +18,11 @@ import java.util.stream.Collectors; import org.opensearch.cluster.ClusterName; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.BytesStreamOutput; -import org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; public class UploadStatsResponseTests extends OpenSearchTestCase { @@ -38,7 +37,7 @@ public void testXContentWithMetrics() throws 
IOException { ); final XContentBuilder serviceContentBuilder = jsonBuilder(); uploadStatsResponse.toXContent(serviceContentBuilder, ToXContent.EMPTY_PARAMS); - String nodesResponseAsString = Strings.toString(serviceContentBuilder); + String nodesResponseAsString = serviceContentBuilder.toString(); assertNotNull(nodesResponseAsString); final List uploadMetrics = getUploadMetrics(nodeResponse); @@ -46,7 +45,7 @@ public void testXContentWithMetrics() throws IOException { XContentBuilder metricContent = XContentFactory.jsonBuilder().startObject(); metric.toXContent(metricContent, ToXContent.EMPTY_PARAMS); metricContent.endObject(); - final String metricAsString = Strings.toString(metricContent); + final String metricAsString = metricContent.toString(); assertNotNull(metricAsString); assertTrue(nodesResponseAsString.contains(removeStartAndEndObject(metricAsString))); } @@ -62,14 +61,14 @@ public void testXContentWithTotalUploads() throws IOException { ); final XContentBuilder serviceContentBuilder = jsonBuilder(); uploadStatsResponse.toXContent(serviceContentBuilder, ToXContent.EMPTY_PARAMS); - String nodesResponseAsString = Strings.toString(serviceContentBuilder); + String nodesResponseAsString = serviceContentBuilder.toString(); assertNotNull(nodesResponseAsString); TotalUploadStats totalUploadStats = new TotalUploadStats(getUploadStats(nodeResponse)); XContentBuilder totalUploadStatsContent = XContentFactory.jsonBuilder().startObject(); totalUploadStats.toXContent(totalUploadStatsContent, ToXContent.EMPTY_PARAMS); totalUploadStatsContent.endObject(); - final String totalUploadStatsAsString = Strings.toString(totalUploadStatsContent); + final String totalUploadStatsAsString = totalUploadStatsContent.toString(); assertNotNull(totalUploadStatsAsString); assertTrue(nodesResponseAsString.contains(removeStartAndEndObject(totalUploadStatsAsString))); } diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsServiceTests.java 
b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsServiceTests.java index b82e6b39..8098ea35 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsServiceTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsServiceTests.java @@ -15,9 +15,8 @@ import java.util.List; import java.util.Map; -import org.opensearch.common.Strings; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.test.OpenSearchTestCase; @@ -43,7 +42,7 @@ public void testXContentWithNodeID() throws IOException { UploadStatsService service = new UploadStatsService(randomMap); final XContentBuilder serviceContentBuilder = jsonBuilder(); service.toXContent(serviceContentBuilder, ToXContent.EMPTY_PARAMS); - String content = Strings.toString(serviceContentBuilder); + String content = serviceContentBuilder.toString(); assertNotNull(content); for (String nodeID : randomMap.keySet()) { assertTrue(nodeID + " is missing", content.contains(buildFieldNameValuePair(UploadStatsService.NODE_ID, nodeID))); @@ -55,7 +54,7 @@ public void testXContentWithEmptyStats() throws IOException { final XContentBuilder contentBuilder = jsonBuilder(); service.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); String emptyContent = "{\"total\":{},\"metrics\":[]}"; - assertEquals(emptyContent, Strings.toString(contentBuilder)); + assertEquals(emptyContent, contentBuilder.toString()); } public void testXContentWithTotalUploadStats() throws IOException { @@ -68,14 +67,14 @@ public void testXContentWithTotalUploadStats() throws IOException { UploadStatsService service = new UploadStatsService(randomMap); final XContentBuilder serviceContentBuilder = jsonBuilder(); service.toXContent(serviceContentBuilder, ToXContent.EMPTY_PARAMS); - 
String content = Strings.toString(serviceContentBuilder); + String content = serviceContentBuilder.toString(); assertNotNull(content); final XContentBuilder summary = jsonBuilder().startObject(); TotalUploadStats expectedSummary = new TotalUploadStats(uploadStats); expectedSummary.toXContent(summary, ToXContent.EMPTY_PARAMS); summary.endObject(); - final String totalUploadStatsSummary = Strings.toString(summary); + final String totalUploadStatsSummary = summary.toString(); assertNotNull(totalUploadStatsSummary); assertTrue(content.contains(removeStartAndEndObject(totalUploadStatsSummary))); } @@ -91,14 +90,14 @@ public void testXContentWithMetrics() throws IOException { UploadStatsService service = new UploadStatsService(randomMap); final XContentBuilder serviceContentBuilder = jsonBuilder(); service.toXContent(serviceContentBuilder, ToXContent.EMPTY_PARAMS); - String content = Strings.toString(serviceContentBuilder); + String content = serviceContentBuilder.toString(); assertNotNull(content); for (UploadMetric metric : randomMetrics) { XContentBuilder metricsAsContent = jsonBuilder().startObject(); metric.toXContent(metricsAsContent, ToXContent.EMPTY_PARAMS); metricsAsContent.endObject(); - final String metricsAsString = Strings.toString(metricsAsContent); + final String metricsAsString = metricsAsContent.toString(); assertNotNull(metricsAsString); assertTrue(content.contains(removeStartAndEndObject(metricsAsString))); } diff --git a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsTests.java b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsTests.java index 273e5f96..4f982cbe 100644 --- a/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsTests.java +++ b/src/test/java/org/opensearch/geospatial/stats/upload/UploadStatsTests.java @@ -13,12 +13,11 @@ import java.util.Set; import java.util.stream.IntStream; -import org.opensearch.common.Strings; import org.opensearch.common.io.stream.BytesStreamOutput; -import 
org.opensearch.common.io.stream.StreamInput; -import org.opensearch.common.xcontent.ToXContent; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.geospatial.GeospatialTestHelper; import org.opensearch.test.OpenSearchTestCase; @@ -91,7 +90,7 @@ public void testStreams() throws IOException { public void testToXContent() throws IOException { UploadStats stats = UploadStatsBuilder.randomUploadStats(); XContentBuilder statsContent = XContentFactory.jsonBuilder().startObject(); - String statsAsString = Strings.toString(stats.toXContent(statsContent, ToXContent.EMPTY_PARAMS).endObject()); + String statsAsString = stats.toXContent(statsContent, ToXContent.EMPTY_PARAMS).endObject().toString(); assertNotNull(statsAsString); assertTrue(statsAsString.contains(buildFieldNameValuePair(UploadStats.FIELDS.REQUEST_COUNT.toString(), stats.getTotalAPICount()))); stats.getMetrics().forEach(uploadMetric -> { diff --git a/src/test/resources/ip2geo/manifest.json b/src/test/resources/ip2geo/manifest.json new file mode 100644 index 00000000..86a76e47 --- /dev/null +++ b/src/test/resources/ip2geo/manifest.json @@ -0,0 +1,8 @@ +{ + "url": "https://test.com/db.zip", + "db_name": "sample_valid.csv", + "sha256_hash": "safasdfaskkkesadfasdf", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 3134012341236, + "provider": "sample_provider" +} \ No newline at end of file diff --git a/src/test/resources/ip2geo/manifest_invalid_url.json b/src/test/resources/ip2geo/manifest_invalid_url.json new file mode 100644 index 00000000..c9f1723e --- /dev/null +++ b/src/test/resources/ip2geo/manifest_invalid_url.json @@ -0,0 +1,8 @@ +{ + "url": "invalid://test.com/db.zip", + "db_name": "sample_valid.csv", + "sha256_hash": "safasdfaskkkesadfasdf", + 
"valid_for_in_days": 30, + "updated_at_in_epoch_milli": 3134012341236, + "provider": "sample_provider" +} \ No newline at end of file diff --git a/src/test/resources/ip2geo/manifest_template.json b/src/test/resources/ip2geo/manifest_template.json new file mode 100644 index 00000000..39665b74 --- /dev/null +++ b/src/test/resources/ip2geo/manifest_template.json @@ -0,0 +1,8 @@ +{ + "url": "URL", + "db_name": "sample_valid.csv", + "sha256_hash": "safasdfaskkkesadfasdf", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 3134012341236, + "provider": "maxmind" +} \ No newline at end of file diff --git a/src/test/resources/ip2geo/sample_invalid_less_than_two_fields.csv b/src/test/resources/ip2geo/sample_invalid_less_than_two_fields.csv new file mode 100644 index 00000000..08670061 --- /dev/null +++ b/src/test/resources/ip2geo/sample_invalid_less_than_two_fields.csv @@ -0,0 +1,2 @@ +network +1.0.0.0/24 \ No newline at end of file diff --git a/src/test/resources/ip2geo/sample_valid.csv b/src/test/resources/ip2geo/sample_valid.csv new file mode 100644 index 00000000..a6d08935 --- /dev/null +++ b/src/test/resources/ip2geo/sample_valid.csv @@ -0,0 +1,3 @@ +network,country_name +1.0.0.0/24,Australia +10.0.0.0/24,USA \ No newline at end of file diff --git a/src/test/resources/ip2geo/sample_valid.zip b/src/test/resources/ip2geo/sample_valid.zip new file mode 100644 index 00000000..0bdeeadb Binary files /dev/null and b/src/test/resources/ip2geo/sample_valid.zip differ diff --git a/src/test/resources/ip2geo/server/city/city.zip b/src/test/resources/ip2geo/server/city/city.zip new file mode 100644 index 00000000..12fbd719 Binary files /dev/null and b/src/test/resources/ip2geo/server/city/city.zip differ diff --git a/src/test/resources/ip2geo/server/city/manifest.json b/src/test/resources/ip2geo/server/city/manifest.json new file mode 100644 index 00000000..ac903a18 --- /dev/null +++ b/src/test/resources/ip2geo/server/city/manifest.json @@ -0,0 +1,8 @@ +{ + "url": 
"https://github.com/opensearch-project/geospatial/raw/main/src/test/resources/ip2geo/server/city/city.zip", + "db_name": "data.csv", + "sha256_hash": "oDPgEv+9+kNov7bdQQiLrhr8jQeEPdLnuJ22Hz5npvk=", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 1683590400000, + "provider": "opensearch" +} diff --git a/src/test/resources/ip2geo/server/city/manifest_local.json b/src/test/resources/ip2geo/server/city/manifest_local.json new file mode 100644 index 00000000..a69ccbef --- /dev/null +++ b/src/test/resources/ip2geo/server/city/manifest_local.json @@ -0,0 +1,8 @@ +{ + "url": "http://localhost:8001/city/city.zip", + "db_name": "data.csv", + "sha256_hash": "oDPgEv+9+kNov7bdQQiLrhr8jQeEPdLnuJ22Hz5npvk=", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 1683590400000, + "provider": "opensearch" +} diff --git a/src/test/resources/ip2geo/server/country/country.zip b/src/test/resources/ip2geo/server/country/country.zip new file mode 100644 index 00000000..1c930b1a Binary files /dev/null and b/src/test/resources/ip2geo/server/country/country.zip differ diff --git a/src/test/resources/ip2geo/server/country/manifest.json b/src/test/resources/ip2geo/server/country/manifest.json new file mode 100644 index 00000000..ba59fc32 --- /dev/null +++ b/src/test/resources/ip2geo/server/country/manifest.json @@ -0,0 +1,8 @@ +{ + "url": "https://github.com/opensearch-project/geospatial/raw/main/src/test/resources/ip2geo/server/country/country.zip", + "db_name": "data.csv", + "sha256_hash": "oDPgEv+4+kNov7bdQQiLrhr8jQeEPdLnuJ11Hz5npvk=", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 1683590400000, + "provider": "opensearch" +} diff --git a/src/test/resources/ip2geo/server/country/manifest_local.json b/src/test/resources/ip2geo/server/country/manifest_local.json new file mode 100644 index 00000000..4c63840b --- /dev/null +++ b/src/test/resources/ip2geo/server/country/manifest_local.json @@ -0,0 +1,8 @@ +{ + "url": 
"http://localhost:8001/country/country.zip", + "db_name": "data.csv", + "sha256_hash": "oDPgEv+4+kNov7bdQQiLrhr8jQeEPdLnuJ11Hz5npvk=", + "valid_for_in_days": 30, + "updated_at_in_epoch_milli": 1683590400000, + "provider": "opensearch" +} diff --git a/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml b/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml index 21c13a60..264ec585 100644 --- a/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml +++ b/src/yamlRestTest/resources/rest-api-spec/test/10_basic.yml @@ -1,8 +1,8 @@ -"Test that geospatial plugin is loaded in OpenSearch": +"Test that geospatial and job scheduler plugins are loaded in OpenSearch": - do: cat.plugins: local: true h: component - match: - $body: /^opensearch-geospatial\n$/ + $body: /^opensearch-geospatial\nopensearch-job-scheduler\n$/ diff --git a/src/yamlRestTest/resources/rest-api-spec/test/20_geohex_grid.yml b/src/yamlRestTest/resources/rest-api-spec/test/20_geohex_grid.yml new file mode 100644 index 00000000..da689826 --- /dev/null +++ b/src/yamlRestTest/resources/rest-api-spec/test/20_geohex_grid.yml @@ -0,0 +1,61 @@ +setup: + - do: + indices.create: + index: cities + body: + settings: + number_of_replicas: 0 + mappings: + properties: + location: + type: geo_point + +--- +"Basic test": + - do: + bulk: + refresh: true + body: + - index: + _index: cities + _id: 1 + - location: "52.374081,4.912350" + - index: + _index: cities + _id: 2 + - location: "52.369219,4.901618" + - index: + _index: cities + _id: 3 + - location: "52.371667,4.914722" + - index: + _index: cities + _id: 4 + - location: "51.222900,4.405200" + - index: + _index: cities + _id: 5 + - location: "48.861111,2.336389" + - index: + _index: cities + _id: 6 + - location: "48.860000,2.327000" + + - do: + search: + rest_total_hits_as_int: true + body: + aggregations: + grid: + geohex_grid: + field: location + precision: 4 + + + - match: { hits.total: 6 } + - match: { aggregations.grid.buckets.0.key: 841969dffffffff } 
+ - match: { aggregations.grid.buckets.0.doc_count: 3 } + - match: { aggregations.grid.buckets.1.key: 841fb47ffffffff } + - match: { aggregations.grid.buckets.1.doc_count: 2 } + - match: { aggregations.grid.buckets.2.key: 841fa4dffffffff } + - match: { aggregations.grid.buckets.2.doc_count: 1 } diff --git a/src/yamlRestTest/resources/rest-api-spec/test/xypoint/10_basic.yml b/src/yamlRestTest/resources/rest-api-spec/test/xypoint/10_basic.yml new file mode 100644 index 00000000..0f8c8c25 --- /dev/null +++ b/src/yamlRestTest/resources/rest-api-spec/test/xypoint/10_basic.yml @@ -0,0 +1,135 @@ +setup: + - do: + indices.create: + index: test_1 + body: + settings: + number_of_replicas: 0 + mappings: + properties: + geometry: + type: xy_point + +--- +"Single point test": + - do: + bulk: + refresh: true + body: + - index: + _index: test_1 + _id: 1 + - geometry: + x: 52.374081 + y: 4.912350 + - index: + _index: test_1 + _id: 2 + - geometry: "52.369219,4.901618" + - index: + _index: test_1 + _id: 3 + - geometry: [ 52.371667, 4.914722 ] + - index: + _index: test_1 + _id: 4 + - geometry: "POINT (52.371667 4.914722)" + - index: + _index: test_1 + _id: 5 + - geometry: + type: Point + coordinates: [ 52.371667, 4.914722 ] + + - do: + search: + index: test_1 + rest_total_hits_as_int: true + body: + query: + xy_shape: + geometry: + shape: + type: "envelope" + coordinates: [ [ 51, 5 ], [ 53, 3 ] ] + + - match: { hits.total: 5 } + + - do: + search: + index: test_1 + rest_total_hits_as_int: true + body: + query: + xy_shape: + geometry: + shape: + type: "envelope" + coordinates: [ [ 151, 15 ], [ 153, 13 ] ] + + - match: { hits.total: 0 } + +--- +"Multi points test": + - do: + bulk: + refresh: true + body: + - index: + _index: test_1 + _id: 1 + - geometry: + - {x: 52.374081, y: 4.912350} + - {x: 152.374081, y: 14.912350} + - index: + _index: test_1 + _id: 2 + - geometry: + - "52.369219,4.901618" + - "152.369219,14.901618" + - index: + _index: test_1 + _id: 3 + - geometry: + - [ 52.371667, 
4.914722 ] + - [ 152.371667, 14.914722 ] + - index: + _index: test_1 + _id: 4 + - geometry: + - "POINT (52.371667 4.914722)" + - "POINT (152.371667 14.914722)" + - index: + _index: test_1 + _id: 5 + - geometry: + - {type: Point, coordinates: [ 52.371667, 4.914722 ]} + - {type: Point, coordinates: [ 152.371667, 14.914722 ]} + + - do: + search: + index: test_1 + rest_total_hits_as_int: true + body: + query: + xy_shape: + geometry: + shape: + type: "envelope" + coordinates: [ [ 51, 5 ], [ 53, 3 ] ] + + - match: { hits.total: 5 } + + - do: + search: + index: test_1 + rest_total_hits_as_int: true + body: + query: + xy_shape: + geometry: + shape: + type: "envelope" + coordinates: [ [ 151, 15 ], [ 153, 13 ] ] + + - match: { hits.total: 5 }