diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 1dea59eff1988..089b30ddf8b31 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -87,6 +87,8 @@ class BuildPlugin implements Plugin<Project> {
         project.pluginManager.apply('nebula.info-scm')
         project.pluginManager.apply('nebula.info-jar')
 
+        project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)
+
         globalBuildInfo(project)
         configureRepositories(project)
         configureConfigurations(project)
@@ -101,6 +103,7 @@ class BuildPlugin implements Plugin<Project> {
         configureDependenciesInfo(project)
     }
 
+    /** Performs checks on the build environment and prints information about it. */
     static void globalBuildInfo(Project project) {
         if (project.rootProject.ext.has('buildChecksDone') == false) {
@@ -116,12 +119,14 @@ class BuildPlugin implements Plugin<Project> {
             final Map<Integer, String> javaVersions = [:]
             for (int version = 7; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) {
-                javaVersions.put(version, findJavaHome(version));
+                if (System.getenv(getJavaHomeEnvVarName(version.toString())) != null) {
+                    javaVersions.put(version, findJavaHome(version.toString()));
+                }
             }
 
             String javaVendor = System.getProperty('java.vendor')
-            String javaVersion = System.getProperty('java.version')
-            String gradleJavaVersionDetails = "${javaVendor} ${javaVersion}" +
+            String gradleJavaVersion = System.getProperty('java.version')
+            String gradleJavaVersionDetails = "${javaVendor} ${gradleJavaVersion}" +
                 " [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]"
 
             String compilerJavaVersionDetails = gradleJavaVersionDetails
@@ -144,33 +149,33 @@ class BuildPlugin implements Plugin<Project> {
             // Build debugging info
             println '======================================='
             println 'Elasticsearch Build Hamster says Hello!'
-            println '======================================='
             println "  Gradle Version        : ${project.gradle.gradleVersion}"
             println "  OS Info               : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
             if (gradleJavaVersionDetails != compilerJavaVersionDetails || gradleJavaVersionDetails != runtimeJavaVersionDetails) {
-                println "  JDK Version (gradle)  : ${gradleJavaVersionDetails}"
-                println "  JAVA_HOME (gradle)    : ${gradleJavaHome}"
-                println "  JDK Version (compile) : ${compilerJavaVersionDetails}"
-                println "  JAVA_HOME (compile)   : ${compilerJavaHome}"
-                println "  JDK Version (runtime) : ${runtimeJavaVersionDetails}"
-                println "  JAVA_HOME (runtime)   : ${runtimeJavaHome}"
+                println "  Compiler JDK Version  : ${getPaddedMajorVersion(compilerJavaVersionEnum)} (${compilerJavaVersionDetails})"
+                println "  Compiler java.home    : ${compilerJavaHome}"
+                println "  Runtime JDK Version   : ${getPaddedMajorVersion(runtimeJavaVersionEnum)} (${runtimeJavaVersionDetails})"
+                println "  Runtime java.home     : ${runtimeJavaHome}"
+                println "  Gradle JDK Version    : ${getPaddedMajorVersion(JavaVersion.toVersion(gradleJavaVersion))} (${gradleJavaVersionDetails})"
+                println "  Gradle java.home      : ${gradleJavaHome}"
             } else {
-                println "  JDK Version           : ${gradleJavaVersionDetails}"
+                println "  JDK Version           : ${getPaddedMajorVersion(JavaVersion.toVersion(gradleJavaVersion))} (${gradleJavaVersionDetails})"
                 println "  JAVA_HOME             : ${gradleJavaHome}"
             }
             println "  Random Testing Seed   : ${project.testSeed}"
+            println '======================================='
 
             // enforce Java version
             if (compilerJavaVersionEnum < minimumCompilerVersion) {
                 final String message =
-                        "the environment variable JAVA_HOME must be set to a JDK installation directory for Java ${minimumCompilerVersion}" +
+                        "the compiler java.home must be set to a JDK installation directory for Java ${minimumCompilerVersion}" +
                                 " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]"
                 throw new GradleException(message)
             }
             if (runtimeJavaVersionEnum < minimumRuntimeVersion) {
                 final String message =
-                        "the environment variable RUNTIME_JAVA_HOME must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" +
+                        "the runtime java.home must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" +
                                 " but is [${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]"
                 throw new GradleException(message)
             }
@@ -205,6 +210,7 @@ class BuildPlugin implements Plugin<Project> {
             project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
             project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
             project.rootProject.ext.inFipsJvm = inFipsJvm
+            project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion)
         }
 
         project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
@@ -217,11 +223,20 @@ class BuildPlugin implements Plugin<Project> {
         project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
         project.ext.javaVersions = project.rootProject.ext.javaVersions
         project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm
+        project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion
+    }
+
+    private static String getPaddedMajorVersion(JavaVersion compilerJavaVersionEnum) {
+        compilerJavaVersionEnum.getMajorVersion().toString().padLeft(2)
     }
 
     private static String findCompilerJavaHome() {
-        final String javaHome = System.getenv('JAVA_HOME')
-        if (javaHome == null) {
+        String compilerJavaHome = System.getenv('JAVA_HOME')
+        final String compilerJavaProperty = System.getProperty('compiler.java')
+        if (compilerJavaProperty != null) {
+            compilerJavaHome = findJavaHome(compilerJavaProperty)
+        }
+        if (compilerJavaHome == null) {
             if (System.getProperty("idea.active") != null || System.getProperty("eclipse.launcher") != null) {
                 // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
                 return Jvm.current().javaHome
@@ -233,11 +248,24 @@ class BuildPlugin implements Plugin<Project> {
                 )
             }
         }
-        return javaHome
+        return compilerJavaHome
     }
 
-    private static String findJavaHome(int version) {
-        return System.getenv('JAVA' + version + '_HOME')
+    private static String findJavaHome(String version) {
+        String versionedVarName = getJavaHomeEnvVarName(version)
+        String versionedJavaHome = System.getenv(versionedVarName);
+        if (versionedJavaHome == null) {
+            throw new GradleException(
+                "$versionedVarName must be set to build Elasticsearch. " +
+                    "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
+                    "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 for details."
+            )
+        }
+        return versionedJavaHome
+    }
+
+    private static String getJavaHomeEnvVarName(String version) {
+        return 'JAVA' + version + '_HOME'
     }
 
     /** Add a check before gradle execution phase which ensures java home for the given java version is set. */
@@ -271,7 +299,10 @@ class BuildPlugin implements Plugin<Project> {
     }
 
     private static String findRuntimeJavaHome(final String compilerJavaHome) {
-        assert compilerJavaHome != null
+        String runtimeJavaProperty = System.getProperty("runtime.java")
+        if (runtimeJavaProperty != null) {
+            return findJavaHome(runtimeJavaProperty)
+        }
         return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome
     }
 
@@ -769,6 +800,12 @@ class BuildPlugin implements Plugin<Project> {
             systemProperty 'tests.security.manager', 'true'
             systemProperty 'jna.nosys', 'true'
             systemProperty 'es.scripting.exception_for_missing_value', 'true'
+            systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion()
+            if (project.ext.inFipsJvm) {
+                systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS"
+            } else {
+                systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion()
+            }
             // TODO: remove setting logging level via system property
             systemProperty 'tests.logger.level', 'WARN'
             for (Map.Entry<Object, Object> property : System.properties.entrySet()) {
@@ -783,9 +820,12 @@ class BuildPlugin implements Plugin<Project> {
                 }
             }
 
-            // TODO: remove this once joda time is removed from scriptin in 7.0
+            // TODO: remove this once joda time is removed from scripting in 7.0
             systemProperty 'es.scripting.use_java_time', 'true'
 
+            // TODO: remove this once ctx isn't added to update script params in 7.0
+            systemProperty 'es.scripting.update.ctx_in_params', 'false'
+
             // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
             if (project.inFipsJvm) {
                 systemProperty 'javax.net.ssl.trustStorePassword', 'password'
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
index ec012633f0893..8c0eedeb6f547 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
@@ -284,6 +284,10 @@ public class SnippetsTask extends DefaultTask {
                     contents.append(line).append('\n')
                     return
                 }
+                // Allow line continuations for console snippets within lists
+                if (snippet != null && line.trim() == '+') {
+                    return
+                }
                 // Just finished
                 emit()
             }
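Taken together, the BuildPlugin changes above decouple the JDK running Gradle from the JDKs used to compile and to run tests: JAVA<N>_HOME environment variables advertise the JDKs available on the machine, and the compiler.java and runtime.java system properties select among them. A hypothetical invocation (the JDK paths are illustrative) might look like:

    export JAVA8_HOME=/usr/lib/jvm/java-8-openjdk
    export JAVA11_HOME=/usr/lib/jvm/java-11-openjdk
    ./gradlew test -Dcompiler.java=11 -Druntime.java=8

As the error message added above notes, a variable exported after a Gradle daemon has started may require `./gradlew --stop` before it is picked up.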
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java
new file mode 100644
index 0000000000000..7306d2832a9be
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTask.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.gradle;
+
+import org.gradle.api.DefaultTask;
+import org.gradle.api.GradleException;
+import org.gradle.api.file.DirectoryProperty;
+import org.gradle.api.file.RegularFileProperty;
+import org.gradle.api.logging.Logger;
+import org.gradle.api.logging.Logging;
+import org.gradle.api.tasks.Classpath;
+import org.gradle.api.tasks.Input;
+import org.gradle.api.tasks.OutputDirectory;
+import org.gradle.api.tasks.SkipWhenEmpty;
+import org.gradle.api.tasks.StopExecutionException;
+import org.gradle.api.tasks.TaskAction;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Export Elasticsearch build resources to configurable paths.
+ * <p>
+ * Will overwrite existing files and create missing directories.
+ * Useful for resources that need to be passed to other processes through the filesystem or otherwise can't be
+ * consumed from the classpath.
+ */
+public class ExportElasticsearchBuildResourcesTask extends DefaultTask {
+
+    private final Logger logger = Logging.getLogger(ExportElasticsearchBuildResourcesTask.class);
+
+    private final Set<String> resources = new HashSet<>();
+
+    private DirectoryProperty outputDir;
+
+    public ExportElasticsearchBuildResourcesTask() {
+        outputDir = getProject().getLayout().directoryProperty(
+            getProject().getLayout().getBuildDirectory().dir("build-tools-exported")
+        );
+    }
+
+    @OutputDirectory
+    public DirectoryProperty getOutputDir() {
+        return outputDir;
+    }
+
+    @Input
+    @SkipWhenEmpty
+    public Set<String> getResources() {
+        return Collections.unmodifiableSet(resources);
+    }
+
+    @Classpath
+    public String getResourcesClasspath() {
+        // This will make sure the task is not considered up to date if the resources are changed.
+        logger.info("Classpath: {}", System.getProperty("java.class.path"));
+        return System.getProperty("java.class.path");
+    }
+
+    public void setOutputDir(DirectoryProperty outputDir) {
+        this.outputDir = outputDir;
+    }
+
+    public RegularFileProperty take(String resource) {
+        if (getState().getExecuted() || getState().getExecuting()) {
+            throw new GradleException("buildResources can't be configured after the task ran. " +
+                "Make sure the task is not used after configuration time"
+            );
+        }
+        resources.add(resource);
+        return getProject().getLayout().fileProperty(outputDir.file(resource));
+    }
+
+    @TaskAction
+    public void doExport() {
+        if (resources.isEmpty()) {
+            throw new StopExecutionException();
+        }
+        resources.stream().parallel()
+            .forEach(resourcePath -> {
+                Path destination = outputDir.get().file(resourcePath).getAsFile().toPath();
+                try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
+                    if (is == null) {
+                        throw new GradleException("Can't export `" + resourcePath + "` from build-tools: not found");
+                    }
+                    // replace any existing file so re-runs keep the documented "will overwrite" behavior
+                    Files.copy(is, destination, java.nio.file.StandardCopyOption.REPLACE_EXISTING);
+                } catch (IOException e) {
+                    throw new GradleException("Can't write resource `" + resourcePath + "` to " + destination, e);
+                }
+            });
+    }
+
+}
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index 956562316b90a..7d5835cfd01a1 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -720,6 +720,7 @@
+
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java b/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java
new file mode 100644
index 0000000000000..323ea903063da
--- /dev/null
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/ExportElasticsearchBuildResourcesTaskIT.java
@@ -0,0 +1,89 @@
+package org.elasticsearch.gradle;
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.elasticsearch.gradle.test.GradleIntegrationTestCase;
+import org.gradle.testkit.runner.BuildResult;
+import org.gradle.testkit.runner.GradleRunner;
+
+public class ExportElasticsearchBuildResourcesTaskIT extends GradleIntegrationTestCase {
+
+    public static final String PROJECT_NAME = "elasticsearch-build-resources";
+
+    public void testUpToDateWithSourcesConfigured() {
+        GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("clean", "-s")
+            .withPluginClasspath()
+            .build();
+
+        BuildResult result = GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("buildResources", "-s", "-i")
+            .withPluginClasspath()
+            .build();
+        assertTaskSuccessfull(result, ":buildResources");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
+
+        result = GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("buildResources", "-s", "-i")
+            .withPluginClasspath()
+            .build();
+        assertTaskUpToDate(result, ":buildResources");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
+    }
+
+    public void testImplicitTaskDependencyCopy() {
+        BuildResult result = GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("clean", "sampleCopyAll", "-s", "-i")
+            .withPluginClasspath()
+            .build();
+        assertTaskSuccessfull(result, ":buildResources");
+        assertTaskSuccessfull(result, ":sampleCopyAll");
+        assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle.xml");
+        // This is a side effect of compile time reference
+        assertBuildFileExists(result, PROJECT_NAME, "sampleCopyAll/checkstyle_suppressions.xml");
+    }
+
+    public void testImplicitTaskDependencyInputFileOfOther() {
+        BuildResult result = GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("clean", "sample", "-s", "-i")
+            .withPluginClasspath()
+            .build();
+
+        assertTaskSuccessfull(result, ":sample");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle.xml");
+        assertBuildFileExists(result, PROJECT_NAME, "build-tools-exported/checkstyle_suppressions.xml");
+    }
+
+    public void testIncorrectUsage() {
+        BuildResult result = GradleRunner.create()
+            .withProjectDir(getProjectDir(PROJECT_NAME))
+            .withArguments("noConfigAfterExecution", "-s", "-i")
+            .withPluginClasspath()
+            .buildAndFail();
+        assertOutputContains("buildResources can't be configured after the task ran");
+    }
+}
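The new task exports resources bundled with build-tools to the filesystem. A hypothetical consumer in buildSrc (not part of this PR; the helper and task names are illustrative) could wire an exported file into another task as below. Note that `take` must be called at configuration time and, as the testKit project further below points out, an explicit dependsOn is still needed because task dependencies can't be providers:

    package org.elasticsearch.gradle;

    import org.gradle.api.Project;
    import org.gradle.api.file.RegularFileProperty;

    class BuildResourcesUsage {
        static void wirePrintTask(Project project) {
            // Assumes BuildPlugin has already registered the "buildResources" task (see above).
            ExportElasticsearchBuildResourcesTask buildResources =
                (ExportElasticsearchBuildResourcesTask) project.getTasks().getByName("buildResources");
            // take() records the resource and returns a lazily-resolved file provider.
            RegularFileProperty suppressions = buildResources.take("checkstyle_suppressions.xml");
            project.getTasks().create("printSuppressions", task -> {
                task.dependsOn(buildResources);
                task.doLast(t -> System.out.println(suppressions.get().getAsFile()));
            });
        }
    }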
diff --git a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
index 5c36fa61550d8..f00ab406a6c10 100644
--- a/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
+++ b/buildSrc/src/test/java/org/elasticsearch/gradle/test/GradleIntegrationTestCase.java
@@ -1,8 +1,13 @@
 package org.elasticsearch.gradle.test;
 
+import org.gradle.testkit.runner.BuildResult;
+import org.gradle.testkit.runner.BuildTask;
 import org.gradle.testkit.runner.GradleRunner;
+import org.gradle.testkit.runner.TaskOutcome;
 
 import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -15,7 +20,7 @@ protected File getProjectDir(String name) {
             throw new RuntimeException("Could not find resources dir for integration tests. " +
                 "Note that these tests can only be ran by Gradle and are not currently supported by the IDE");
         }
-        return new File(root, name);
+        return new File(root, name).getAbsoluteFile();
     }
 
     protected GradleRunner getGradleRunner(String sampleProject) {
@@ -61,4 +66,47 @@ protected void assertOutputDoesNotContain(String output, String... lines) {
         }
     }
 
+    protected void assertTaskSuccessfull(BuildResult result, String taskName) {
+        BuildTask task = result.task(taskName);
+        if (task == null) {
+            fail("Expected task `" + taskName + "` to be successful, but it did not run");
+        }
+        assertEquals(
+            "Expected task to be successful but it was: " + task.getOutcome() +
+                "\n\nOutput is:\n" + result.getOutput(),
+            TaskOutcome.SUCCESS,
+            task.getOutcome()
+        );
+    }
+
+    protected void assertTaskUpToDate(BuildResult result, String taskName) {
+        BuildTask task = result.task(taskName);
+        if (task == null) {
+            fail("Expected task `" + taskName + "` to be up-to-date, but it did not run");
+        }
+        assertEquals(
+            "Expected task to be up to date but it was: " + task.getOutcome() +
+                "\n\nOutput is:\n" + result.getOutput(),
+            TaskOutcome.UP_TO_DATE,
+            task.getOutcome()
+        );
+    }
+
+    protected void assertBuildFileExists(BuildResult result, String projectName, String path) {
+        Path absPath = getBuildDir(projectName).toPath().resolve(path);
+        assertTrue(
+            "Expected `" + absPath + "` to exist but it did not" +
+                "\n\nOutput is:\n" + result.getOutput(),
+            Files.exists(absPath)
+        );
+    }
+
+    protected void assertBuildFileDoesNotExists(BuildResult result, String projectName, String path) {
+        Path absPath = getBuildDir(projectName).toPath().resolve(path);
+        assertFalse(
+            "Expected `" + absPath + "` not to exist but it did" +
+                "\n\nOutput is:\n" + result.getOutput(),
+            Files.exists(absPath)
+        );
+    }
 }
diff --git a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle
new file mode 100644
index 0000000000000..c0e3ac93133d3
--- /dev/null
+++ b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle
@@ -0,0 +1,38 @@
+plugins {
+    id 'elasticsearch.build'
+}
+
+ext.licenseFile = file("$buildDir/dummy/license")
+ext.noticeFile = file("$buildDir/dummy/notice")
+
+buildResources {
+    take 'checkstyle.xml'
+}
+
+task sampleCopyAll(type: Sync) {
+    /** Note: no explicit dependency. This works with tasks that use the Provider API a.k.a "Lazy Configuration" **/
+    from buildResources
+    into "$buildDir/sampleCopyAll"
+}
+
+task sample {
+    // This does not work, task dependencies can't be providers
+    // dependsOn exportBuildResources.resource('minimumRuntimeVersion')
+    // Nor does this, despite https://github.com/gradle/gradle/issues/3811
+    // dependsOn exportBuildResources.outputDir
+    // for now it's just
+    dependsOn buildResources
+    // we have to reference it at configuration time in order for it to be picked up
+    ext.checkstyle_suppressions = buildResources.take('checkstyle_suppressions.xml')
+    doLast {
+        println "This task is using ${file(checkstyle_suppressions)}"
+    }
+}
+
+task noConfigAfterExecution {
+    dependsOn buildResources
+    doLast {
+        println "This should cause an error because we are referencing " +
+            "${buildResources.take('checkstyle_suppressions.xml')} after the `buildResources` task has run."
+    }
+}
\ No newline at end of file
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 0a7d323561b0e..46b532820a248 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,13 +1,15 @@
 elasticsearch     = 6.5.0
-lucene            = 7.5.0-snapshot-608f0277b0
+lucene            = 7.5.0-snapshot-13b9e28f9d
 
 # optional dependencies
 spatial4j         = 0.7
 jts               = 1.15.0
-jackson           = 2.8.10
+jackson           = 2.8.11
 snakeyaml         = 1.17
+icu4j             = 62.1
+supercsv          = 2.4.0
 # when updating log4j, please update also docs/java-api/index.asciidoc
-log4j             = 2.9.1
+log4j             = 2.11.1
 slf4j             = 1.6.2
 
 # when updating the JNA version, also update the version in buildSrc/build.gradle
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 65c5d094c7170..6f5eab6e1db1e 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -30,6 +30,14 @@ apply plugin: 'com.github.johnrengelman.shadow'
 group = 'org.elasticsearch.client'
 archivesBaseName = 'elasticsearch-rest-high-level-client'
 
+publishing {
+    publications {
+        nebula {
+            artifactId = archivesBaseName
+        }
+    }
+}
+
 //we need to copy the yaml spec so we can check naming (see RestHighlevelClientTests#testApiNamingConventions)
 Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE)
 test.dependsOn(copyRestSpec)
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
index e889ec5beba80..99d50f6b46b7e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java
@@ -26,7 +26,7 @@
 import org.elasticsearch.action.ingest.PutPipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 
 import java.io.IOException;
 
@@ -54,9 +54,9 @@ public final class IngestClient {
      * @return the response
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
-    public WritePipelineResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
+    public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
         return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options,
-            WritePipelineResponse::fromXContent, emptySet());
+            AcknowledgedResponse::fromXContent, emptySet());
     }
 
     /**
@@ -67,9 +67,9 @@ public WritePipelineResponse putPipeline(PutPipelineRequest request, RequestOptions options) {
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
-    public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<WritePipelineResponse> listener) {
+    public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options,
-            WritePipelineResponse::fromXContent, listener, emptySet());
+            AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
     /**
@@ -109,9 +109,9 @@ public void getPipelineAsync(GetPipelineRequest request, RequestOptions options,
      * @return the response
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
-    public WritePipelineResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
+    public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
         return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options,
-            WritePipelineResponse::fromXContent, emptySet());
+            AcknowledgedResponse::fromXContent, emptySet());
     }
 
     /**
@@ -123,9 +123,9 @@ public WritePipelineResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) {
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
-    public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<WritePipelineResponse> listener) {
+    public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options,
-            WritePipelineResponse::fromXContent, listener, emptySet());
+            AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
     /**
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
index 587578f3b35e1..589f187f397aa 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
@@ -19,11 +19,27 @@
 
 package org.elasticsearch.client;
 
+import org.apache.http.HttpEntity;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse;
+import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.GetLicenseResponse;
 import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
 import org.elasticsearch.protocol.xpack.license.PutLicenseResponse;
 
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
 import static java.util.Collections.emptySet;
 
@@ -34,7 +50,7 @@
  * See the
  * X-Pack Licensing APIs on elastic.co for more information.
  */
-public class LicenseClient {
+public final class LicenseClient {
 
     private final RestHighLevelClient restHighLevelClient;
 
@@ -54,7 +70,7 @@ public PutLicenseResponse putLicense(PutLicenseRequest request, RequestOptions options) {
     }
 
     /**
-     * Asynchronously updates license for the cluster cluster.
+     * Asynchronously updates license for the cluster.
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
@@ -63,4 +79,79 @@ public void putLicenseAsync(PutLicenseRequest request, RequestOptions options, ActionListener<PutLicenseResponse> listener) {
             PutLicenseResponse::fromXContent, listener, emptySet());
     }
 
+    /**
+     * Returns the current license for the cluster.
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public GetLicenseResponse getLicense(GetLicenseRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequest(request, RequestConverters::getLicense, options,
+            response -> new GetLicenseResponse(convertResponseToJson(response)), emptySet());
+    }
+
+    /**
+     * Asynchronously returns the current license for the cluster.
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void getLicenseAsync(GetLicenseRequest request, RequestOptions options, ActionListener<GetLicenseResponse> listener) {
+        restHighLevelClient.performRequestAsync(request, RequestConverters::getLicense, options,
+            response -> new GetLicenseResponse(convertResponseToJson(response)), listener, emptySet());
+    }
+
+    /**
+     * Deletes license from the cluster.
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public DeleteLicenseResponse deleteLicense(DeleteLicenseRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::deleteLicense, options,
+            DeleteLicenseResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously deletes license from the cluster.
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void deleteLicenseAsync(DeleteLicenseRequest request, RequestOptions options, ActionListener<DeleteLicenseResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::deleteLicense, options,
+            DeleteLicenseResponse::fromXContent, listener, emptySet());
+    }
+
+    /**
+     * Converts an entire response into a JSON string.
+     * <p>
+     * This is useful for responses that we don't parse on the client side but instead work with as a string,
+     * such as the license JSON.
+     */
+    static String convertResponseToJson(Response response) throws IOException {
+        HttpEntity entity = response.getEntity();
+        if (entity == null) {
+            throw new IllegalStateException("Response body expected but not returned");
+        }
+        if (entity.getContentType() == null) {
+            throw new IllegalStateException("Elasticsearch didn't return the [Content-Type] header, unable to parse response body");
+        }
+        XContentType xContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue());
+        if (xContentType == null) {
+            throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
+        }
+        if (xContentType == XContentType.JSON) {
+            // No conversion is required
+            return Streams.copyToString(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8));
+        } else {
+            // Need to convert into JSON
+            try (InputStream stream = response.getEntity().getContent();
+                 XContentParser parser = XContentFactory.xContent(xContentType).createParser(NamedXContentRegistry.EMPTY,
+                     DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream)) {
+                parser.nextToken();
+                XContentBuilder builder = XContentFactory.jsonBuilder();
+                builder.copyCurrentStructure(parser);
+                return Strings.toString(builder);
+            }
+        }
+    }
 }
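With WritePipelineResponse replaced by the generic AcknowledgedResponse, both pipeline mutations in the IngestClient above share one response type. A minimal usage sketch (the pipeline id and JSON body are made up; error handling omitted):

    import org.elasticsearch.action.ingest.PutPipelineRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.xcontent.XContentType;

    import java.io.IOException;

    public class PutPipelineExample {
        static boolean putEmptyPipeline(RestHighLevelClient client) throws IOException {
            String source = "{\"description\":\"demo pipeline\",\"processors\":[]}";
            PutPipelineRequest request = new PutPipelineRequest("my-pipeline", new BytesArray(source), XContentType.JSON);
            AcknowledgedResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT);
            return response.isAcknowledged();
        }
    }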
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
new file mode 100644
index 0000000000000..5244432a89407
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
+import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
+
+import java.io.IOException;
+import java.util.Collections;
+
+/**
+ * Machine Learning API client wrapper for the {@link RestHighLevelClient}
+ * <p>
+ * See the
+ * X-Pack Machine Learning APIs for additional information.
+ */
+public final class MachineLearningClient {
+
+    private final RestHighLevelClient restHighLevelClient;
+
+    MachineLearningClient(RestHighLevelClient restHighLevelClient) {
+        this.restHighLevelClient = restHighLevelClient;
+    }
+
+    /**
+     * Creates a new Machine Learning Job.
+     * <p>
+     * For additional info
+     * see ML PUT job documentation
+     *
+     * @param request the PutJobRequest containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return PutJobResponse with enclosed {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} object
+     * @throws IOException when there is a serialization issue sending the request or receiving the response
+     */
+    public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+            RequestConverters::putMachineLearningJob,
+            options,
+            PutJobResponse::fromXContent,
+            Collections.emptySet());
+    }
+
+    /**
+     * Creates a new Machine Learning Job asynchronously and notifies the listener on completion.
+     * <p>
+     * For additional info
+     * see ML PUT job documentation
+     *
+     * @param request the request containing the {@link org.elasticsearch.protocol.xpack.ml.job.config.Job} settings
+     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener Listener to be notified upon request completion
+     */
+    public void putJobAsync(PutJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+            RequestConverters::putMachineLearningJob,
+            options,
+            PutJobResponse::fromXContent,
+            listener,
+            Collections.emptySet());
+    }
+}
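A condensed putJob sketch, mirroring buildJob(...) in the MachineLearningIT test further below (the field names, bucket span, and job id are illustrative):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
    import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
    import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
    import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
    import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
    import org.elasticsearch.protocol.xpack.ml.job.config.Job;

    import java.io.IOException;
    import java.util.Collections;
    import java.util.concurrent.TimeUnit;

    public class PutJobExample {
        static Job minimalJob(String jobId) {
            Detector detector = new Detector.Builder()
                .setFieldName("total")
                .setFunction("sum")
                .build();
            AnalysisConfig.Builder analysis = new AnalysisConfig.Builder(Collections.singletonList(detector));
            analysis.setBucketSpan(new TimeValue(5, TimeUnit.MINUTES));
            DataDescription.Builder data = new DataDescription.Builder();
            data.setTimeField("timestamp");
            Job.Builder job = new Job.Builder(jobId);
            job.setAnalysisConfig(analysis);
            job.setDataDescription(data);
            return job.build();
        }

        static String create(RestHighLevelClient client) throws IOException {
            PutJobResponse response = client.machineLearning()
                .putJob(new PutJobRequest(minimalJob("my-first-job")), RequestOptions.DEFAULT);
            return response.getResponse().getId();
        }
    }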
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java
new file mode 100644
index 0000000000000..7da3832994768
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
+
+import java.io.IOException;
+import java.util.Collections;
+
+/**
+ * A wrapper for the {@link RestHighLevelClient} that provides methods for
+ * accessing the Migration APIs.
+ * <p>
+ * See the
+ * X-Pack Migration APIs on elastic.co for more information.
+ */
+public final class MigrationClient {
+
+    private final RestHighLevelClient restHighLevelClient;
+
+    MigrationClient(RestHighLevelClient restHighLevelClient) {
+        this.restHighLevelClient = restHighLevelClient;
+    }
+
+    /**
+     * Get Migration Assistance for one or more indices.
+     *
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options,
+            IndexUpgradeInfoResponse::fromXContent, Collections.emptySet());
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 0d20821122147..ec359f0a0d4d4 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -39,12 +39,12 @@
 import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest;
 import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
-import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
-import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
@@ -78,8 +78,8 @@
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.ingest.DeletePipelineRequest;
 import org.elasticsearch.action.ingest.GetPipelineRequest;
-import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.PutPipelineRequest;
+import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.search.ClearScrollRequest;
 import org.elasticsearch.action.search.MultiSearchRequest;
 import org.elasticsearch.action.search.SearchRequest;
@@ -96,7 +96,7 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContent;
@@ -107,10 +107,14 @@
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.rankeval.RankEvalRequest;
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
-import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
-import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
+import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
 import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
+import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
+import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
+import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.rest.action.search.RestSearchAction;
 import org.elasticsearch.script.mustache.MultiSearchTemplateRequest;
 import org.elasticsearch.script.mustache.SearchTemplateRequest;
@@ -428,8 +432,14 @@ static Request bulk(BulkRequest bulkRequest) throws IOException {
                     BytesReference indexSource = indexRequest.source();
                     XContentType indexXContentType = indexRequest.getContentType();
 
-                    try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
-                        LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) {
+                    try (XContentParser parser = XContentHelper.createParser(
+                        /*
+                         * EMPTY and THROW are fine here because we just call
+                         * copyCurrentStructure which doesn't touch the
+                         * registry or deprecation.
+                         */
+                        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
+                        indexSource, indexXContentType)) {
                         try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
                             builder.copyCurrentStructure(parser);
                             source = BytesReference.bytes(builder).toBytesRef();
@@ -1165,7 +1175,11 @@ static Request xpackUsage(XPackUsageRequest usageRequest) {
     }
 
     static Request putLicense(PutLicenseRequest putLicenseRequest) {
-        Request request = new Request(HttpPut.METHOD_NAME, "/_xpack/license");
+        String endpoint = new EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("license")
+            .build();
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
         Params parameters = new Params(request);
         parameters.withTimeout(putLicenseRequest.timeout());
         parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
@@ -1176,6 +1190,48 @@ static Request putLicense(PutLicenseRequest putLicenseRequest) {
         return request;
     }
 
+    static Request getLicense(GetLicenseRequest getLicenseRequest) {
+        String endpoint = new EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("license")
+            .build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        Params parameters = new Params(request);
+        parameters.withLocal(getLicenseRequest.local());
+        return request;
+    }
+
+    static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
+        Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license");
+        Params parameters = new Params(request);
+        parameters.withTimeout(deleteLicenseRequest.timeout());
+        parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
+        return request;
+    }
+
+    static Request putMachineLearningJob(PutJobRequest putJobRequest) throws IOException {
+        String endpoint = new EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("ml")
+            .addPathPartAsIs("anomaly_detectors")
+            .addPathPart(putJobRequest.getJob().getId())
+            .build();
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE));
+        return request;
+    }
+
+    static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
+        EndpointBuilder endpointBuilder = new EndpointBuilder()
+            .addPathPartAsIs("_xpack/migration/assistance")
+            .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices());
+        String endpoint = endpointBuilder.build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        Params parameters = new Params(request);
+        parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions());
+        return request;
+    }
+
     private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
         BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
         return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
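Usage of the new Migration client is a one-liner per call; this sketch mirrors the MigrationIT test further below ("test" is an illustrative index name):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
    import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

    import java.io.IOException;

    public class MigrationExample {
        static void printUpgradeActions(RestHighLevelClient client) throws IOException {
            // No indices: ask about the whole cluster.
            IndexUpgradeInfoResponse all = client.migration()
                .getAssistance(new IndexUpgradeInfoRequest(), RequestOptions.DEFAULT);
            // Or restrict the check to specific indices.
            IndexUpgradeInfoResponse one = client.migration()
                .getAssistance(new IndexUpgradeInfoRequest("test"), RequestOptions.DEFAULT);
            System.out.println(all.getActions().size() + " / " + one.getActions().size());
        }
    }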
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index ac0e5988a2472..69223494d3163 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -59,7 +59,7 @@
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.xcontent.ContextParser;
-import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -207,6 +207,8 @@ public class RestHighLevelClient implements Closeable {
     private final XPackClient xPackClient = new XPackClient(this);
     private final WatcherClient watcherClient = new WatcherClient(this);
     private final LicenseClient licenseClient = new LicenseClient(this);
+    private final MigrationClient migrationClient = new MigrationClient(this);
+    private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
 
     /**
      * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@@ -330,6 +332,32 @@ public final XPackClient xpack() {
      */
     public LicenseClient license() { return licenseClient; }
 
+    /**
+     * Provides methods for accessing the Elastic Licensed Migration APIs that
+     * are shipped with the default distribution of Elasticsearch. All of
+     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
+     * <p>
+     * See the
+     * Migration APIs on elastic.co for more information.
+     */
+    public MigrationClient migration() {
+        return migrationClient;
+    }
+
+    /**
+     * Provides methods for accessing the Elastic Licensed Machine Learning APIs that
+     * are shipped with the Elastic Stack distribution of Elasticsearch. All of
+     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
+     * <p>
+     * See the
+     * Machine Learning APIs on elastic.co for more information.
+     *
+     * @return the client wrapper for making Machine Learning API calls
+     */
+    public MachineLearningClient machineLearning() {
+        return machineLearningClient;
+    }
+
     /**
      * Executes a bulk request using the Bulk API.
      * See Bulk API on elastic.co
@@ -1415,8 +1443,7 @@ protected final Resp parseEntity(final HttpEntity entity,
         if (xContentType == null) {
             throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
         }
-        try (XContentParser parser = xContentType.xContent().createParser(registry,
-            LoggingDeprecationHandler.INSTANCE, entity.getContent())) {
+        try (XContentParser parser = xContentType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) {
             return entityParser.apply(parser);
         }
     }
@@ -1434,6 +1461,19 @@ static boolean convertExistsResponse(Response response) {
         return response.getStatusLine().getStatusCode() == 200;
     }
 
+    /**
+     * Ignores deprecation warnings. This is appropriate because it is only
+     * used to parse responses from Elasticsearch. Any deprecation warnings
+     * emitted there just mean that you are talking to an old version of
+     * Elasticsearch. There isn't anything you can do about the deprecation.
+     */
+    private static final DeprecationHandler DEPRECATION_HANDLER = new DeprecationHandler() {
+        @Override
+        public void usedDeprecatedName(String usedName, String modernName) {}
+        @Override
+        public void usedDeprecatedField(String usedName, String replacedWith) {}
+    };
+
     static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
         Map<String, ContextParser<Object, ? extends Aggregation>> map = new HashMap<>();
         map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
diff --git a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
index fb2330f3f083c..33e136a66f44f 100644
--- a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
+++ b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt
@@ -19,3 +19,6 @@ org.apache.http.entity.ContentType#create(java.lang.String)
 org.apache.http.entity.ContentType#create(java.lang.String,java.lang.String)
 org.apache.http.entity.ContentType#create(java.lang.String,java.nio.charset.Charset)
 org.apache.http.entity.ContentType#create(java.lang.String,org.apache.http.NameValuePair[])
+
+@defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations!
+org.elasticsearch.common.xcontent.LoggingDeprecationHandler
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java
index 1f5914f392cf4..7068529619232 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java
@@ -28,7 +28,7 @@
 import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -50,7 +50,7 @@ public void testPutPipeline() throws IOException {
             BytesReference.bytes(pipelineBuilder),
             pipelineBuilder.contentType());
 
-        WritePipelineResponse putPipelineResponse =
+        AcknowledgedResponse putPipelineResponse =
             execute(request, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync);
         assertTrue(putPipelineResponse.isAcknowledged());
     }
@@ -86,7 +86,7 @@ public void testDeletePipeline() throws IOException {
 
         DeletePipelineRequest request = new DeletePipelineRequest(id);
 
-        WritePipelineResponse response =
+        AcknowledgedResponse response =
            execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync);
         assertTrue(response.isAcknowledged());
     }
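The tests below (e.g. SyncedFlushResponseTests) switch from LoggingDeprecationHandler to DeprecationHandler.THROW_UNSUPPORTED_OPERATION, while the client itself installs the silent DEPRECATION_HANDLER above. A sketch of what the strict variant means in practice (the helper class and JSON payload are made up):

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentType;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.Map;

    public class StrictParsingExample {
        // Any use of a deprecated field name in `json` surfaces as an exception instead
        // of a log line, which is the strictness the tests opt into; the client cannot
        // log through log4j2, hence its no-op handler.
        static Map<String, Object> parseStrictly(String json) throws IOException {
            try (XContentParser parser = XContentType.JSON.xContent().createParser(
                    NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)))) {
                return parser.map();
            }
        }
    }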
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
new file mode 100644
index 0000000000000..f86eb5b5dca87
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client;
+
+import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
+import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
+import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
+import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
+import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.Matchers.is;
+
+public class MachineLearningIT extends ESRestHighLevelClientTestCase {
+
+    public void testPutJob() throws Exception {
+        String jobId = randomValidJobId();
+        Job job = buildJob(jobId);
+        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+
+        PutJobResponse putJobResponse = execute(new PutJobRequest(job), machineLearningClient::putJob, machineLearningClient::putJobAsync);
+        Job createdJob = putJobResponse.getResponse();
+
+        assertThat(createdJob.getId(), is(jobId));
+        assertThat(createdJob.getJobType(), is(Job.ANOMALY_DETECTOR_JOB_TYPE));
+    }
+
+    public static String randomValidJobId() {
+        CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
+        return generator.ofCodePointsLength(random(), 10, 10);
+    }
+
+    private static Job buildJob(String jobId) {
+        Job.Builder builder = new Job.Builder(jobId);
+        builder.setDescription(randomAlphaOfLength(10));
+
+        Detector detector = new Detector.Builder()
+            .setFieldName("total")
+            .setFunction("sum")
+            .setDetectorDescription(randomAlphaOfLength(10))
+            .build();
+        AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector));
+        configBuilder.setBucketSpan(new TimeValue(randomIntBetween(1, 10), TimeUnit.SECONDS));
+        builder.setAnalysisConfig(configBuilder);
+
+        DataDescription.Builder dataDescription = new DataDescription.Builder();
+        dataDescription.setTimeFormat(randomFrom(DataDescription.EPOCH_MS, DataDescription.EPOCH));
+        dataDescription.setTimeField(randomAlphaOfLength(10));
+        builder.setDataDescription(dataDescription);
+
+        return builder.build();
+    }
+}
+ */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; + +import java.io.IOException; + +public class MigrationIT extends ESRestHighLevelClientTestCase { + + public void testGetAssistance() throws IOException { + RestHighLevelClient client = highLevelClient(); + { + IndexUpgradeInfoResponse response = client.migration().getAssistance(new IndexUpgradeInfoRequest(), RequestOptions.DEFAULT); + assertEquals(0, response.getActions().size()); + } + { + client.indices().create(new CreateIndexRequest("test"), RequestOptions.DEFAULT); + IndexUpgradeInfoResponse response = client.migration().getAssistance( + new IndexUpgradeInfoRequest("test"), RequestOptions.DEFAULT); + assertEquals(0, response.getActions().size()); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 18bdf5620296b..aad238e9e3655 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -127,6 +127,7 @@ import org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.repositories.fs.FsRepository; @@ -2586,6 +2587,23 @@ public void testXPackInfo() { assertEquals(expectedParams, request.getParameters()); } + public void testGetMigrationAssistance() { + IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest(); + String expectedEndpoint = "/_xpack/migration/assistance"; + if (randomBoolean()) { + String[] indices = randomIndicesNames(1, 5); + upgradeInfoRequest.indices(indices); + expectedEndpoint += "/" + String.join(",", indices); + } + Map<String, String> expectedParams = new HashMap<>(); + setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, expectedParams); + Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(expectedEndpoint, request.getEndpoint()); + assertNull(request.getEntity()); + assertEquals(expectedParams, request.getParameters()); + } + public void testXPackPutWatch() throws Exception { PutWatchRequest putWatchRequest = new PutWatchRequest(); String watchId = randomAlphaOfLength(10); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index a75e89dc6cbbb..3ba11711b02ce 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -776,7 +776,9 @@ public void testApiNamingConventions() throws Exception { //TODO xpack api are currently ignored, we need to load xpack yaml spec too if (apiName.startsWith("xpack.") == false && 
apiName.startsWith("license.") == false && - apiName.startsWith("watcher.") == false) { + apiName.startsWith("machine_learning.") == false && + apiName.startsWith("watcher.") == false && + apiName.startsWith("migration.") == false) { apiNotFound.add(apiName); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 5ea6ee2722082..2458cb9c1f20b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -268,7 +268,7 @@ public void testSearchWithTermsAgg() throws IOException { Terms.Bucket type2 = termsAgg.getBucketByKey("type2"); assertEquals(2, type2.getDocCount()); assertEquals(0, type2.getAggregations().asList().size()); - } + } public void testSearchWithRangeAgg() throws IOException { { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java index 0756cfa6bab10..8057a92b3f279 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SyncedFlushResponseTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -63,7 +63,7 @@ public void testXContentSerialization() throws IOException { .xContent() .createParser( xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(serverResponsebuilder).streamInput() ).map() ); @@ -74,7 +74,7 @@ public void testXContentSerialization() throws IOException { .xContent() .createParser( xContentRegistry(), - LoggingDeprecationHandler.INSTANCE, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(clientResponsebuilder).streamInput() ) .map() @@ -94,7 +94,9 @@ public void testXContentDeserialization() throws IOException { .contentType() .xContent() .createParser( - xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput() + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + BytesReference.bytes(builder).streamInput() ); SyncedFlushResponse originalResponse = plan.clientResult; SyncedFlushResponse parsedResponse = SyncedFlushResponse.fromXContent(parser); @@ -175,7 +177,8 @@ TestPlan createTestPlan() throws IOException { .contentType() .xContent() .createParser( - xContentRegistry(), LoggingDeprecationHandler.INSTANCE, + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(builder).streamInput() ) .map(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index 98502e3668af1..4702c34c6de3d 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.SimulateProcessorResult; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; @@ -93,7 +93,7 @@ public void testPutPipeline() throws IOException { // end::put-pipeline-request-masterTimeout // tag::put-pipeline-execute - WritePipelineResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1> + AcknowledgedResponse response = client.ingest().putPipeline(request, RequestOptions.DEFAULT); // <1> // end::put-pipeline-execute // tag::put-pipeline-response @@ -117,10 +117,10 @@ public void testPutPipelineAsync() throws Exception { ); // tag::put-pipeline-execute-listener - ActionListener<WritePipelineResponse> listener = - new ActionListener<WritePipelineResponse>() { + ActionListener<AcknowledgedResponse> listener = + new ActionListener<AcknowledgedResponse>() { @Override - public void onResponse(WritePipelineResponse response) { + public void onResponse(AcknowledgedResponse response) { // <1> } @@ -236,7 +236,7 @@ public void testDeletePipeline() throws IOException { // end::delete-pipeline-request-masterTimeout // tag::delete-pipeline-execute - WritePipelineResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1> + AcknowledgedResponse response = client.ingest().deletePipeline(request, RequestOptions.DEFAULT); // <1> // end::delete-pipeline-execute // tag::delete-pipeline-response @@ -257,10 +257,10 @@ public void testDeletePipelineAsync() throws Exception { DeletePipelineRequest request = new DeletePipelineRequest("my-pipeline-id"); // tag::delete-pipeline-execute-listener - ActionListener<WritePipelineResponse> listener = - new ActionListener<WritePipelineResponse>() { + ActionListener<AcknowledgedResponse> listener = + new ActionListener<AcknowledgedResponse>() { @Override - public void onResponse(WritePipelineResponse response) { + public void onResponse(AcknowledgedResponse response) { // <1> } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java index dc8ea56844665..6146030999c56 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/LicensingDocumentationIT.java @@ -25,6 +25,11 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseResponse; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import 
org.elasticsearch.protocol.xpack.license.PutLicenseResponse; @@ -33,6 +38,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; @@ -43,7 +50,7 @@ */ public class LicensingDocumentationIT extends ESRestHighLevelClientTestCase { - public void testPutLicense() throws Exception { + public void testLicense() throws Exception { assumeTrue("License is only valid when tested against snapshot/test builds", Build.CURRENT.isSnapshot()); RestHighLevelClient client = highLevelClient(); String license = "{\"license\": {\"uid\":\"893361dc-9749-4997-93cb-802e3d7fa4a8\",\"type\":\"gold\"," + @@ -82,7 +89,7 @@ public void testPutLicense() throws Exception { // tag::put-license-execute-listener ActionListener<PutLicenseResponse> listener = new ActionListener<PutLicenseResponse>() { @Override - public void onResponse(PutLicenseResponse indexResponse) { + public void onResponse(PutLicenseResponse putLicenseResponse) { // <1> } @@ -104,5 +111,108 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + // we cannot actually delete the license, otherwise the remaining tests won't work + if (Booleans.isTrue("true")) { + return; + } + { + //tag::delete-license-execute + DeleteLicenseRequest request = new DeleteLicenseRequest(); + + DeleteLicenseResponse response = client.license().deleteLicense(request, RequestOptions.DEFAULT); + //end::delete-license-execute + + //tag::delete-license-response + boolean acknowledged = response.isAcknowledged(); // <1> + //end::delete-license-response + + assertTrue(acknowledged); + } + { + DeleteLicenseRequest request = new DeleteLicenseRequest(); + // tag::delete-license-execute-listener + ActionListener<DeleteLicenseResponse> listener = new ActionListener<DeleteLicenseResponse>() { + @Override + public void onResponse(DeleteLicenseResponse deleteLicenseResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-license-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-license-execute-async + client.license().deleteLicenseAsync( + request, RequestOptions.DEFAULT, listener); // <1> + // end::delete-license-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testGetLicense() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::get-license-execute + GetLicenseRequest request = new GetLicenseRequest(); + + GetLicenseResponse response = client.license().getLicense(request, RequestOptions.DEFAULT); + //end::get-license-execute + + //tag::get-license-response + String currentLicense = response.getLicenseDefinition(); // <1> + //end::get-license-response + + assertThat(currentLicense, containsString("trial")); + assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster")); + } + { + GetLicenseRequest request = new GetLicenseRequest(); + // tag::get-license-execute-listener + ActionListener<GetLicenseResponse> listener = new ActionListener<GetLicenseResponse>() { + @Override + public void onResponse(GetLicenseResponse indexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-license-execute-listener + + // Replace the empty listener by a blocking listener 
in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-license-execute-async + client.license().getLicenseAsync( + request, RequestOptions.DEFAULT, listener); // <1> + // end::get-license-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + { + GetLicenseRequest request = new GetLicenseRequest(); + RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); + // Make sure that it still works in other formats + builder.addHeader("Accept", randomFrom("application/smile", "application/cbor")); + RequestOptions options = builder.build(); + GetLicenseResponse response = client.license().getLicense(request, options); + String currentLicense = response.getLicenseDefinition(); + assertThat(currentLicense, startsWith("{")); + assertThat(currentLicense, containsString("trial")); + assertThat(currentLicense, containsString("client_rest-high-level_integTestCluster")); + assertThat(currentLicense, endsWith("}")); + } } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java new file mode 100644 index 0000000000000..c8310be8053b2 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MigrationClientDocumentationIT.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; + +import java.io.IOException; +import java.util.Map; + +/** + * This class is used to generate the Java Migration API documentation. + * You need to wrap your code between two tags like: + * // tag::example + * // end::example + * + * Where example is your tag name. + * + * Then in the documentation, you can extract what is between tag and end tags with + * ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[example] + * -------------------------------------------------- + * + * The column width of the code block is 84. 
If the code contains a line longer + than 84, the line will be cut and a horizontal scroll bar will be displayed. + (the code indentation of the tag is not included in the width) + */ +public class MigrationClientDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testGetAssistance() throws IOException { + RestHighLevelClient client = highLevelClient(); + + // tag::get-assistance-request + IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(); // <1> + // end::get-assistance-request + + // tag::get-assistance-request-indices + request.indices("index1", "index2"); // <1> + // end::get-assistance-request-indices + + request.indices(Strings.EMPTY_ARRAY); + + // tag::get-assistance-request-indices-options + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-assistance-request-indices-options + + // tag::get-assistance-execute + IndexUpgradeInfoResponse response = client.migration().getAssistance(request, RequestOptions.DEFAULT); + // end::get-assistance-execute + + // tag::get-assistance-response + Map<String, UpgradeActionRequired> actions = response.getActions(); + for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) { + String index = entry.getKey(); // <1> + UpgradeActionRequired actionRequired = entry.getValue(); // <2> + } + // end::get-assistance-response + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java new file mode 100644 index 0000000000000..97bee81393864 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.documentation; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.protocol.xpack.ml.PutJobRequest; +import org.elasticsearch.protocol.xpack.ml.PutJobResponse; +import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig; +import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription; +import org.elasticsearch.protocol.xpack.ml.job.config.Detector; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.greaterThan; + +public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testCreateJob() throws Exception { + RestHighLevelClient client = highLevelClient(); + + //tag::x-pack-ml-put-job-detector + Detector.Builder detectorBuilder = new Detector.Builder() + .setFunction("sum") // <1> + .setFieldName("total") // <2> + .setDetectorDescription("Sum of total"); // <3> + //end::x-pack-ml-put-job-detector + + //tag::x-pack-ml-put-job-analysis-config + List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1> + AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2> + .setBucketSpan(TimeValue.timeValueMinutes(10)); // <3> + //end::x-pack-ml-put-job-analysis-config + + //tag::x-pack-ml-put-job-data-description + DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder() + .setTimeField("timestamp"); // <1> + //end::x-pack-ml-put-job-data-description + + { + String id = "job_1"; + + //tag::x-pack-ml-put-job-config + Job.Builder jobBuilder = new Job.Builder(id) // <1> + .setAnalysisConfig(analysisConfigBuilder) // <2> + .setDataDescription(dataDescriptionBuilder) // <3> + .setDescription("Total sum of requests"); // <4> + //end::x-pack-ml-put-job-config + + //tag::x-pack-ml-put-job-request + PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1> + //end::x-pack-ml-put-job-request + + //tag::x-pack-ml-put-job-execute + PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT); + //end::x-pack-ml-put-job-execute + + //tag::x-pack-ml-put-job-response + Date createTime = response.getResponse().getCreateTime(); // <1> + //end::x-pack-ml-put-job-response + assertThat(createTime.getTime(), greaterThan(0L)); + } + { + String id = "job_2"; + Job.Builder jobBuilder = new Job.Builder(id) + .setAnalysisConfig(analysisConfigBuilder) + .setDataDescription(dataDescriptionBuilder) + .setDescription("Total sum of requests"); + + PutJobRequest request = new PutJobRequest(jobBuilder.build()); + // tag::x-pack-ml-put-job-execute-listener + ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() { + @Override + public void onResponse(PutJobResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-put-job-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-put-job-execute-async + 
client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-put-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } +} diff --git a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/client/sniffer/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 6078dfefa2319..54f78542bc2ac 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -157,7 +157,7 @@ subprojects { environment('JAVA_HOME', getJavaHome(it, 8)) } else if ("6.2".equals(bwcBranch)) { environment('JAVA_HOME', getJavaHome(it, 9)) - } else if (["6.3", "6.x"].contains(bwcBranch)) { + } else if (["6.3", "6.4", "6.x"].contains(bwcBranch)) { environment('JAVA_HOME', getJavaHome(it, 10)) } else { environment('JAVA_HOME', project.compilerJavaHome) diff --git a/distribution/packages/src/common/systemd/elasticsearch.service b/distribution/packages/src/common/systemd/elasticsearch.service index 409f04f76d058..a4d67d8830a56 100644 --- a/distribution/packages/src/common/systemd/elasticsearch.service +++ b/distribution/packages/src/common/systemd/elasticsearch.service @@ -6,6 +6,7 @@ After=network-online.target [Service] RuntimeDirectory=elasticsearch +PrivateTmp=true Environment=ES_HOME=/usr/share/elasticsearch Environment=ES_PATH_CONF=${path.conf} Environment=PID_DIR=/var/run/elasticsearch diff --git a/docs/build.gradle b/docs/build.gradle index 6c06f36ab0070..804695ae41a36 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -40,6 +40,7 @@ integTestCluster { // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults systemProperty 'es.scripting.use_java_time', 'false' + systemProperty 'es.scripting.update.ctx_in_params', 'false' } // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed diff --git a/docs/java-api/index.asciidoc b/docs/java-api/index.asciidoc index fb83f063272f7..fbbba6da884f4 100644 --- a/docs/java-api/index.asciidoc +++ b/docs/java-api/index.asciidoc @@ -101,7 +101,7 @@ You need to also include Log4j 2 dependencies: org.apache.logging.log4j log4j-core - 2.9.1 + 2.11.1 -------------------------------------------------- @@ -129,7 +129,7 @@ If you want to use another logger than Log4j 2, you can use http://www.slf4j.org org.apache.logging.log4j log4j-to-slf4j - 2.9.1 + 2.11.1 org.slf4j diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc index e1968ae456a5c..9afce0842b9ae 100644 --- a/docs/java-api/query-dsl/percolate-query.asciidoc +++ b/docs/java-api/query-dsl/percolate-query.asciidoc @@ -49,7 +49,7 @@ XContentBuilder docBuilder = XContentFactory.jsonBuilder().startObject(); docBuilder.field("content", "This is amazing!"); docBuilder.endObject(); //End of the JSON root object -PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", docBuilder.bytes()); 
+PercolateQueryBuilder percolateQuery = new PercolateQueryBuilder("query", "docs", BytesReference.bytes(docBuilder)); // Percolate, by executing the percolator query in the query dsl: SearchResponse response = client().prepareSearch("myIndexName") diff --git a/docs/java-rest/high-level/licensing/delete-license.asciidoc b/docs/java-rest/high-level/licensing/delete-license.asciidoc new file mode 100644 index 0000000000000..d9aec6e57a14a --- /dev/null +++ b/docs/java-rest/high-level/licensing/delete-license.asciidoc @@ -0,0 +1,51 @@ +[[java-rest-high-delete-license]] +=== Delete License + +[[java-rest-high-delete-license-execution]] +==== Execution + +The license can be deleted using the `deleteLicense()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute] +-------------------------------------------------- + +[[java-rest-high-delete-license-response]] +==== Response + +The returned `DeleteLicenseResponse` contains the `acknowledged` flag, which +is true if the request was processed by all nodes. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-response] +-------------------------------------------------- +<1> Check the acknowledged flag. It should be true if the license deletion was acknowledged. + +[[java-rest-high-delete-license-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute-async] +-------------------------------------------------- +<1> The `DeleteLicenseRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteLicenseResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[delete-license-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/licensing/get-license.asciidoc b/docs/java-rest/high-level/licensing/get-license.asciidoc new file mode 100644 index 0000000000000..17eb89450fb15 --- /dev/null +++ b/docs/java-rest/high-level/licensing/get-license.asciidoc @@ -0,0 +1,50 @@ +[[java-rest-high-get-license]] +=== Get License + +[[java-rest-high-get-license-execution]] +==== Execution + +The license can be retrieved using the `getLicense()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute] +-------------------------------------------------- + +[[java-rest-high-get-license-response]] +==== Response + +The returned `GetLicenseResponse` contains the license in JSON format. 
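Because the license comes back as a raw JSON `String` rather than a parsed object, it can be handed to any JSON tooling. The following is an editorial sketch (not part of this patch) using the XContent helpers seen elsewhere in these tests; it assumes the body keeps the top-level `license` object with a `status` field that the REST `GET _license` API returns:

[source,java]
----
GetLicenseResponse response = client.license().getLicense(new GetLicenseRequest(), RequestOptions.DEFAULT);
String licenseJson = response.getLicenseDefinition();
// Parse the raw JSON into a Map; no named parsers are needed for a plain map.
Map<String, Object> parsed = XContentType.JSON.xContent()
        .createParser(NamedXContentRegistry.EMPTY,
                DeprecationHandler.THROW_UNSUPPORTED_OPERATION, licenseJson)
        .map();
@SuppressWarnings("unchecked")
Map<String, Object> license = (Map<String, Object>) parsed.get("license");
String status = (String) license.get("status"); // e.g. "active"
----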
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-response] +-------------------------------------------------- +<1> The text of the license. + +[[java-rest-high-get-license-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute-async] +-------------------------------------------------- +<1> The `GetLicenseRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetLicenseResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/LicensingDocumentationIT.java[get-license-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/migration/get-assistance.asciidoc b/docs/java-rest/high-level/migration/get-assistance.asciidoc new file mode 100644 index 0000000000000..20f857eb1fb41 --- /dev/null +++ b/docs/java-rest/high-level/migration/get-assistance.asciidoc @@ -0,0 +1,49 @@ +[[java-rest-high-migration-get-assistance]] +=== Migration Get Assistance + +[[java-rest-high-migraton-get-assistance-request]] +==== Index Upgrade Info Request + +An `IndexUpgradeInfoRequest` does not require any argument: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request] +-------------------------------------------------- +<1> Create a new request instance + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices] +-------------------------------------------------- +<1> Set the indices to the request + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-request-indices-options] +-------------------------------------------------- +<1> Set the `IndicesOptions` to control how unavailable indices are resolved and +how wildcard expressions are expanded + +[[java-rest-high-migration-get-assistance-execution]] +==== Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-execute] +-------------------------------------------------- + +[[java-rest-high-migration-get-assistance-response]] +==== Response + +The returned `IndexUpgradeInfoResponse` contains the actions required for each index. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MigrationClientDocumentationIT.java[get-assistance-response] +-------------------------------------------------- +<1> Retrieve the index +<2> Retrieve the action required for the migration of the current index diff --git a/docs/java-rest/high-level/x-pack/x-pack-info.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc similarity index 100% rename from docs/java-rest/high-level/x-pack/x-pack-info.asciidoc rename to docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc diff --git a/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc similarity index 98% rename from docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc rename to docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc index 0927ae71c0bf5..c1e5ccf13e222 100644 --- a/docs/java-rest/high-level/x-pack/x-pack-usage.asciidoc +++ b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc @@ -12,7 +12,7 @@ retrieved using the `usage()` method: include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute] -------------------------------------------------- -[[java-rest-high-x-pack-info-response]] +[[java-rest-high-x-pack-usage-response]] ==== Response The returned `XPackUsageResponse` contains a `Map` keyed by feature name. diff --git a/docs/java-rest/high-level/ml/put_job.asciidoc b/docs/java-rest/high-level/ml/put_job.asciidoc new file mode 100644 index 0000000000000..d51bb63d4054d --- /dev/null +++ b/docs/java-rest/high-level/ml/put_job.asciidoc @@ -0,0 +1,161 @@ +[[java-rest-high-x-pack-ml-put-job]] +=== Put Job API + +The Put Job API can be used to create a new {ml} job +in the cluster. The API accepts a `PutJobRequest` object +as a request and returns a `PutJobResponse`. + +[[java-rest-high-x-pack-ml-put-job-request]] +==== Put Job Request + +A `PutJobRequest` requires the following argument: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-request] +-------------------------------------------------- +<1> The configuration of the {ml} job to create as a `Job` + +[[java-rest-high-x-pack-ml-put-job-config]] +==== Job Configuration + +The `Job` object contains all the details about the {ml} job +configuration. + +A `Job` requires the following arguments: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-config] +-------------------------------------------------- +<1> The job ID +<2> An analysis configuration +<3> A data description +<4> Optionally, a human-readable description + +[[java-rest-high-x-pack-ml-put-job-analysis-config]] +==== Analysis Configuration + +The analysis configuration of the {ml} job is defined in the `AnalysisConfig`. +`AnalysisConfig` reflects all the configuration +settings that can be defined using the REST API. 
+ +Using the REST API, we could define this analysis configuration: + +[source,js] +-------------------------------------------------- +"analysis_config" : { + "bucket_span" : "10m", + "detectors" : [ + { + "detector_description" : "Sum of total", + "function" : "sum", + "field_name" : "total" + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +Using the `AnalysisConfig` object and the high level REST client, the list +of detectors must be built first. + +An example of building a `Detector` instance is as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-detector] +-------------------------------------------------- +<1> The function to use +<2> The field to apply the function to +<3> Optionally, a human-readable description + +Then the same configuration would be: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-analysis-config] +-------------------------------------------------- +<1> Create a list of detectors +<2> Pass the list of detectors to the analysis config builder constructor +<3> The bucket span + +[[java-rest-high-x-pack-ml-put-job-data-description]] +==== Data Description + +After defining the analysis config, the next thing to define is the +data description, using a `DataDescription` instance. `DataDescription` +reflects all the configuration settings that can be defined using the +REST API. + +Using the REST API, we could define this data description: + +[source,js] +-------------------------------------------------- +"data_description" : { + "time_field" : "timestamp" +} +-------------------------------------------------- +// NOTCONSOLE + +Using the `DataDescription` object and the high level REST client, the same +configuration would be: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-data-description] +-------------------------------------------------- +<1> The time field + +[[java-rest-high-x-pack-ml-put-job-execution]] +==== Execution + +The Put Job API can be executed through a `MachineLearningClient` +instance. Such an instance can be retrieved from a `RestHighLevelClient` +using the `machineLearning()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-ml-put-job-response]] +==== Response + +The returned `PutJobResponse` contains the full representation of +the new {ml} job if it has been successfully created. 
This will +contain the creation time and other fields initialized using +default values: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-response] +-------------------------------------------------- +<1> The creation time is a field that was not passed in the `Job` object in the request + +[[java-rest-high-x-pack-ml-put-job-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-async] +-------------------------------------------------- +<1> The `PutJobRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `PutJobResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 63aef8659559d..808546f2c279c 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -54,14 +54,12 @@ The Java High Level REST Client supports the following Miscellaneous APIs: * <> * <> * <> -* <> -* <> +* <> include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] -include::x-pack/x-pack-info.asciidoc[] -include::x-pack/watcher/put-watch.asciidoc[] -include::x-pack/watcher/delete-watch.asciidoc[] +include::miscellaneous/x-pack-info.asciidoc[] +include::miscellaneous/x-pack-usage.asciidoc[] == Indices APIs @@ -190,11 +188,40 @@ The Java High Level REST Client supports the following Scripts APIs: include::script/get_script.asciidoc[] include::script/delete_script.asciidoc[] - == Licensing APIs The Java High Level REST Client supports the following Licensing APIs: * <> +* <> +* <> include::licensing/put-license.asciidoc[] +include::licensing/get-license.asciidoc[] +include::licensing/delete-license.asciidoc[] + +== Machine Learning APIs + +The Java High Level REST Client supports the following Machine Learning APIs: + +* <> + +include::ml/put_job.asciidoc[] + +== Migration APIs + +The Java High Level REST Client supports the following Migration APIs: + +* <> + +include::migration/get-assistance.asciidoc[] + +== Watcher APIs + +The Java High Level REST Client supports the following Watcher APIs: + +* <> +* <> + +include::watcher/put-watch.asciidoc[] +include::watcher/delete-watch.asciidoc[] diff --git a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc b/docs/java-rest/high-level/watcher/delete-watch.asciidoc similarity index 98% rename from docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc rename to docs/java-rest/high-level/watcher/delete-watch.asciidoc index 
d5f3581755860..615337ba317bf 100644 --- a/docs/java-rest/high-level/x-pack/watcher/delete-watch.asciidoc +++ b/docs/java-rest/high-level/watcher/delete-watch.asciidoc @@ -1,5 +1,5 @@ [[java-rest-high-x-pack-watcher-delete-watch]] -=== X-Pack Delete Watch API +=== Delete Watch API [[java-rest-high-x-pack-watcher-delete-watch-execution]] ==== Execution diff --git a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc b/docs/java-rest/high-level/watcher/put-watch.asciidoc similarity index 98% rename from docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc rename to docs/java-rest/high-level/watcher/put-watch.asciidoc index a76ba407a1a91..e5ee87bea34a6 100644 --- a/docs/java-rest/high-level/x-pack/watcher/put-watch.asciidoc +++ b/docs/java-rest/high-level/watcher/put-watch.asciidoc @@ -1,5 +1,5 @@ [[java-rest-high-x-pack-watcher-put-watch]] -=== X-Pack Put Watch API +=== Put Watch API [[java-rest-high-x-pack-watcher-put-watch-execution]] ==== Execution diff --git a/docs/painless/painless-contexts.asciidoc b/docs/painless/painless-contexts.asciidoc index 8b8a3b0eec6b4..cc7bc752ec6d9 100644 --- a/docs/painless/painless-contexts.asciidoc +++ b/docs/painless/painless-contexts.asciidoc @@ -14,6 +14,8 @@ specialized code may define new ways to use a Painless script. |==== | Name | Painless Documentation | Elasticsearch Documentation +| Ingest processor | <> + | {ref}/script-processor.html[Elasticsearch Documentation] | Update | <> | {ref}/docs-update.html[Elasticsearch Documentation] | Update by query | <> @@ -44,12 +46,12 @@ specialized code may define new ways to use a Painless script. | {ref}/search-aggregations-metrics-scripted-metric-aggregation.html[Elasticsearch Documentation] | Bucket aggregation | <> | {ref}/search-aggregations-pipeline-bucket-script-aggregation.html[Elasticsearch Documentation] -| Ingest processor | <> - | {ref}/script-processor.html[Elasticsearch Documentation] | Watcher condition | <> | {xpack-ref}/condition-script.html[Elasticsearch Documentation] | Watcher transform | <> | {xpack-ref}/transform-script.html[Elasticsearch Documentation] |==== +include::painless-contexts/painless-context-examples.asciidoc[] + include::painless-contexts/index.asciidoc[] diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc index 64e4326e052f2..a9d3982133e1b 100644 --- a/docs/painless/painless-contexts/index.asciidoc +++ b/docs/painless/painless-contexts/index.asciidoc @@ -1,3 +1,5 @@ +include::painless-ingest-processor-context.asciidoc[] + include::painless-update-context.asciidoc[] include::painless-update-by-query-context.asciidoc[] @@ -28,8 +30,6 @@ include::painless-metric-agg-reduce-context.asciidoc[] include::painless-bucket-agg-context.asciidoc[] -include::painless-ingest-processor-context.asciidoc[] - include::painless-watcher-condition-context.asciidoc[] include::painless-watcher-transform-context.asciidoc[] diff --git a/docs/painless/painless-contexts/painless-context-examples.asciidoc b/docs/painless/painless-contexts/painless-context-examples.asciidoc new file mode 100644 index 0000000000000..469f425d1d89f --- /dev/null +++ b/docs/painless/painless-contexts/painless-context-examples.asciidoc @@ -0,0 +1,80 @@ +[[painless-context-examples]] +=== Context examples + +To run the examples, index the sample seat data into Elasticsearch. The examples +must be run sequentially to work correctly. + +. Download the +https://download.elastic.co/demos/painless/contexts/seats.json[seat data]. 
This +data set contains booking information for a collection of plays. Each document +represents a single seat for a play at a particular theater on a specific date +and time. ++ +Each document contains the following fields: ++ +`theatre` ({ref}/keyword.html[`keyword`]):: + The name of the theater the play is in. +`play` ({ref}/text.html[`text`]):: + The name of the play. +`actors` ({ref}/text.html[`text`]):: + A list of actors in the play. +`row` ({ref}/number.html[`integer`]):: + The row of the seat. +`number` ({ref}/number.html[`integer`]):: + The number of the seat within a row. +`cost` ({ref}/number.html[`double`]):: + The cost of the ticket for the seat. +`sold` ({ref}/boolean.html[`boolean`]):: + Whether or not the seat is sold. +`datetime` ({ref}/date.html[`date`]):: + The date and time of the play as a date object. +`date` ({ref}/keyword.html[`keyword`]):: + The date of the play as a keyword. +`time` ({ref}/keyword.html[`keyword`]):: + The time of the play as a keyword. + +. {defguide}/running-elasticsearch.html[Start] Elasticsearch. Note these +examples assume Elasticsearch and Kibana are running locally. To use the Console +editor with a remote Kibana instance, click the settings icon and enter the +Console URL. To submit a cURL request to a remote Elasticsearch instance, edit +the request URL. + +. Create {ref}/mapping.html[mappings] for the sample data: ++ +[source,js] +---- +PUT /seats +{ + "mappings": { + "seat": { + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } + } + } + } +} +---- ++ +// CONSOLE + +. Run the <> +example. This sets up a script ingest processor used on each document as the +seat data is indexed. + +. Index the seat data: ++ +[source,js] +---- +curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@//seats.json" +---- +// NOTCONSOLE + diff --git a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc index 5d451268dedcd..546057ab1a0b8 100644 --- a/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc +++ b/docs/painless/painless-contexts/painless-ingest-processor-context.asciidoc @@ -27,7 +27,7 @@ to modify documents upon insertion. {ref}/mapping-type-field.html[`ctx['_type']`]:: Modify this to change the type for the current document. -`ctx` (`Map`, read-only):: +`ctx` (`Map`):: Modify the values in the `Map/List` structure to add, modify, or delete the fields of a document. @@ -38,4 +38,158 @@ void:: *API* -The standard <> is available. \ No newline at end of file +The standard <> is available. + +*Example* + +To run this example, first follow the steps in +<>. + +The seat data contains: + +* A date in the format `YYYY-MM-DD` where the second digit of both month and day + is optional. +* A time in the format HH:MM* where the second digit of both hours and minutes + is optional. The star (*) represents either the `String` `AM` or `PM`. + +The following ingest script processes the date and time `Strings` and stores the +result in a `datetime` field. 
+ +[source,Painless] +---- +String[] split(String s, char d) { <1> + int count = 0; + + for (char c : s.toCharArray()) { <2> + if (c == d) { + ++count; + } + } + + if (count == 0) { + return new String[] {s}; <3> + } + + String[] r = new String[count + 1]; <4> + int i0 = 0, i1 = 0; + count = 0; + + for (char c : s.toCharArray()) { <5> + if (c == d) { + r[count++] = s.substring(i0, i1); + i0 = i1 + 1; + } + + ++i1; + } + + r[count] = s.substring(i0, i1); <6> + + return r; +} + +String[] dateSplit = split(ctx.date, (char)"-"); <7> +String year = dateSplit[0].trim(); +String month = dateSplit[1].trim(); + +if (month.length() == 1) { <8> + month = "0" + month; +} + +String day = dateSplit[2].trim(); + +if (day.length() == 1) { <9> + day = "0" + day; +} + +boolean pm = ctx.time.substring(ctx.time.length() - 2).equals("PM"); <10> +String[] timeSplit = split( + ctx.time.substring(0, ctx.time.length() - 2), (char)":"); <11> +int hours = Integer.parseInt(timeSplit[0].trim()); +int minutes = Integer.parseInt(timeSplit[1].trim()); + +if (pm) { <12> + hours += 12; +} + +String dts = year + "-" + month + "-" + day + "T" + + (hours < 10 ? "0" + hours : "" + hours) + ":" + + (minutes < 10 ? "0" + minutes : "" + minutes) + + ":00+08:00"; <13> + +ZonedDateTime dt = ZonedDateTime.parse( + dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); <14> +ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L; <15> +---- +<1> Creates a `split` <> to split a + <> type value using a <> + type value as the delimiter. This is useful for handling the necessity of + pulling out the individual pieces of the date and time `Strings` from the + original seat data. +<2> The first pass through each `char` in the `String` collects how many new + `Strings` the original is split into. +<3> Returns the original `String` if there are no instances of the delimiting + `char`. +<4> Creates an <> value to collect the split `Strings` + into based on the number of `char` delimiters found in the first pass. +<5> The second pass through each `char` in the `String` collects each split + substring into an array type value of `Strings`. +<6> Collects the last substring into the array type value of `Strings`. +<7> Uses the `split` function to separate the date `String` from the seat data + into year, month, and day `Strings`. + Note:: + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. +<8> Appends the <> `"0"` value to a single + digit month since the format of the seat data allows for this case. +<9> Appends the <> `"0"` value to a single + digit day since the format of the seat data allows for this case. +<10> Sets the <> + <> to `true` if the time `String` is a time + in the afternoon or evening. + Note:: + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `time` field. +<11> Uses the `split` function to separate the time `String` from the seat data + into hours and minutes `Strings`. + Note:: + * The use of the `substring` method to remove the `AM` or `PM` portion of + the time `String`. + * The use of a `String` type value to `char` type value + <> as part of the second argument since + character literals do not exist. + * The use of the `ctx` ingest processor context variable to retrieve the + data from the `date` field. 
+<12> If the time `String` is an afternoon or evening value adds the + <> `12` to the existing hours to move to + a 24-hour based time. +<13> Builds a new time `String` that is parsable using existing API methods. +<14> Creates a `ZonedDateTime` <> value by using + the API method `parse` to parse the new time `String`. +<15> Sets the datetime field `datetime` to the number of milliseconds retrieved + from the API method `getLong`. + Note:: + * The use of the `ctx` ingest processor context variable to set the field + `datetime`. Manipulate each document's fields with the `ctx` variable as + each document is indexed. + +Submit the following request: + +[source,js] +---- +PUT /_ingest/pipeline/seats +{ + "description": "update datetime for seats", + "processors": [ + { + "script": { + "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;" + } + } + ] +} +---- +// CONSOLE \ No newline at end of file diff --git a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc index 31cb596ae8167..5cc9ad8ecbb93 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-combine-context.asciidoc @@ -12,7 +12,7 @@ optional as part of a full metric aggregation. `params` (`Map`, read-only):: User-defined parameters passed in as part of the query. -`params['_agg']` (`Map`):: +`state` (`Map`):: `Map` with values available from the prior map script. *Return* diff --git a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc index 1503e3abb5891..8c0fddfa33961 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-init-context.asciidoc @@ -12,13 +12,13 @@ full metric aggregation. `params` (`Map`, read-only):: User-defined parameters passed in as part of the query. -`params['_agg']` (`Map`):: +`state` (`Map`):: Empty `Map` used to add values for use in a <>. *Side Effects* -`params['_agg']` (`Map`):: +`state` (`Map`):: Add values to this `Map` to for use in a map. Additional values must be of the type `Map`, `List`, `String` or primitive. 
diff --git a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc index 16016d1cf8171..a34308aa93887 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-map-context.asciidoc @@ -13,10 +13,9 @@ part of a full metric aggregation. `params` (`Map`, read-only):: User-defined parameters passed in as part of the query. -`params['_agg']` (`Map`):: +`state` (`Map`):: `Map` used to add values for processing in a - <> or returned - directly. + <> or to be returned from the aggregation. `doc` (`Map`, read-only):: Contains the fields of the current document where each field is a @@ -27,15 +26,16 @@ part of a full metric aggregation. *Side Effects* -`params['_agg']` (`Map`):: +`state` (`Map`):: Use this `Map` to add values for processing in a combine script. Additional values must be of the type `Map`, `List`, `String` or - primitive. If an initialization script is provided as part the + primitive. The same `state` `Map` is shared between all aggregated documents + on a given shard. If an initialization script is provided as part of the aggregation then values added from the initialization script are - available as well. If no combine script is specified, values must be - directly stored in `_agg`. If no combine script and no + available. If no combine script is specified, values must be + directly stored in `state` in a usable form. If no combine script and no <> are specified, the - values are used as the result. + `state` values are used as the result. *Return* diff --git a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc index b76e02b1b0499..b492207ef4468 100644 --- a/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc +++ b/docs/painless/painless-contexts/painless-metric-agg-reduce-context.asciidoc @@ -14,7 +14,7 @@ specified) and is optional as part of a full metric aggregation. `params` (`Map`, read-only):: User-defined parameters passed in as part of the query. -`params['_aggs']` (`Map`):: +`states` (`Map`):: `Map` with values available from the prior combine script (or a map script if no combine script is specified). diff --git a/docs/painless/painless-keywords.asciidoc b/docs/painless/painless-keywords.asciidoc index 9463902c8d346..24371d3713c0b 100644 --- a/docs/painless/painless-keywords.asciidoc +++ b/docs/painless/painless-keywords.asciidoc @@ -5,7 +5,7 @@ Keywords are reserved tokens for built-in language features. *Errors* -If a keyword is used as an <>. +* If a keyword is used as an <>. *Keywords* diff --git a/docs/reference/release-notes/6.4.asciidoc b/docs/reference/release-notes/6.4.asciidoc index c2266f53e2af9..3ae58fa10bcc0 100644 --- a/docs/reference/release-notes/6.4.asciidoc +++ b/docs/reference/release-notes/6.4.asciidoc @@ -59,6 +59,10 @@ option. ({pull}30140[#29658]) A new analysis plugin called `analysis_nori` that exposes the Lucene Korean analysis module. 
({pull}30397[#30397])
 
+Rollup::
+* A new API allows getting the rollup capabilities of specific rollup indices,
+rather than by the target pattern ({pull}30401[#30401])
+
 [float]
 === Enhancements
 
@@ -70,6 +74,11 @@ Geo::
 Rollup::
 * Validate timezone in range queries to ensure they match the selected job when
 searching ({pull}30338[#30338])
+* Rollup now indexes `null` values, meaning a single "unified" job for heterogeneous data is now the recommended pattern ({pull}31402[#31402])
+* Rollup Search endpoint now supports the `terms` query ({pull}30973[#30973])
+* Rollup jobs no longer allow patterns that match their `rollup_index`, which could lead to strange errors ({pull}30491[#30491])
+* Validation errors thrown while creating a rollup job are now a specialization of the previous `ActionRequestValidationException`,
+  making it easier to catch. The new exception is `RollupActionRequestValidationException` ({pull}30339[#30339])
 
 [float]
 === Bug Fixes
 
@@ -82,6 +91,15 @@
 Do not ignore request analysis/similarity settings on index resize operations wh
 
 Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641])
 
+Rollup::
+* Move to 128-bit document IDs for Rollup. The old IDs were not wide enough and susceptible to hashing collisions.
+Jobs that are running during cluster upgrade will "self-upgrade" to the new ID scheme, but it is recommended that users
+fully rebuild Rollup indices from scratch if possible. Any existing collisions are not fixable and so data loss may
+affect the rollup index despite the new IDs being used. ({pull}32558[#32558])
+* Histo group configurations should support `scaled_float` ({pull}32048[#32048])
+* Fix rollup on date fields that don't support `epoch_millis` ({pull}31890[#31890])
+* Metric config properly validates itself now ({pull}31159[#31159])
+
 //[float]
 //=== Regressions
 
diff --git a/docs/reference/setup/important-settings.asciidoc b/docs/reference/setup/important-settings.asciidoc
index b9b99b708031e..8a9b59480a09b 100644
--- a/docs/reference/setup/important-settings.asciidoc
+++ b/docs/reference/setup/important-settings.asciidoc
@@ -14,6 +14,7 @@ The following settings *must* be considered before going to production:
 * <>
 * <>
 * <>
+* <>
 
 include::important-settings/path-settings.asciidoc[]
 
@@ -31,4 +32,6 @@ include::important-settings/heap-dump-path.asciidoc[]
 
 include::important-settings/gc-logging.asciidoc[]
 
+include::important-settings/es-tmpdir.asciidoc[]
+
 include::important-settings/error-file.asciidoc[]
diff --git a/docs/reference/setup/important-settings/es-tmpdir.asciidoc b/docs/reference/setup/important-settings/es-tmpdir.asciidoc
new file mode 100644
index 0000000000000..20959d969b879
--- /dev/null
+++ b/docs/reference/setup/important-settings/es-tmpdir.asciidoc
@@ -0,0 +1,23 @@
+[[es-tmpdir]]
+=== Temp directory
+
+By default, Elasticsearch uses a private temporary directory that the startup
+script creates immediately below the system temporary directory.
+
+On some Linux distributions a system utility will clean files and directories
+from `/tmp` if they have not been recently accessed. This can lead to the
+private temporary directory being removed while Elasticsearch is running if
+features that require the temporary directory are not used for a long time.
+This causes problems if a feature that requires the temporary directory is
+subsequently used.
+ +If you install Elasticsearch using the `.deb` or `.rpm` packages and run it +under `systemd` then the private temporary directory that Elasticsearch uses +is excluded from periodic cleanup. + +However, if you intend to run the `.tar.gz` distribution on Linux for an +extended period then you should consider creating a dedicated temporary +directory for Elasticsearch that is not under a path that will have old files +and directories cleaned from it. This directory should have permissions set +so that only the user that Elasticsearch runs as can access it. Then set the +`$ES_TMPDIR` environment variable to point to it before starting Elasticsearch. diff --git a/docs/reference/setup/important-settings/node-name.asciidoc b/docs/reference/setup/important-settings/node-name.asciidoc index fab7ddcf11898..5980d8e284e1c 100644 --- a/docs/reference/setup/important-settings/node-name.asciidoc +++ b/docs/reference/setup/important-settings/node-name.asciidoc @@ -2,7 +2,7 @@ === `node.name` By default, Elasticsearch will use the first seven characters of the randomly -generated UUID as the node id.Note that the node id is persisted and does +generated UUID as the node id. Note that the node id is persisted and does not change when a node restarts and therefore the default node name will also not change. @@ -19,4 +19,4 @@ The `node.name` can also be set to the server's HOSTNAME as follows: [source,yaml] -------------------------------------------------- node.name: ${HOSTNAME} --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index d13f0bae1308a..a12b27151c0a8 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -4,8 +4,9 @@ {es} is also available as Docker images. The images use https://hub.docker.com/_/centos/[centos:7] as the base image. -A list of all published Docker images and tags can be found in https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found -on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. +A list of all published Docker images and tags is available at +https://www.docker.elastic.co[www.docker.elastic.co]. The source code is in +https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. These images are free to use under the Elastic license. They contain open source and free commercial features and access to paid commercial features. @@ -28,15 +29,13 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -For example, the Docker image can be retrieved with the following command: - ["source","sh",subs="attributes"] -------------------------------------------- docker pull {docker-repo}:{version} -------------------------------------------- Alternatively, you can download other Docker images that contain only features -that are available under the Apache 2.0 license from +available under the Apache 2.0 license. To download the images, go to https://www.docker.elastic.co[www.docker.elastic.co]. 
endif::[] diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index ec350e3ed05b3..4b5d808550225 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -92,8 +92,7 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion) { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, - restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, this::getClientBuilderWithSniffedHosts); } /** diff --git a/libs/core/src/main/java/org/elasticsearch/common/concurrent/CompletableContext.java b/libs/core/src/main/java/org/elasticsearch/common/concurrent/CompletableContext.java new file mode 100644 index 0000000000000..e838842931837 --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/common/concurrent/CompletableContext.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.concurrent; + +import java.util.concurrent.CompletableFuture; +import java.util.function.BiConsumer; + +/** + * A thread-safe completable context that allows listeners to be attached. This class relies on the + * {@link CompletableFuture} for the concurrency logic. However, it does not accept {@link Throwable} as + * an exceptional result. This allows attaching listeners that only handle {@link Exception}. 
+ *
+ * @param <T> the result type
+ */
+public class CompletableContext<T> {
+
+    private final CompletableFuture<T> completableFuture = new CompletableFuture<>();
+
+    public void addListener(BiConsumer<T, ? super Exception> listener) {
+        BiConsumer<T, Throwable> castThrowable = (v, t) -> {
+            if (t == null) {
+                listener.accept(v, null);
+            } else {
+                assert !(t instanceof Error) : "Cannot be error";
+                listener.accept(v, (Exception) t);
+            }
+        };
+        completableFuture.whenComplete(castThrowable);
+    }
+
+    public boolean isDone() {
+        return completableFuture.isDone();
+    }
+
+    public boolean isCompletedExceptionally() {
+        return completableFuture.isCompletedExceptionally();
+    }
+
+    public boolean completeExceptionally(Exception ex) {
+        return completableFuture.completeExceptionally(ex);
+    }
+
+    public boolean complete(T value) {
+        return completableFuture.complete(value);
+    }
+}
diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java b/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java
index d0de7637d2c08..f3515fcfe83b0 100644
--- a/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java
+++ b/libs/grok/src/main/java/org/elasticsearch/grok/ThreadWatchdog.java
@@ -21,6 +21,8 @@
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.BiFunction;
 import java.util.function.LongSupplier;
 
@@ -104,6 +106,8 @@ class Default implements ThreadWatchdog {
     private final long maxExecutionTime;
     private final LongSupplier relativeTimeSupplier;
     private final BiFunction<Long, Runnable, ScheduledFuture<?>> scheduler;
+    private final AtomicInteger registered = new AtomicInteger(0);
+    private final AtomicBoolean running = new AtomicBoolean(false);
     final ConcurrentHashMap<Thread, Long> registry = new ConcurrentHashMap<>();
 
     private Default(long interval,
@@ -114,11 +118,14 @@ private Default(long interval,
         this.maxExecutionTime = maxExecutionTime;
         this.relativeTimeSupplier = relativeTimeSupplier;
         this.scheduler = scheduler;
-        scheduler.apply(interval, this::interruptLongRunningExecutions);
     }
 
     public void register() {
+        registered.getAndIncrement();
         Long previousValue = registry.put(Thread.currentThread(), relativeTimeSupplier.getAsLong());
+        if (running.compareAndSet(false, true) == true) {
+            scheduler.apply(interval, this::interruptLongRunningExecutions);
+        }
         assert previousValue == null;
     }
 
@@ -129,6 +136,7 @@ public long maxExecutionTimeInMillis() {
 
     public void unregister() {
         Long previousValue = registry.remove(Thread.currentThread());
+        registered.decrementAndGet();
         assert previousValue != null;
     }
 
@@ -140,7 +148,11 @@ private void interruptLongRunningExecutions() {
                 // not removing the entry here, this happens in the unregister() method.
} } - scheduler.apply(interval, this::interruptLongRunningExecutions); + if (registered.get() > 0) { + scheduler.apply(interval, this::interruptLongRunningExecutions); + } else { + running.set(false); + } } } diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java index 46faa4ae05d38..29e2351215f60 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/ThreadWatchdogTests.java @@ -18,15 +18,25 @@ */ package org.elasticsearch.grok; -import org.elasticsearch.test.ESTestCase; - import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; import static org.hamcrest.Matchers.is; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; public class ThreadWatchdogTests extends ESTestCase { - + public void testInterrupt() throws Exception { AtomicBoolean run = new AtomicBoolean(true); // to avoid a lingering thread when test has completed ThreadWatchdog watchdog = ThreadWatchdog.newInstance(10, 100, System::currentTimeMillis, (delay, command) -> { @@ -43,7 +53,7 @@ public void testInterrupt() throws Exception { thread.start(); return null; }); - + Map registry = ((ThreadWatchdog.Default) watchdog).registry; assertThat(registry.size(), is(0)); // need to call #register() method on a different thread, assertBusy() fails if current thread gets interrupted @@ -66,5 +76,39 @@ public void testInterrupt() throws Exception { assertThat(registry.size(), is(0)); }); } - + + public void testIdleIfNothingRegistered() throws Exception { + long interval = 1L; + ScheduledExecutorService threadPool = mock(ScheduledExecutorService.class); + ThreadWatchdog watchdog = ThreadWatchdog.newInstance(interval, Long.MAX_VALUE, System::currentTimeMillis, + (delay, command) -> threadPool.schedule(command, delay, TimeUnit.MILLISECONDS)); + // Periodic action is not scheduled because no thread is registered + verifyZeroInteractions(threadPool); + CompletableFuture commandFuture = new CompletableFuture<>(); + // Periodic action is scheduled because a thread is registered + doAnswer(invocationOnMock -> { + commandFuture.complete((Runnable) invocationOnMock.getArguments()[0]); + return null; + }).when(threadPool).schedule( + any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS) + ); + watchdog.register(); + // Registering the first thread should have caused the command to get scheduled again + Runnable command = commandFuture.get(1L, TimeUnit.MILLISECONDS); + Mockito.reset(threadPool); + watchdog.unregister(); + command.run(); + // Periodic action is not scheduled again because no thread is registered + verifyZeroInteractions(threadPool); + watchdog.register(); + Thread otherThread = new Thread(watchdog::register); + try { + verify(threadPool).schedule(any(Runnable.class), eq(interval), eq(TimeUnit.MILLISECONDS)); + // Registering a second thread does not cause the command to get scheduled twice + verifyNoMoreInteractions(threadPool); + 
otherThread.start(); + } finally { + otherThread.join(); + } + } } diff --git a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 deleted file mode 100644 index 1d3e18e21a694..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c58cc9313ddf19f0900cd61ed044874278ce320 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..378ba524422bc --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.8.11.jar.sha1 @@ -0,0 +1 @@ +8b9826e16c3366764bfb7ad7362554f0471046c3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 deleted file mode 100644 index 4f4cacde22079..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e853081fadaad3e98ed801937acc3d8f77580686 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..510afb3df53e6 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.8.11.jar.sha1 @@ -0,0 +1 @@ +d9d1c49c5d9d5e46e2aee55f3cdd119286fe0fc1 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 deleted file mode 100644 index 40bcb05f69795..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e08caf1d787c825307d8cc6362452086020d853 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..78a68d715ec3d --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.8.11.jar.sha1 @@ -0,0 +1 @@ +2e77c6ff7342cd61ab1ae7cb14ed16aebfc8a72a \ No newline at end of file diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java index fb871590df7fd..38bc251be41dd 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentFactory.java @@ -35,7 +35,7 @@ */ public class XContentFactory { - private static final int GUESS_HEADER_LENGTH = 20; + static final int GUESS_HEADER_LENGTH = 20; /** * Returns a content builder using 
JSON format ({@link org.elasticsearch.common.xcontent.XContentType#JSON}. @@ -153,8 +153,10 @@ public static XContentType xContentType(CharSequence content) { return XContentType.JSON; } // Should we throw a failure here? Smile idea is to use it in bytes.... - if (length > 2 && first == SmileConstants.HEADER_BYTE_1 && content.charAt(1) == SmileConstants.HEADER_BYTE_2 && - content.charAt(2) == SmileConstants.HEADER_BYTE_3) { + if (length > 2 + && first == SmileConstants.HEADER_BYTE_1 + && content.charAt(1) == SmileConstants.HEADER_BYTE_2 + && content.charAt(2) == SmileConstants.HEADER_BYTE_3) { return XContentType.SMILE; } if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') { @@ -227,13 +229,29 @@ public static XContent xContent(byte[] data, int offset, int length) { */ @Deprecated public static XContentType xContentType(InputStream si) throws IOException { + /* + * We need to guess the content type. To do this, we look for the first non-whitespace character and then try to guess the content + * type on the GUESS_HEADER_LENGTH bytes that follow. We do this in a way that does not modify the initial read position in the + * underlying input stream. This is why the input stream must support mark/reset and why we repeatedly mark the read position and + * reset. + */ if (si.markSupported() == false) { throw new IllegalArgumentException("Cannot guess the xcontent type without mark/reset support on " + si.getClass()); } - si.mark(GUESS_HEADER_LENGTH); + si.mark(Integer.MAX_VALUE); try { + // scan until we find the first non-whitespace character or the end of the stream + int current; + do { + current = si.read(); + if (current == -1) { + return null; + } + } while (Character.isWhitespace((char) current)); + // now guess the content type off the next GUESS_HEADER_LENGTH bytes including the current byte final byte[] firstBytes = new byte[GUESS_HEADER_LENGTH]; - int read = 0; + firstBytes[0] = (byte) current; + int read = 1; while (read < GUESS_HEADER_LENGTH) { final int r = si.read(firstBytes, read, GUESS_HEADER_LENGTH - read); if (r == -1) { @@ -245,6 +263,7 @@ public static XContentType xContentType(InputStream si) throws IOException { } finally { si.reset(); } + } /** @@ -278,15 +297,17 @@ public static XContentType xContentType(byte[] bytes, int offset, int length) { if (first == '{') { return XContentType.JSON; } - if (length > 2 && first == SmileConstants.HEADER_BYTE_1 && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 && - bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { + if (length > 2 + && first == SmileConstants.HEADER_BYTE_1 + && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 + && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { return XContentType.SMILE; } if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') { return XContentType.YAML; } // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1){ + if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { return XContentType.CBOR; } if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) { diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..0ebdddcc5f1b5 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ 
+fded6bb485b8b01bb2a9280162fd14d4d3ce4510 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 908f70131b39d..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd7d8078a2d0ad11a24f54156cc015630c96858a \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index a3a62225b09fb..23dc0fd276cbe 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -147,7 +147,7 @@ private static BucketAggregationScript.Factory newBucketAggregationScriptFactory } return new BucketAggregationScript(parameters) { @Override - public double execute() { + public Double execute() { getParams().forEach((name, value) -> { ReplaceableConstDoubleValues placeholder = functionValuesMap.get(name); if (placeholder == null) { diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index e3a7ccecae2e5..ed4b1d631e064 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -25,6 +25,7 @@ esplugin { integTestCluster { module project.project(':modules:mapper-extras') systemProperty 'es.scripting.use_java_time', 'true' + systemProperty 'es.scripting.update.ctx_in_params', 'false' } dependencies { diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index 55b64b0420df1..c38325edd1424 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -28,7 +28,7 @@ * constructors, methods, and fields that can be used within a Painless script at both compile-time * and run-time. * - * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each + * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each * {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and * {@link WhitelistField}s which are what will be available with a Painless script. See each individual * whitelist object for more detail. @@ -56,14 +56,14 @@ public final class Whitelist { Collections.singletonList(WhitelistLoader.loadFromResourceFiles(Whitelist.class, BASE_WHITELIST_FILES)); /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ - public final ClassLoader javaClassLoader; + public final ClassLoader classLoader; /** The {@link List} of all the whitelisted Painless classes. */ - public final List whitelistStructs; + public final List whitelistClasses; /** Standard constructor. All values must be not {@code null}. 
*/ - public Whitelist(ClassLoader javaClassLoader, List whitelistStructs) { - this.javaClassLoader = Objects.requireNonNull(javaClassLoader); - this.whitelistStructs = Collections.unmodifiableList(Objects.requireNonNull(whitelistStructs)); + public Whitelist(ClassLoader classLoader, List whitelistClasses) { + this.classLoader = Objects.requireNonNull(classLoader); + this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java index 12aa5f5bdd634..0b216ae5c2953 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java @@ -30,7 +30,7 @@ * specific context, as long as multiple classes representing the same Java class have the same * class name and have legal constructor/method overloading they can be merged together. * - * Classes in Painless allow for arity overloading for constructors and methods. Arity overloading + * Classes in Painless allow for arity overloading for constructors and methods. Arity overloading * means that multiple constructors are allowed for a single class as long as they have a different * number of parameters, and multiples methods with the same name are allowed for a single class * as long as they have the same return type and a different number of parameters. @@ -40,7 +40,7 @@ */ public final class WhitelistClass { - /** Information about where this class was white-listed from. Can be used for error messages. */ + /** Information about where this class was white-listed from. */ public final String origin; /** The Java class name this class represents. */ @@ -49,7 +49,7 @@ public final class WhitelistClass { /** * Allow the Java class name to only be specified as the fully-qualified name. */ - public final boolean onlyFQNJavaClassName; + public final boolean noImport; /** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */ public final List whitelistConstructors; @@ -61,13 +61,14 @@ public final class WhitelistClass { public final List whitelistFields; /** Standard constructor. All values must be not {@code null}. 
*/ - public WhitelistClass(String origin, String javaClassName, boolean onlyFQNJavaClassName, + public WhitelistClass(String origin, String javaClassName, boolean noImport, List whitelistConstructors, List whitelistMethods, List whitelistFields) { + this.origin = Objects.requireNonNull(origin); this.javaClassName = Objects.requireNonNull(javaClassName); - this.onlyFQNJavaClassName = onlyFQNJavaClassName; + this.noImport = noImport; this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java index 0e70552760208..032ef397def01 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java @@ -25,24 +25,24 @@ /** * Constructor represents the equivalent of a Java constructor available as a whitelisted class - * constructor within Painless. Constructors for Painless classes may be accessed exactly as - * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple + * constructor within Painless. Constructors for Painless classes may be accessed exactly as + * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple * constructors as long as they comply with arity overloading described for {@link WhitelistClass}. */ public final class WhitelistConstructor { - /** Information about where this constructor was whitelisted from. Can be used for error messages. */ + /** Information about where this constructor was whitelisted from. */ public final String origin; /** * A {@link List} of {@link String}s that are the Painless type names for the parameters of the * constructor which can be used to look up the Java constructor through reflection. */ - public final List painlessParameterTypeNames; + public final List canonicalTypeNameParameters; /** Standard constructor. All values must be not {@code null}. */ - public WhitelistConstructor(String origin, List painlessParameterTypeNames) { + public WhitelistConstructor(String origin, List canonicalTypeNameParameters) { this.origin = Objects.requireNonNull(origin); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java index 116aea98fcf89..44ed31a227e1c 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java @@ -23,24 +23,24 @@ /** * Field represents the equivalent of a Java field available as a whitelisted class field - * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes + * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes * are using the '.' 
operator on an existing class variable/field. */ public class WhitelistField { - /** Information about where this method was whitelisted from. Can be used for error messages. */ + /** Information about where this method was whitelisted from. */ public final String origin; - /** The Java field name used to look up the Java field through reflection. */ - public final String javaFieldName; + /** The field name used to look up the field reflection object. */ + public final String fieldName; - /** The Painless type name for the field which can be used to look up the Java field through reflection. */ - public final String painlessFieldTypeName; + /** The canonical type name for the field which can be used to look up the Java field through reflection. */ + public final String canonicalTypeNameParameter; /** Standard constructor. All values must be not {@code null}. */ - public WhitelistField(String origin, String javaFieldName, String painlessFieldTypeName) { + public WhitelistField(String origin, String fieldName, String canonicalTypeNameParameter) { this.origin = Objects.requireNonNull(origin); - this.javaFieldName = Objects.requireNonNull(javaFieldName); - this.painlessFieldTypeName = Objects.requireNonNull(painlessFieldTypeName); + this.fieldName = Objects.requireNonNull(fieldName); + this.canonicalTypeNameParameter = Objects.requireNonNull(canonicalTypeNameParameter); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index b104d03f1ea82..a4a0076626a9c 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -35,14 +35,14 @@ public final class WhitelistLoader { /** - * Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of + * Loads and creates a {@link Whitelist} from one to many text files. The file paths are passed in as an array of * {@link String}s with a single {@link Class} to be be used to load the resources where each {@link String} - * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java + * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java * reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field} * specified as part of the whitelist in the text file. * * A single pass is made through each file to collect all the information about each class, constructor, method, - * and field. Most validation will be done at a later point after all whitelists have been gathered and their + * and field. Most validation will be done at a later point after all whitelists have been gathered and their * merging takes place. * * A painless type name is one of the following: @@ -52,20 +52,20 @@ public final class WhitelistLoader { *
 * <ul>
 *     <li> fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as
 *          a Painless type name with the exception that any dollar symbols used as part of inner classes will
 *          be replaced with dot symbols. </li>
 *     <li> short Java type name - The text after the final dot symbol of any specified Java class. A
- *          short type Java name may be excluded by using the 'only_fqn' token during Painless class parsing
+ *          short type Java name may be excluded by using the 'no_import' token during Painless class parsing
 *          as described later. </li>
 * </ul>
 *
 * The following can be parsed from each whitelist text file:
 * <ul>
 *     <li> Blank lines will be ignored by the parser. </li>
- *     <li> Comments may be created starting with a pound '#' symbol and end with a newline.  These will
+ *     <li> Comments may be created starting with a pound '#' symbol and end with a newline. These will
 *          be ignored by the parser. </li>
 *     <li> Primitive types may be specified starting with 'class' and followed by the Java type name,
 *          an opening bracket, a newline, a closing bracket, and a final newline. </li>
 *     <li> Complex types may be specified starting with 'class' and followed the fully-qualified Java
- *          class name, optionally followed by an 'only_fqn' token, an opening bracket, a newline,
+ *          class name, optionally followed by an 'no_import' token, an opening bracket, a newline,
 *          constructor/method/field specifications, a closing bracket, and a final newline. Within a complex
 *          type the following may be parsed:
 *     <ul>
        @@ -93,10 +93,10 @@ public final class WhitelistLoader { * * Note there must be a one-to-one correspondence of Painless type names to Java type/class names. * If the same Painless type is defined across multiple files and the Java class is the same, all - * specified constructors, methods, and fields will be merged into a single Painless type. The + * specified constructors, methods, and fields will be merged into a single Painless type. The * Painless dynamic type, 'def', used as part of constructor, method, and field definitions will - * be appropriately parsed and handled. Painless complex types must be specified with the - * fully-qualified Java class name. Method argument types, method return types, and field types + * be appropriately parsed and handled. Painless complex types must be specified with the + * fully-qualified Java class name. Method argument types, method return types, and field types * must be specified with Painless type names (def, fully-qualified, or short) as described earlier. * * The following example is used to create a single whitelist text file: @@ -109,7 +109,7 @@ public final class WhitelistLoader { * * # complex types * - * class my.package.Example only_fqn { + * class my.package.Example no_import { * # constructors * () * (int) @@ -132,7 +132,7 @@ public final class WhitelistLoader { * } */ public static Whitelist loadFromResourceFiles(Class resource, String... filepaths) { - List whitelistStructs = new ArrayList<>(); + List whitelistClasses = new ArrayList<>(); // Execute a single pass through the whitelist text files. This will gather all the // constructors, methods, augmented methods, and fields for each whitelisted class. @@ -143,9 +143,9 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep try (LineNumberReader reader = new LineNumberReader( new InputStreamReader(resource.getResourceAsStream(filepath), StandardCharsets.UTF_8))) { - String whitelistStructOrigin = null; + String whitelistClassOrigin = null; String javaClassName = null; - boolean onlyFQNJavaClassName = false; + boolean noImport = false; List whitelistConstructors = null; List whitelistMethods = null; List whitelistFields = null; @@ -160,7 +160,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep } // Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist. - // Expects the following format: 'class' ID 'only_fqn'? '{' '\n' + // Expects the following format: 'class' ID 'no_import'? '{' '\n' if (line.startsWith("class ")) { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { @@ -172,13 +172,13 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep String[] tokens = line.substring(5, line.length() - 1).trim().split("\\s+"); // Ensure the correct number of tokens. - if (tokens.length == 2 && "only_fqn".equals(tokens[1])) { - onlyFQNJavaClassName = true; + if (tokens.length == 2 && "no_import".equals(tokens[1])) { + noImport = true; } else if (tokens.length != 1) { throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]"); } - whitelistStructOrigin = "[" + filepath + "]:[" + number + "]"; + whitelistClassOrigin = "[" + filepath + "]:[" + number + "]"; javaClassName = tokens[0]; // Reset all the constructors, methods, and fields to support a new class. @@ -194,13 +194,13 @@ public static Whitelist loadFromResourceFiles(Class resource, String... 
filep throw new IllegalArgumentException("invalid class definition: extraneous closing bracket"); } - whitelistStructs.add(new WhitelistClass(whitelistStructOrigin, javaClassName, onlyFQNJavaClassName, + whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, noImport, whitelistConstructors, whitelistMethods, whitelistFields)); // Set all the variables to null to ensure a new class definition is found before other parsable values. - whitelistStructOrigin = null; + whitelistClassOrigin = null; javaClassName = null; - onlyFQNJavaClassName = false; + noImport = false; whitelistConstructors = null; whitelistMethods = null; whitelistFields = null; @@ -300,7 +300,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep } ClassLoader loader = AccessController.doPrivileged((PrivilegedAction)resource::getClassLoader); - return new Whitelist(loader, whitelistStructs); + return new Whitelist(loader, whitelistClasses); } private WhitelistLoader() {} diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java index df86619055b08..5cd023a3591ad 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java @@ -25,52 +25,53 @@ /** * Method represents the equivalent of a Java method available as a whitelisted class method - * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes - * are using the '.' operator on an existing class variable/field. Painless classes may have multiple - * methods with the same name as long as they comply with arity overloading described for {@link WhitelistMethod}. + * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes + * are using the '.' operator on an existing class variable/field. Painless classes may have multiple + * methods with the same name as long as they comply with arity overloading described in + * {@link WhitelistClass}. * * Classes may also have additional methods that are not part of the Java class the class represents - - * these are known as augmented methods. An augmented method can be added to a class as a part of any + * these are known as augmented methods. An augmented method can be added to a class as a part of any * Java class as long as the method is static and the first parameter of the method is the Java class - * represented by the class. Note that the augmented method's parent Java class does not need to be + * represented by the class. Note that the augmented method's parent Java class does not need to be * whitelisted. */ public class WhitelistMethod { - /** Information about where this method was whitelisted from. Can be used for error messages. */ + /** Information about where this method was whitelisted from. */ public final String origin; /** - * The Java class name for the owner of an augmented method. If the method is not augmented + * The class name for the owner of an augmented method. If the method is not augmented * this should be {@code null}. */ - public final String javaAugmentedClassName; + public final String augmentedCanonicalClassName; - /** The Java method name used to look up the Java method through reflection. 
*/ - public final String javaMethodName; + /** The method name used to look up the method reflection object. */ + public final String methodName; /** - * The Painless type name for the return type of the method which can be used to look up the Java - * method through reflection. + * The canonical type name for the return type. */ - public final String painlessReturnTypeName; + public final String returnCanonicalTypeName; /** - * A {@link List} of {@link String}s that are the Painless type names for the parameters of the - * method which can be used to look up the Java method through reflection. + * A {@link List} of {@link String}s that are the canonical type names for the parameters of the + * method used to look up the method reflection object. */ - public final List painlessParameterTypeNames; + public final List canonicalTypeNameParameters; /** - * Standard constructor. All values must be not {@code null} with the exception of jAugmentedClass; - * jAugmentedClass will be {@code null} unless the method is augmented as described in the class documentation. + * Standard constructor. All values must be not {@code null} with the exception of + * augmentedCanonicalClassName; augmentedCanonicalClassName will be {@code null} unless the method + * is augmented as described in the class documentation. */ - public WhitelistMethod(String origin, String javaAugmentedClassName, String javaMethodName, - String painlessReturnTypeName, List painlessParameterTypeNames) { + public WhitelistMethod(String origin, String augmentedCanonicalClassName, String methodName, + String returnCanonicalTypeName, List canonicalTypeNameParameters) { this.origin = Objects.requireNonNull(origin); - this.javaAugmentedClassName = javaAugmentedClassName; - this.javaMethodName = javaMethodName; - this.painlessReturnTypeName = Objects.requireNonNull(painlessReturnTypeName); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + this.augmentedCanonicalClassName = augmentedCanonicalClassName; + this.methodName = methodName; + this.returnCanonicalTypeName = Objects.requireNonNull(returnCanonicalTypeName); + this.canonicalTypeNameParameters = Collections.unmodifiableList(Objects.requireNonNull(canonicalTypeNameParameters)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index fe53a3c11001c..588fe8ef5f7cf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -41,421 +41,421 @@ public static PainlessCast getLegalCast(Location location, Class actual, Clas if (actual == def.class) { if (expected == boolean.class) { - return PainlessCast.unboxTo(def.class, Boolean.class, explicit, boolean.class); + return PainlessCast.unboxTargetType(def.class, Boolean.class, explicit, boolean.class); } else if (expected == byte.class) { - return PainlessCast.unboxTo(def.class, Byte.class, explicit, byte.class); + return PainlessCast.unboxTargetType(def.class, Byte.class, explicit, byte.class); } else if (expected == short.class) { - return PainlessCast.unboxTo(def.class, Short.class, explicit, short.class); + return PainlessCast.unboxTargetType(def.class, Short.class, explicit, short.class); } else if (expected == char.class) { - return PainlessCast.unboxTo(def.class, Character.class, explicit, char.class); + return 
PainlessCast.unboxTargetType(def.class, Character.class, explicit, char.class);
            } else if (expected == int.class) {
-                return PainlessCast.unboxTo(def.class, Integer.class, explicit, int.class);
+                return PainlessCast.unboxTargetType(def.class, Integer.class, explicit, int.class);
            } else if (expected == long.class) {
-                return PainlessCast.unboxTo(def.class, Long.class, explicit, long.class);
+                return PainlessCast.unboxTargetType(def.class, Long.class, explicit, long.class);
            } else if (expected == float.class) {
-                return PainlessCast.unboxTo(def.class, Float.class, explicit, float.class);
+                return PainlessCast.unboxTargetType(def.class, Float.class, explicit, float.class);
            } else if (expected == double.class) {
-                return PainlessCast.unboxTo(def.class, Double.class, explicit, double.class);
+                return PainlessCast.unboxTargetType(def.class, Double.class, explicit, double.class);
            }
        } else if (actual == Object.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Byte.class, true, byte.class);
+                return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Short.class, true, short.class);
+                return PainlessCast.unboxTargetType(Object.class, Short.class, true, short.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Character.class, true, char.class);
+                return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class);
            } else if (expected == int.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Integer.class, true, int.class);
+                return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class);
            } else if (expected == long.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Long.class, true, long.class);
+                return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class);
            } else if (expected == float.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Float.class, true, float.class);
+                return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class);
            } else if (expected == double.class && explicit && internal) {
-                return PainlessCast.unboxTo(Object.class, Double.class, true, double.class);
+                return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class);
            }
        } else if (actual == Number.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Byte.class, true, byte.class);
+                return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Short.class, true, short.class);
+                return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Character.class, true, char.class);
+                return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class);
            } else if (expected == int.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Integer.class, true, int.class);
+                return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class);
            } else if (expected == long.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Long.class, true, long.class);
+                return PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class);
            } else if (expected == float.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Float.class, true, float.class);
+                return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class);
            } else if (expected == double.class && explicit && internal) {
-                return PainlessCast.unboxTo(Number.class, Double.class, true, double.class);
+                return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class);
            }
        } else if (actual == String.class) {
            if (expected == char.class && explicit) {
-                return PainlessCast.standard(String.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(String.class, char.class, true);
            }
        } else if (actual == boolean.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Boolean.class, def.class, explicit, boolean.class);
+                return PainlessCast.boxOriginalType(Boolean.class, def.class, explicit, boolean.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Boolean.class, Object.class, explicit, boolean.class);
+                return PainlessCast.boxOriginalType(Boolean.class, Object.class, explicit, boolean.class);
            } else if (expected == Boolean.class && internal) {
-                return PainlessCast.boxTo(boolean.class, boolean.class, explicit, boolean.class);
+                return PainlessCast.boxTargetType(boolean.class, boolean.class, explicit, boolean.class);
            }
        } else if (actual == byte.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Byte.class, def.class, explicit, byte.class);
+                return PainlessCast.boxOriginalType(Byte.class, def.class, explicit, byte.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Byte.class, Object.class, explicit, byte.class);
+                return PainlessCast.boxOriginalType(Byte.class, Object.class, explicit, byte.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Byte.class, Number.class, explicit, byte.class);
+                return PainlessCast.boxOriginalType(Byte.class, Number.class, explicit, byte.class);
            } else if (expected == short.class) {
-                return PainlessCast.standard(byte.class, short.class, explicit);
+                return PainlessCast.originalTypetoTargetType(byte.class, short.class, explicit);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(byte.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(byte.class, char.class, true);
            } else if (expected == int.class) {
-                return PainlessCast.standard(byte.class, int.class, explicit);
+                return PainlessCast.originalTypetoTargetType(byte.class, int.class, explicit);
            } else if (expected == long.class) {
-                return PainlessCast.standard(byte.class, long.class, explicit);
+                return PainlessCast.originalTypetoTargetType(byte.class, long.class, explicit);
            } else if (expected == float.class) {
-                return PainlessCast.standard(byte.class, float.class, explicit);
+                return PainlessCast.originalTypetoTargetType(byte.class, float.class, explicit);
            } else if (expected == double.class) {
-                return PainlessCast.standard(byte.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(byte.class, double.class, explicit);
            } else if (expected == Byte.class && internal) {
-                return PainlessCast.boxTo(byte.class, byte.class, explicit, byte.class);
+                return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class);
            } else if (expected == Short.class && internal) {
-                return PainlessCast.boxTo(byte.class, short.class, explicit, short.class);
+                return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(byte.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(byte.class, char.class, true, char.class);
            } else if (expected == Integer.class && internal) {
-                return PainlessCast.boxTo(byte.class, int.class, explicit, int.class);
+                return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class);
            } else if (expected == Long.class && internal) {
-                return PainlessCast.boxTo(byte.class, long.class, explicit, long.class);
+                return PainlessCast.boxTargetType(byte.class, long.class, explicit, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(byte.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(byte.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(byte.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(byte.class, double.class, explicit, double.class);
            }
        } else if (actual == short.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Short.class, def.class, explicit, short.class);
+                return PainlessCast.boxOriginalType(Short.class, def.class, explicit, short.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Short.class, Object.class, explicit, short.class);
+                return PainlessCast.boxOriginalType(Short.class, Object.class, explicit, short.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Short.class, Number.class, explicit, short.class);
+                return PainlessCast.boxOriginalType(Short.class, Number.class, explicit, short.class);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(short.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(short.class, byte.class, true);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(short.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(short.class, char.class, true);
            } else if (expected == int.class) {
-                return PainlessCast.standard(short.class, int.class, explicit);
+                return PainlessCast.originalTypetoTargetType(short.class, int.class, explicit);
            } else if (expected == long.class) {
-                return PainlessCast.standard(short.class, long.class, explicit);
+                return PainlessCast.originalTypetoTargetType(short.class, long.class, explicit);
            } else if (expected == float.class) {
-                return PainlessCast.standard(short.class, float.class, explicit);
+                return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit);
            } else if (expected == double.class) {
-                return PainlessCast.standard(short.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(short.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class);
            } else if (expected == Short.class && internal) {
-                return PainlessCast.boxTo(short.class, short.class, explicit, short.class);
+                return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(short.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(short.class, char.class, true, char.class);
            } else if (expected == Integer.class && internal) {
-                return PainlessCast.boxTo(short.class, int.class, explicit, int.class);
+                return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class);
            } else if (expected == Long.class && internal) {
-                return PainlessCast.boxTo(short.class, long.class, explicit, long.class);
+                return PainlessCast.boxTargetType(short.class, long.class, explicit, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(short.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(short.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(short.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(short.class, double.class, explicit, double.class);
            }
        } else if (actual == char.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Character.class, def.class, explicit, char.class);
+                return PainlessCast.boxOriginalType(Character.class, def.class, explicit, char.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Character.class, Object.class, explicit, char.class);
+                return PainlessCast.boxOriginalType(Character.class, Object.class, explicit, char.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Character.class, Number.class, explicit, char.class);
+                return PainlessCast.boxOriginalType(Character.class, Number.class, explicit, char.class);
            } else if (expected == String.class) {
-                return PainlessCast.standard(char.class, String.class, explicit);
+                return PainlessCast.originalTypetoTargetType(char.class, String.class, explicit);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(char.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(char.class, byte.class, true);
            } else if (expected == short.class && explicit) {
-                return PainlessCast.standard(char.class, short.class, true);
+                return PainlessCast.originalTypetoTargetType(char.class, short.class, true);
            } else if (expected == int.class) {
-                return PainlessCast.standard(char.class, int.class, explicit);
+                return PainlessCast.originalTypetoTargetType(char.class, int.class, explicit);
            } else if (expected == long.class) {
-                return PainlessCast.standard(char.class, long.class, explicit);
+                return PainlessCast.originalTypetoTargetType(char.class, long.class, explicit);
            } else if (expected == float.class) {
-                return PainlessCast.standard(char.class, float.class, explicit);
+                return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit);
            } else if (expected == double.class) {
-                return PainlessCast.standard(char.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(char.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class);
            } else if (expected == Short.class && internal) {
-                return PainlessCast.boxTo(char.class, short.class, explicit, short.class);
+                return PainlessCast.boxTargetType(char.class, short.class, explicit, short.class);
            } else if (expected == Character.class && internal) {
-                return PainlessCast.boxTo(char.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(char.class, char.class, true, char.class);
            } else if (expected == Integer.class && internal) {
-                return PainlessCast.boxTo(char.class, int.class, explicit, int.class);
+                return PainlessCast.boxTargetType(char.class, int.class, explicit, int.class);
            } else if (expected == Long.class && internal) {
-                return PainlessCast.boxTo(char.class, long.class, explicit, long.class);
+                return PainlessCast.boxTargetType(char.class, long.class, explicit, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(char.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(char.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(char.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(char.class, double.class, explicit, double.class);
            }
        } else if (actual == int.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Integer.class, def.class, explicit, int.class);
+                return PainlessCast.boxOriginalType(Integer.class, def.class, explicit, int.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Integer.class, Object.class, explicit, int.class);
+                return PainlessCast.boxOriginalType(Integer.class, Object.class, explicit, int.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Integer.class, Number.class, explicit, int.class);
+                return PainlessCast.boxOriginalType(Integer.class, Number.class, explicit, int.class);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(int.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(int.class, byte.class, true);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(int.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(int.class, char.class, true);
            } else if (expected == short.class && explicit) {
-                return PainlessCast.standard(int.class, short.class, true);
+                return PainlessCast.originalTypetoTargetType(int.class, short.class, true);
            } else if (expected == long.class) {
-                return PainlessCast.standard(int.class, long.class, explicit);
+                return PainlessCast.originalTypetoTargetType(int.class, long.class, explicit);
            } else if (expected == float.class) {
-                return PainlessCast.standard(int.class, float.class, explicit);
+                return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit);
            } else if (expected == double.class) {
-                return PainlessCast.standard(int.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(int.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class);
            } else if (expected == Short.class && explicit && internal) {
-                return PainlessCast.boxTo(int.class, short.class, true, short.class);
+                return PainlessCast.boxTargetType(int.class, short.class, true, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(int.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(int.class, char.class, true, char.class);
            } else if (expected == Integer.class && internal) {
-                return PainlessCast.boxTo(int.class, int.class, explicit, int.class);
+                return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class);
            } else if (expected == Long.class && internal) {
-                return PainlessCast.boxTo(int.class, long.class, explicit, long.class);
+                return PainlessCast.boxTargetType(int.class, long.class, explicit, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(int.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(int.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(int.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(int.class, double.class, explicit, double.class);
            }
        } else if (actual == long.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Long.class, def.class, explicit, long.class);
+                return PainlessCast.boxOriginalType(Long.class, def.class, explicit, long.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Long.class, Object.class, explicit, long.class);
+                return PainlessCast.boxOriginalType(Long.class, Object.class, explicit, long.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Long.class, Number.class, explicit, long.class);
+                return PainlessCast.boxOriginalType(Long.class, Number.class, explicit, long.class);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(long.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(long.class, byte.class, true);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(long.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(long.class, char.class, true);
            } else if (expected == short.class && explicit) {
-                return PainlessCast.standard(long.class, short.class, true);
+                return PainlessCast.originalTypetoTargetType(long.class, short.class, true);
            } else if (expected == int.class && explicit) {
-                return PainlessCast.standard(long.class, int.class, true);
+                return PainlessCast.originalTypetoTargetType(long.class, int.class, true);
            } else if (expected == float.class) {
-                return PainlessCast.standard(long.class, float.class, explicit);
+                return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit);
            } else if (expected == double.class) {
-                return PainlessCast.standard(long.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(long.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class);
            } else if (expected == Short.class && explicit && internal) {
-                return PainlessCast.boxTo(long.class, short.class, true, short.class);
+                return PainlessCast.boxTargetType(long.class, short.class, true, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(long.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(long.class, char.class, true, char.class);
            } else if (expected == Integer.class && explicit && internal) {
-                return PainlessCast.boxTo(long.class, int.class, true, int.class);
+                return PainlessCast.boxTargetType(long.class, int.class, true, int.class);
            } else if (expected == Long.class && internal) {
-                return PainlessCast.boxTo(long.class, long.class, explicit, long.class);
+                return PainlessCast.boxTargetType(long.class, long.class, explicit, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(long.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(long.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(long.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(long.class, double.class, explicit, double.class);
            }
        } else if (actual == float.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Float.class, def.class, explicit, float.class);
+                return PainlessCast.boxOriginalType(Float.class, def.class, explicit, float.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Float.class, Object.class, explicit, float.class);
+                return PainlessCast.boxOriginalType(Float.class, Object.class, explicit, float.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Float.class, Number.class, explicit, float.class);
+                return PainlessCast.boxOriginalType(Float.class, Number.class, explicit, float.class);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(float.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(float.class, byte.class, true);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(float.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(float.class, char.class, true);
            } else if (expected == short.class && explicit) {
-                return PainlessCast.standard(float.class, short.class, true);
+                return PainlessCast.originalTypetoTargetType(float.class, short.class, true);
            } else if (expected == int.class && explicit) {
-                return PainlessCast.standard(float.class, int.class, true);
+                return PainlessCast.originalTypetoTargetType(float.class, int.class, true);
            } else if (expected == long.class && explicit) {
-                return PainlessCast.standard(float.class, long.class, true);
+                return PainlessCast.originalTypetoTargetType(float.class, long.class, true);
            } else if (expected == double.class) {
-                return PainlessCast.standard(float.class, double.class, explicit);
+                return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(float.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class);
            } else if (expected == Short.class && explicit && internal) {
-                return PainlessCast.boxTo(float.class, short.class, true, short.class);
+                return PainlessCast.boxTargetType(float.class, short.class, true, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(float.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(float.class, char.class, true, char.class);
            } else if (expected == Integer.class && explicit && internal) {
-                return PainlessCast.boxTo(float.class, int.class, true, int.class);
+                return PainlessCast.boxTargetType(float.class, int.class, true, int.class);
            } else if (expected == Long.class && explicit && internal) {
-                return PainlessCast.boxTo(float.class, long.class, true, long.class);
+                return PainlessCast.boxTargetType(float.class, long.class, true, long.class);
            } else if (expected == Float.class && internal) {
-                return PainlessCast.boxTo(float.class, float.class, explicit, float.class);
+                return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(float.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(float.class, double.class, explicit, double.class);
            }
        } else if (actual == double.class) {
            if (expected == def.class) {
-                return PainlessCast.boxFrom(Double.class, def.class, explicit, double.class);
+                return PainlessCast.boxOriginalType(Double.class, def.class, explicit, double.class);
            } else if (expected == Object.class && internal) {
-                return PainlessCast.boxFrom(Double.class, Object.class, explicit, double.class);
+                return PainlessCast.boxOriginalType(Double.class, Object.class, explicit, double.class);
            } else if (expected == Number.class && internal) {
-                return PainlessCast.boxFrom(Double.class, Number.class, explicit, double.class);
+                return PainlessCast.boxOriginalType(Double.class, Number.class, explicit, double.class);
            } else if (expected == byte.class && explicit) {
-                return PainlessCast.standard(double.class, byte.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, byte.class, true);
            } else if (expected == char.class && explicit) {
-                return PainlessCast.standard(double.class, char.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, char.class, true);
            } else if (expected == short.class && explicit) {
-                return PainlessCast.standard(double.class, short.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, short.class, true);
            } else if (expected == int.class && explicit) {
-                return PainlessCast.standard(double.class, int.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, int.class, true);
            } else if (expected == long.class && explicit) {
-                return PainlessCast.standard(double.class, long.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, long.class, true);
            } else if (expected == float.class && explicit) {
-                return PainlessCast.standard(double.class, float.class, true);
+                return PainlessCast.originalTypetoTargetType(double.class, float.class, true);
            } else if (expected == Byte.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, byte.class, true, byte.class);
+                return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class);
            } else if (expected == Short.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, short.class, true, short.class);
+                return PainlessCast.boxTargetType(double.class, short.class, true, short.class);
            } else if (expected == Character.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, char.class, true, char.class);
+                return PainlessCast.boxTargetType(double.class, char.class, true, char.class);
            } else if (expected == Integer.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, int.class, true, int.class);
+                return PainlessCast.boxTargetType(double.class, int.class, true, int.class);
            } else if (expected == Long.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, long.class, true, long.class);
+                return PainlessCast.boxTargetType(double.class, long.class, true, long.class);
            } else if (expected == Float.class && explicit && internal) {
-                return PainlessCast.boxTo(double.class, float.class, true, float.class);
+                return PainlessCast.boxTargetType(double.class, float.class, true, float.class);
            } else if (expected == Double.class && internal) {
-                return PainlessCast.boxTo(double.class, double.class, explicit, double.class);
+                return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class);
            }
        } else if (actual == Boolean.class) {
            if (expected == boolean.class && internal) {
-                return PainlessCast.unboxFrom(boolean.class, boolean.class, explicit, boolean.class);
+                return PainlessCast.unboxOriginalType(boolean.class, boolean.class, explicit, boolean.class);
            }
        } else if (actual == Byte.class) {
            if (expected == byte.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, byte.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class);
            } else if (expected == short.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, short.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(byte.class, char.class, true, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class);
            } else if (expected == int.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, int.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class);
            } else if (expected == long.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, long.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, long.class, explicit, byte.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, float.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(byte.class, double.class, explicit, byte.class);
+                return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class);
            }
        } else if (actual == Short.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(short.class, byte.class, true, short.class);
+                return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class);
            } else if (expected == short.class && internal) {
-                return PainlessCast.unboxFrom(short.class, short.class, explicit, short.class);
+                return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(short.class, char.class, true, short.class);
+                return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class);
            } else if (expected == int.class && internal) {
-                return PainlessCast.unboxFrom(short.class, int.class, explicit, short.class);
+                return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class);
            } else if (expected == long.class && internal) {
-                return PainlessCast.unboxFrom(short.class, long.class, explicit, short.class);
+                return PainlessCast.unboxOriginalType(short.class, long.class, explicit, short.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(short.class, float.class, explicit, short.class);
+                return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(short.class, double.class, explicit, short.class);
+                return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class);
            }
        } else if (actual == Character.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(char.class, byte.class, true, char.class);
+                return PainlessCast.unboxOriginalType(char.class, byte.class, true, char.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxFrom(char.class, short.class, true, char.class);
+                return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class);
            } else if (expected == char.class && internal) {
-                return PainlessCast.unboxFrom(char.class, char.class, explicit, char.class);
+                return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class);
            } else if (expected == int.class && internal) {
-                return PainlessCast.unboxFrom(char.class, int.class, explicit, char.class);
+                return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class);
            } else if (expected == long.class && internal) {
-                return PainlessCast.unboxFrom(char.class, long.class, explicit, char.class);
+                return PainlessCast.unboxOriginalType(char.class, long.class, explicit, char.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(char.class, float.class, explicit, char.class);
+                return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(char.class, double.class, explicit, char.class);
+                return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class);
            }
        } else if (actual == Integer.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(int.class, byte.class, true, int.class);
+                return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxFrom(int.class, short.class, true, int.class);
+                return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(int.class, char.class, true, int.class);
+                return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class);
            } else if (expected == int.class && internal) {
-                return PainlessCast.unboxFrom(int.class, int.class, explicit, int.class);
+                return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class);
            } else if (expected == long.class && internal) {
-                return PainlessCast.unboxFrom(int.class, long.class, explicit, int.class);
+                return PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(int.class, float.class, explicit, int.class);
+                return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(int.class, double.class, explicit, int.class);
+                return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class);
            }
        } else if (actual == Long.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(long.class, byte.class, true, long.class);
+                return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxFrom(long.class, short.class, true, long.class);
+                return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(long.class, char.class, true, long.class);
+                return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class);
            } else if (expected == int.class && explicit && internal) {
-                return PainlessCast.unboxFrom(long.class, int.class, true, long.class);
+                return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class);
            } else if (expected == long.class && internal) {
-                return PainlessCast.unboxFrom(long.class, long.class, explicit, long.class);
+                return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(long.class, float.class, explicit, long.class);
+                return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(long.class, double.class, explicit, long.class);
+                return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class);
            }
        } else if (actual == Float.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(float.class, byte.class, true, float.class);
+                return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxFrom(float.class, short.class, true, float.class);
+                return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(float.class, char.class, true, float.class);
+                return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class);
            } else if (expected == int.class && explicit && internal) {
-                return PainlessCast.unboxFrom(float.class, int.class, true, float.class);
+                return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class);
            } else if (expected == long.class && explicit && internal) {
-                return PainlessCast.unboxFrom(float.class, long.class, true, float.class);
+                return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class);
            } else if (expected == float.class && internal) {
-                return PainlessCast.unboxFrom(float.class, float.class, explicit, float.class);
+                return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(float.class, double.class, explicit, float.class);
+                return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class);
            }
        } else if (actual == Double.class) {
            if (expected == byte.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, byte.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class);
            } else if (expected == short.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, short.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class);
            } else if (expected == char.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, char.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class);
            } else if (expected == int.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, int.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, int.class, true, double.class);
            } else if (expected == long.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, long.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, long.class, true, double.class);
            } else if (expected == float.class && explicit && internal) {
-                return PainlessCast.unboxFrom(double.class, float.class, true, double.class);
+                return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class);
            } else if (expected == double.class && internal) {
-                return PainlessCast.unboxFrom(double.class, double.class, explicit, double.class);
+                return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class);
            }
        }
@@ -463,7 +463,7 @@ public static PainlessCast getLegalCast(Location location, Class<?> actual, Class<?> expected, boolean explicit, boolean internal) {
            (actual != void.class && expected == def.class) ||
            expected.isAssignableFrom(actual) ||
            (actual.isAssignableFrom(expected) && explicit)) {
-            return PainlessCast.standard(actual, expected, explicit);
+            return PainlessCast.originalTypetoTargetType(actual, expected, explicit);
        } else {
            throw location.createError(new ClassCastException("Cannot cast from " +
                "[" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] to " +
@@ -472,8 +472,8 @@
    }

    public static Object constCast(Location location, Object constant, PainlessCast cast) {
-        Class<?> fsort = cast.from;
-        Class<?> tsort = cast.to;
+        Class<?> fsort = cast.originalType;
+        Class<?> tsort = cast.targetType;

        if (fsort == tsort) {
            return constant;
@@ -499,11 +499,11 @@ public static Object constCast(Location location, Object constant, PainlessCast
            else if (tsort == double.class) return number.doubleValue();
            else {
                throw location.createError(new IllegalStateException("Cannot cast from " +
-                    "[" + cast.from.getCanonicalName() + "] to [" + cast.to.getCanonicalName() + "]."));
+                    "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "]."));
            }
        } else {
            throw location.createError(new IllegalStateException("Cannot cast from " +
-                "[" + cast.from.getCanonicalName() + "] to [" + cast.to.getCanonicalName() + "]."));
+                "[" + cast.originalType.getCanonicalName() + "] to [" + cast.targetType.getCanonicalName() + "]."));
        }
    }
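For orientation: the AnalyzerCaster hunks above are a mechanical rename of the PainlessCast factory methods (standard -> originalTypetoTargetType, boxFrom/boxTo -> boxOriginalType/boxTargetType, unboxFrom/unboxTo -> unboxOriginalType/unboxTargetType), mirroring the from/to -> originalType/targetType field rename visible in the constCast hunk. The new PainlessCast class itself is not part of this excerpt, so the following is only a minimal sketch of what the renamed factories plausibly construct, inferred from the call sites; the constructor wiring and field comments are assumptions, not the PR's actual implementation.

    // Sketch only: renamed cast factories as implied by the call sites above.
    public class PainlessCast {

        public final Class<?> originalType;      // type the value starts as
        public final Class<?> targetType;        // type the value is cast to
        public final boolean explicitCast;       // whether an explicit (Type) cast was written
        public final Class<?> unboxOriginalType; // primitive unboxed on the original side, if any
        public final Class<?> unboxTargetType;   // primitive unboxed on the target side, if any
        public final Class<?> boxOriginalType;   // primitive boxed on the original side, if any
        public final Class<?> boxTargetType;     // primitive boxed on the target side, if any

        private PainlessCast(Class<?> originalType, Class<?> targetType, boolean explicitCast,
                Class<?> unboxOriginalType, Class<?> unboxTargetType,
                Class<?> boxOriginalType, Class<?> boxTargetType) {
            this.originalType = originalType;
            this.targetType = targetType;
            this.explicitCast = explicitCast;
            this.unboxOriginalType = unboxOriginalType;
            this.unboxTargetType = unboxTargetType;
            this.boxOriginalType = boxOriginalType;
            this.boxTargetType = boxTargetType;
        }

        /** a direct cast from the original type to the target type */
        public static PainlessCast originalTypetoTargetType(Class<?> originalType, Class<?> targetType, boolean explicitCast) {
            return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null);
        }

        /** a cast that unboxes on the original side, e.g. Integer -> int -> long */
        public static PainlessCast unboxOriginalType(
                Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> unboxOriginalType) {
            return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null);
        }

        /** a cast that unboxes on the target side, e.g. def -> Integer -> int */
        public static PainlessCast unboxTargetType(
                Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> unboxTargetType) {
            return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null);
        }

        /** a cast that boxes on the original side, e.g. int -> Integer -> def */
        public static PainlessCast boxOriginalType(
                Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> boxOriginalType) {
            return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null);
        }

        /** a cast that boxes on the target side, e.g. byte -> short -> Short */
        public static PainlessCast boxTargetType(
                Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> boxTargetType) {
            return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType);
        }
    }

The advantage of one field per box/unbox slot is that the bytecode writer can emit the extra conversion on exactly one side of the cast without consulting the method name that created it.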
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
index 2096d7d797025..97dddbdfe52c5 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.painless;

 import org.elasticsearch.bootstrap.BootstrapInfo;
+import org.elasticsearch.painless.Locals.LocalMethod;
 import org.elasticsearch.painless.antlr.Walker;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.node.SSource;
@@ -32,6 +33,7 @@
 import java.security.CodeSource;
 import java.security.SecureClassLoader;
 import java.security.cert.Certificate;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;

 import static org.elasticsearch.painless.WriterConstants.CLASS_NAME;
@@ -200,7 +202,7 @@ Constructor<?> compile(Loader loader, MainMethodReserved reserved, String name,
         ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, scriptClass);
         SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null);

-        root.analyze(painlessLookup);
+        Map<String, LocalMethod> localMethods = root.analyze(painlessLookup);
         root.write();

         try {
@@ -209,6 +211,7 @@ Constructor<?> compile(Loader loader, MainMethodReserved reserved, String name,
             clazz.getField("$SOURCE").set(null, source);
             clazz.getField("$STATEMENTS").set(null, root.getStatements());
             clazz.getField("$DEFINITION").set(null, painlessLookup);
+            clazz.getField("$LOCALS").set(null, localMethods);

             return clazz.getConstructors()[0];
         } catch (Exception exception) {
             // Catch everything to let the user know this is something caused internally.
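The Compiler hunks above make analysis return the script's user-defined methods and publish them on the generated script class through a new $LOCALS static field, written reflectively just like $SOURCE, $STATEMENTS, and $DEFINITION. A self-contained sketch of that reflection pattern; GeneratedScript and the map values here are hypothetical stand-ins (in the real compiler the field is emitted into bytecode by ASM and only written via reflection):

    import java.util.HashMap;
    import java.util.Map;

    public class LocalsFieldSketch {

        // hypothetical stand-in for the bytecode-generated script class
        public static class GeneratedScript {
            public static Map<String, Object> $LOCALS;
        }

        public static void main(String[] args) throws ReflectiveOperationException {
            Map<String, Object> localMethods = new HashMap<>();
            localMethods.put("factorial/1", new Object()); // hypothetical "name/arity" key -> method metadata

            // same pattern as clazz.getField("$LOCALS").set(null, localMethods) in the hunk above
            Class<?> clazz = GeneratedScript.class;
            clazz.getField("$LOCALS").set(null, localMethods);

            System.out.println(GeneratedScript.$LOCALS.keySet()); // prints [factorial/1]
        }
    }

Stashing the map on a static field lets the invokedynamic bootstrap (below) reach user-defined methods at runtime without re-running analysis.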
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
index 10806b64d0e95..1e17d6024d4d1 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java
@@ -19,7 +19,7 @@
 package org.elasticsearch.painless;

-import org.elasticsearch.painless.lookup.PainlessClass;
+import org.elasticsearch.painless.Locals.LocalMethod;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessLookupUtility;
 import org.elasticsearch.painless.lookup.PainlessMethod;
@@ -37,6 +37,8 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

+import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName;
+
 /**
  * Support for dynamic type (def).
  * <p>
@@ -166,52 +168,6 @@ static MethodHandle arrayLengthGetter(Class<?> arrayType) {
         }
     }

-    /**
-     * Looks up method entry for a dynamic method call.
-     * <p>
-     * A dynamic method call for variable {@code x} of type {@code def} looks like:
-     * {@code x.method(args...)}
-     * <p>
-     * This method traverses {@code recieverClass}'s class hierarchy (including interfaces)
-     * until it finds a matching whitelisted method. If one is not found, it throws an exception.
-     * Otherwise it returns the matching method.
-     * <p>
-     * @params painlessLookup the whitelist
-     * @param receiverClass Class of the object to invoke the method on.
-     * @param name Name of the method.
-     * @param arity arity of method
-     * @return matching method to invoke. never returns null.
-     * @throws IllegalArgumentException if no matching whitelisted method was found.
-     */
-    static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class<?> receiverClass, String name, int arity) {
-        String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity);
-        // check whitelist for matching method
-        for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-            PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
-
-            if (struct != null) {
-                PainlessMethod method = struct.methods.get(key);
-                if (method != null) {
-                    return method;
-                }
-            }
-
-            for (Class<?> iface : clazz.getInterfaces()) {
-                struct = painlessLookup.lookupPainlessClass(iface);
-
-                if (struct != null) {
-                    PainlessMethod method = struct.methods.get(key);
-                    if (method != null) {
-                        return method;
-                    }
-                }
-            }
-        }
-
-        throw new IllegalArgumentException("Unable to find dynamic method [" + name + "] with [" + arity + "] arguments " +
-                "for class [" + receiverClass.getCanonicalName() + "].");
-    }
-
     /**
      * Looks up handle for a dynamic method call, with lambda replacement
     * <p>
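The deleted lookupMethodInternal walked the receiver's superclass chain and each class's directly implemented interfaces until a whitelisted method matched. The hunks below replace it with PainlessLookup.lookupRuntimePainlessMethod, which is not shown in this diff; assuming it centralizes the same traversal, the callers now just null-check and throw. A generic sketch of the traversal the deleted code performed:

    import java.util.function.Function;

    public class HierarchyLookupSketch {

        // Generic form of the deleted traversal: walk the receiver's superclass chain,
        // checking each class and then its directly implemented interfaces, returning
        // the first hit or null. (Super-interfaces of interfaces were not visited.)
        static <T> T lookupInHierarchy(Class<?> receiverClass, Function<Class<?>, T> lookupOne) {
            for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
                T found = lookupOne.apply(clazz);
                if (found != null) {
                    return found;
                }
                for (Class<?> iface : clazz.getInterfaces()) {
                    found = lookupOne.apply(iface);
                    if (found != null) {
                        return found;
                    }
                }
            }
            return null; // the new call sites turn null into "dynamic method ... not found"
        }
    }

Returning null instead of throwing moves the error message out of the traversal, which is why each caller below constructs its own "not found" exception.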
@@ -232,13 +188,22 @@ static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class<?> receiverClass, String name, int arity) {
      * @throws IllegalArgumentException if no matching whitelisted method was found.
      * @throws Throwable if a method reference cannot be converted to an functional interface
      */
-    static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType,
-            Class<?> receiverClass, String name, Object args[]) throws Throwable {
+    static MethodHandle lookupMethod(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
+            MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType, Class<?> receiverClass, String name, Object args[])
+            throws Throwable {
+
         String recipeString = (String) args[0];
         int numArguments = callSiteType.parameterCount();
         // simple case: no lambdas
         if (recipeString.isEmpty()) {
-            return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).methodHandle;
+            PainlessMethod painlessMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, numArguments - 1);
+
+            if (painlessMethod == null) {
+                throw new IllegalArgumentException("dynamic method " +
+                        "[" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + (numArguments - 1) + "] not found");
+            }
+
+            return painlessMethod.methodHandle;
         }

         // convert recipe string to a bitset for convenience (the code below should be refactored...)
@@ -261,7 +226,13 @@ static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType,
         // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters).
         // based on these we can finally link any remaining lambdas that were deferred.
-        PainlessMethod method = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
+        PainlessMethod method = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity);
+
+        if (method == null) {
+            throw new IllegalArgumentException(
+                    "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found");
+        }
+
         MethodHandle handle = method.methodHandle;

         int replaced = 0;
@@ -276,27 +247,29 @@ static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType,
                 String type = signature.substring(1, separator);
                 String call = signature.substring(separator+1, separator2);
                 int numCaptures = Integer.parseInt(signature.substring(separator2+1));
-                Class<?> captures[] = new Class<?>[numCaptures];
-                for (int capture = 0; capture < captures.length; capture++) {
-                    captures[capture] = callSiteType.parameterType(i + 1 + capture);
-                }
                 MethodHandle filter;
                 Class<?> interfaceType = method.typeParameters.get(i - 1 - replaced);
                 if (signature.charAt(0) == 'S') {
                     // the implementation is strongly typed, now that we know the interface type,
                     // we have everything.
                     filter = lookupReferenceInternal(painlessLookup,
+                                                     localMethods,
                                                      methodHandlesLookup,
                                                      interfaceType,
                                                      type,
                                                      call,
-                                                     captures);
+                                                     numCaptures);
                 } else if (signature.charAt(0) == 'D') {
                     // the interface type is now known, but we need to get the implementation.
                     // this is dynamically based on the receiver type (and cached separately, underneath
                     // this cache). It won't blow up since we never nest here (just references)
+                    Class<?> captures[] = new Class<?>[numCaptures];
+                    for (int capture = 0; capture < captures.length; capture++) {
+                        captures[capture] = callSiteType.parameterType(i + 1 + capture);
+                    }
                     MethodType nestedType = MethodType.methodType(interfaceType, captures);
                     CallSite nested = DefBootstrap.bootstrap(painlessLookup,
+                                                             localMethods,
                                                              methodHandlesLookup,
                                                              call,
                                                              nestedType,
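As the hunk above shows, each deferred lambda or method-reference argument is encoded in a signature string whose first character distinguishes a statically typed reference ('S') from a dynamic, receiver-typed one ('D'), followed by the type, the call, and the capture count. The separator positions are computed earlier in lookupMethod, outside this excerpt, so the exact format and the lastIndexOf calls in this sketch are assumptions that merely match the substring(...) indices above:

    public class SignatureParseSketch {

        public static void main(String[] args) {
            // hypothetical recipe entry: kind + type + "." + call + "," + captures
            String signature = "Sjava.lang.Integer.parseInt,0";

            // assumption: separators found from the right, consistent with the hunk above
            int separator = signature.lastIndexOf('.');
            int separator2 = signature.lastIndexOf(',');

            char kind = signature.charAt(0);
            String type = signature.substring(1, separator);
            String call = signature.substring(separator + 1, separator2);
            int numCaptures = Integer.parseInt(signature.substring(separator2 + 1));

            System.out.println(kind + " / " + type + " / " + call + " / " + numCaptures);
            // prints: S / java.lang.Integer / parseInt / 0
        }
    }

Note the behavioral shift in the hunk itself: the captures array is now built only in the 'D' branch, since the 'S' branch passes a plain capture count to lookupReferenceInternal.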
@@ -324,70 +297,44 @@ static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType,
      * This is just like LambdaMetaFactory, only with a dynamic type. The interface type is known,
      * so we simply need to lookup the matching implementation method based on receiver type.
      */
-    static MethodHandle lookupReference(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String interfaceClass,
-                                        Class<?> receiverClass, String name) throws Throwable {
-        Class<?> interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass);
-        PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(interfaceType).functionalMethod;
-        if (interfaceMethod == null) {
-            throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
-        }
-        int arity = interfaceMethod.typeParameters.size();
-        PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity);
-        return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType,
-                PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass),
-                implMethod.javaMethod.getName(), receiverClass);
+    static MethodHandle lookupReference(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
+            MethodHandles.Lookup methodHandlesLookup, String interfaceClass, Class<?> receiverClass, String name) throws Throwable {
+        Class<?> interfaceType = painlessLookup.canonicalTypeNameToType(interfaceClass);
+        if (interfaceType == null) {
+            throw new IllegalArgumentException("type [" + interfaceClass + "] not found");
+        }
+        PainlessMethod interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(interfaceType);
+        if (interfaceMethod == null) {
+            throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface");
+        }
+        int arity = interfaceMethod.typeParameters.size();
+        PainlessMethod implMethod = painlessLookup.lookupRuntimePainlessMethod(receiverClass, name, arity);
+        if (implMethod == null) {
+            throw new IllegalArgumentException(
+                    "dynamic method [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "/" + arity + "] not found");
+        }
+
+        return lookupReferenceInternal(painlessLookup, localMethods, methodHandlesLookup,
+                interfaceType, PainlessLookupUtility.typeToCanonicalTypeName(implMethod.targetClass),
+                implMethod.javaMethod.getName(), 1);
     }

     /** Returns a method handle to an implementation of clazz, given method reference signature. */
-    private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup,
-                                                        Class<?> clazz, String type, String call, Class<?>... captures)
-            throws Throwable {
-        final FunctionRef ref;
-        if ("this".equals(type)) {
-            // user written method
-            PainlessMethod interfaceMethod = painlessLookup.lookupPainlessClass(clazz).functionalMethod;
-            if (interfaceMethod == null) {
-                throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " +
-                        "to [" + PainlessLookupUtility.typeToCanonicalTypeName(clazz) + "], not a functional interface");
-            }
-            int arity = interfaceMethod.typeParameters.size() + captures.length;
-            final MethodHandle handle;
-            try {
-                MethodHandle accessor = methodHandlesLookup.findStaticGetter(methodHandlesLookup.lookupClass(),
-                        getUserFunctionHandleFieldName(call, arity),
-                        MethodHandle.class);
-                handle = (MethodHandle)accessor.invokeExact();
-            } catch (NoSuchFieldException | IllegalAccessException e) {
-                // is it a synthetic method? If we generated the method ourselves, be more helpful. It can only fail
-                // because the arity does not match the expected interface type.
-                if (call.contains("$")) {
-                    throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() +
-                            "] in [" + clazz + "]");
-                }
-                throw new IllegalArgumentException("Unknown call [" + call + "] with [" + arity + "] arguments.");
-            }
-            ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length);
-        } else {
-            // whitelist lookup
-            ref = FunctionRef.resolveFromLookup(painlessLookup, clazz, type, call, captures.length);
-        }
-        final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
-            methodHandlesLookup,
-            ref.interfaceMethodName,
-            ref.factoryMethodType,
-            ref.interfaceMethodType,
-            ref.delegateClassName,
-            ref.delegateInvokeType,
-            ref.delegateMethodName,
-            ref.delegateMethodType,
-            ref.isDelegateInterface ? 1 : 0
-        );
-        return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, captures));
-    }
-
-    /** gets the field name used to lookup up the MethodHandle for a function. */
-    public static String getUserFunctionHandleFieldName(String name, int arity) {
-        return "handle$" + name + "$" + arity;
+    private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
+            MethodHandles.Lookup methodHandlesLookup, Class<?> clazz, String type, String call, int captures) throws Throwable {
+        final FunctionRef ref = FunctionRef.create(painlessLookup, localMethods, null, clazz, type, call, captures);
+        final CallSite callSite = LambdaBootstrap.lambdaBootstrap(
+            methodHandlesLookup,
+            ref.interfaceMethodName,
+            ref.factoryMethodType,
+            ref.interfaceMethodType,
+            ref.delegateClassName,
+            ref.delegateInvokeType,
+            ref.delegateMethodName,
+            ref.delegateMethodType,
+            ref.isDelegateInterface ? 1 : 0
+        );
+        return callSite.dynamicInvoker().asType(MethodType.methodType(clazz, ref.factoryMethodType.parameterArray()));
     }

     /**
@@ -418,27 +365,12 @@ public static String getUserFunctionHandleFieldName(String name, int arity) {
      */
     static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
         // first try whitelist
-        for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-            PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
-
-            if (struct != null) {
-                MethodHandle handle = struct.getterMethodHandles.get(name);
-                if (handle != null) {
-                    return handle;
-                }
-            }
+        MethodHandle getter = painlessLookup.lookupRuntimeGetterMethodHandle(receiverClass, name);

-            for (final Class<?> iface : clazz.getInterfaces()) {
-                struct = painlessLookup.lookupPainlessClass(iface);
-
-                if (struct != null) {
-                    MethodHandle handle = struct.getterMethodHandles.get(name);
-                    if (handle != null) {
-                        return handle;
-                    }
-                }
-            }
+        if (getter != null) {
+            return getter;
         }
+
         // special case: arrays, maps, and lists
         if (receiverClass.isArray() && "length".equals(name)) {
             // arrays expose .length as a read-only getter
@@ -455,12 +387,12 @@ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
                 int index = Integer.parseInt(name);
                 return MethodHandles.insertArguments(LIST_GET, 1, index);
             } catch (NumberFormatException exception) {
-                throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
+                throw new IllegalArgumentException("Illegal list shortcut value [" + name + "].");
             }
         }

-        throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
-                "for class [" + receiverClass.getCanonicalName() + "].");
+        throw new IllegalArgumentException(
+                "dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found");
     }

     /**
@@ -489,27 +421,12 @@ static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
      */
     static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
         // first try whitelist
-        for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
-            PainlessClass struct = painlessLookup.lookupPainlessClass(clazz);
-
-            if (struct != null) {
-                MethodHandle handle = struct.setterMethodHandles.get(name);
-                if (handle != null) {
-                    return handle;
-                }
-            }
-
-            for (final Class<?> iface : clazz.getInterfaces()) {
-                struct = painlessLookup.lookupPainlessClass(iface);
+        MethodHandle setter = painlessLookup.lookupRuntimeSetterMethodHandle(receiverClass, name);

-                if (struct != null) {
-                    MethodHandle handle = struct.setterMethodHandles.get(name);
-                    if (handle != null) {
-                        return handle;
-                    }
-                }
-            }
+        if (setter != null) {
+            return setter;
         }
+
         // special case: maps, and lists
         if (Map.class.isAssignableFrom(receiverClass)) {
             // maps allow access like mymap.key
@@ -523,12 +440,12 @@ static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class<?> receiverClass, String name) {
                 int index = Integer.parseInt(name);
                 return MethodHandles.insertArguments(LIST_SET, 1, index);
             } catch (final NumberFormatException exception) {
-                throw new IllegalArgumentException( "Illegal list shortcut value [" + name + "].");
+                throw new IllegalArgumentException("Illegal list shortcut value [" + name + "].");
            }
        }

-        throw new IllegalArgumentException("Unable to find dynamic field [" + name + "] " +
-                "for class [" + receiverClass.getCanonicalName() + "].");
+        throw new IllegalArgumentException(
+                "dynamic getter [" + typeToCanonicalTypeName(receiverClass) + ", " + name + "] not found");
     }

     /**
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java
index 2fadaf30964a6..2488b6f218a7f 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java
@@ -20,6 +20,7 @@
 package org.elasticsearch.painless;

 import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.painless.Locals.LocalMethod;
 import org.elasticsearch.painless.lookup.PainlessLookup;

 import java.lang.invoke.CallSite;
@@ -28,6 +29,7 @@
 import java.lang.invoke.MethodType;
 import java.lang.invoke.MutableCallSite;
 import java.lang.invoke.WrongMethodTypeException;
+import java.util.Map;

 /**
  * Painless invokedynamic bootstrap for the call site.
@@ -105,19 +107,21 @@ static final class PIC extends MutableCallSite {
         static final int MAX_DEPTH = 5;

         private final PainlessLookup painlessLookup;
+        private final Map<String, LocalMethod> localMethods;
         private final MethodHandles.Lookup methodHandlesLookup;
         private final String name;
         private final int flavor;
         private final Object[] args;
         int depth; // pkg-protected for testing

-        PIC(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup,
-                String name, MethodType type, int initialDepth, int flavor, Object[] args) {
+        PIC(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
+                MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) {
             super(type);
             if (type.parameterType(0) != Object.class) {
                 throw new BootstrapMethodError("The receiver type (1st arg) of invokedynamic descriptor must be Object.");
             }
             this.painlessLookup = painlessLookup;
+            this.localMethods = localMethods;
             this.methodHandlesLookup = methodHandlesLookup;
             this.name = name;
             this.flavor = flavor;
@@ -145,7 +149,7 @@ static boolean checkClass(Class<?> clazz, Object receiver) {
         private MethodHandle lookup(int flavor, String name, Class<?> receiver) throws Throwable {
             switch(flavor) {
                 case METHOD_CALL:
-                    return Def.lookupMethod(painlessLookup, methodHandlesLookup, type(), receiver, name, args);
+                    return Def.lookupMethod(painlessLookup, localMethods, methodHandlesLookup, type(), receiver, name, args);
                 case LOAD:
                     return Def.lookupGetter(painlessLookup, receiver, name);
                 case STORE:
@@ -157,7 +161,7 @@ private MethodHandle lookup(int flavor, String name, Class<?> receiver) throws Throwable {
                 case ITERATOR:
                     return Def.lookupIterator(receiver);
                 case REFERENCE:
-                    return Def.lookupReference(painlessLookup, methodHandlesLookup, (String) args[0], receiver, name);
+                    return Def.lookupReference(painlessLookup, localMethods, methodHandlesLookup, (String) args[0], receiver, name);
                 case INDEX_NORMALIZE:
                     return Def.lookupIndexNormalize(receiver);
                 default: throw new AssertionError();
@@ -432,8 +436,9 @@ static boolean checkBoth(Class<?> left, Class<?> right, Object leftObject, Object rightObject) {
      * <p>
      * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic
      */
-    public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name,
-                                     MethodType type, int initialDepth, int flavor, Object... args) {
+    @SuppressWarnings("unchecked")
+    public static CallSite bootstrap(PainlessLookup painlessLookup, Map<String, LocalMethod> localMethods,
+            MethodHandles.Lookup methodHandlesLookup, String name, MethodType type, int initialDepth, int flavor, Object... args) {
         // validate arguments
         switch(flavor) {
             // "function-call" like things get a polymorphic cache
@@ -452,7 +457,7 @@ public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name,
                 if (args.length != numLambdas + 1) {
                     throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references");
                 }
-                return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args);
+                return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args);
             case LOAD:
             case STORE:
             case ARRAY_LOAD:
@@ -462,7 +467,7 @@ public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name,
                 if (args.length > 0) {
                     throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor);
                 }
-                return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args);
+                return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args);
             case REFERENCE:
                 if (args.length != 1) {
                     throw new BootstrapMethodError("Invalid number of parameters for reference call");
                 }
@@ -470,7 +475,7 @@ public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name,
                 if (args[0] instanceof String == false) {
                     throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]);
                 }
-                return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args);
+                return new PIC(painlessLookup, localMethods, methodHandlesLookup, name, type, initialDepth, flavor, args);
             // operators get monomorphic cache, with a generic impl for a fallback
             case UNARY_OPERATOR:
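The PIC above is Painless's polymorphic inline cache: every def call site is a MutableCallSite that starts out pointing at a fallback and, as receiver classes are observed, relinks itself to class-guarded fast paths; the diff merely threads localMethods through so those lookups can also resolve user-defined script methods. A standalone sketch of the guard-with-test caching pattern using JDK invoke APIs — this is the general shape, not DefBootstrap's actual code:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.MethodType;
    import java.lang.invoke.MutableCallSite;

    public class InlineCacheSketch {

        static boolean checkClass(Class<?> clazz, Object receiver) {
            return receiver.getClass() == clazz;
        }

        static String generic(Object receiver) {
            return "generic";
        }

        static String forString(Object receiver) {
            return "specialized";
        }

        public static void main(String[] args) throws Throwable {
            MethodHandles.Lookup lookup = MethodHandles.lookup();

            // megamorphic fallback; the real PIC relinks through lookup(flavor, name, receiver)
            MethodHandle fallback = lookup.findStatic(InlineCacheSketch.class, "generic",
                    MethodType.methodType(String.class, Object.class));
            MutableCallSite callSite = new MutableCallSite(fallback);

            // after seeing a String receiver, install a class-guarded specialized target
            MethodHandle test = lookup.findStatic(InlineCacheSketch.class, "checkClass",
                    MethodType.methodType(boolean.class, Class.class, Object.class))
                    .bindTo(String.class);
            MethodHandle target = lookup.findStatic(InlineCacheSketch.class, "forString",
                    MethodType.methodType(String.class, Object.class));
            callSite.setTarget(MethodHandles.guardWithTest(test, target, fallback));

            System.out.println(callSite.dynamicInvoker().invoke((Object) "x")); // specialized
            System.out.println(callSite.dynamicInvoker().invoke((Object) 42));  // generic
        }
    }

MAX_DEPTH in the real PIC bounds how many such guards get stacked before the site gives up and stays megamorphic.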
<p>
        - * Once you have created one of these, you have "everything you need" to call {@link LambdaBootstrap} - * either statically from bytecode with invokedynamic, or at runtime from Java. + * Contains all the values necessary to write the instruction to initiate a + * {@link LambdaBootstrap} for either a function reference or a user-defined + * lambda function. */ public class FunctionRef { - /** functional interface method name */ - public final String interfaceMethodName; - /** factory (CallSite) method signature */ - public final MethodType factoryMethodType; - /** functional interface method signature */ - public final MethodType interfaceMethodType; - /** class of the delegate method to be called */ - public final String delegateClassName; - /** the invocation type of the delegate method */ - public final int delegateInvokeType; - /** the name of the delegate method */ - public final String delegateMethodName; - /** delegate method signature */ - public final MethodType delegateMethodType; - - /** interface method */ - public final PainlessMethod interfaceMethod; - /** delegate method type parameters */ - public final List> delegateTypeParameters; - /** delegate method return type */ - public final Class delegateReturnType; - - /** factory method type descriptor */ - public final String factoryDescriptor; - /** functional interface method as type */ - public final Type interfaceType; - /** delegate method type method as type */ - public final Type delegateType; - - /** whether a call is made on a delegate interface */ - public final boolean isDelegateInterface; - /** - * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist. + * Creates a new FunctionRef which will resolve {@code type::call} from the whitelist. * @param painlessLookup the whitelist against which this script is being compiled - * @param expected functional interface type to implement. - * @param type the left hand side of a method reference expression - * @param call the right hand side of a method reference expression - * @param numCaptures number of captured arguments + * @param localMethods user-defined and synthetic methods generated directly on the script class + * @param location the character number within the script at compile-time + * @param targetClass functional interface type to implement. 
+ * @param typeName the left hand side of a method reference expression + * @param methodName the right hand side of a method reference expression + * @param numberOfCaptures number of captured arguments */ - public static FunctionRef resolveFromLookup( - PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { - - if ("new".equals(call)) { - return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod, - lookup(painlessLookup, expected, type), numCaptures); - } else { - return new FunctionRef(expected, painlessLookup.lookupPainlessClass(expected).functionalMethod, - lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); - } - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateConstructor implementation constructor - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessConstructor delegateConstructor, int numCaptures) { - Constructor javaConstructor = delegateConstructor.javaConstructor; - MethodType delegateMethodType = delegateConstructor.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = javaConstructor.getDeclaringClass().getName(); - this.isDelegateInterface = false; - this.delegateInvokeType = H_NEWINVOKESPECIAL; - this.delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateConstructor.typeParameters; - this.delegateReturnType = void.class; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } + public static FunctionRef create(PainlessLookup painlessLookup, Map localMethods, Location location, + Class targetClass, String typeName, String methodName, int numberOfCaptures) { - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = delegateMethod.javaMethod.getDeclaringClass().getName(); - this.isDelegateInterface = delegateMethod.javaMethod.getDeclaringClass().isInterface(); - - if (Modifier.isStatic(delegateMethod.javaMethod.getModifiers())) { - this.delegateInvokeType = H_INVOKESTATIC; - } else if 
(delegateMethod.javaMethod.getDeclaringClass().isInterface()) { - this.delegateInvokeType = H_INVOKEINTERFACE; - } else { - this.delegateInvokeType = H_INVOKEVIRTUAL; - } + Objects.requireNonNull(painlessLookup); + Objects.requireNonNull(targetClass); + Objects.requireNonNull(typeName); + Objects.requireNonNull(methodName); - this.delegateMethodName = delegateMethod.javaMethod.getName(); - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); + String targetClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); + PainlessMethod interfaceMethod; - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; + try { + interfaceMethod = painlessLookup.lookupFunctionalInterfacePainlessMethod(targetClass); - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (already resolved) - * @param expected functional interface type to implement - * @param interfaceMethod functional interface method - * @param delegateMethod implementation method - * @param numCaptures number of captured arguments - */ - public FunctionRef(Class expected, PainlessMethod interfaceMethod, LocalMethod delegateMethod, int numCaptures) { - MethodType delegateMethodType = delegateMethod.methodType; - - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.isDelegateInterface = false; - this.delegateInvokeType = H_INVOKESTATIC; - - this.delegateMethodName = delegateMethod.name; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - - this.interfaceMethod = interfaceMethod; - this.delegateTypeParameters = delegateMethod.typeParameters; - this.delegateReturnType = delegateMethod.returnType; - - this.factoryDescriptor = factoryMethodType.toMethodDescriptorString(); - this.interfaceType = Type.getMethodType(interfaceMethodType.toMethodDescriptorString()); - this.delegateType = Type.getMethodType(this.delegateMethodType.toMethodDescriptorString()); - } - - /** - * Creates a new FunctionRef (low level). - * It is for runtime use only. 
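// --------------------------------------------------------------------
// Illustrative sketch, not part of this patch: every constructor variant
// here, and the new create method below, splits the delegate signature
// the same way -- captured arguments remain in the factory (CallSite)
// type and are dropped from the delegate type handed to LambdaBootstrap.
// The BiFunction target and the (String,int,int) delegate are invented
// for the demonstration:

import java.lang.invoke.MethodType;
import java.util.function.BiFunction;

public class CaptureSplit {
    public static void main(String[] args) {
        // A delegate (String, int, int) -> String whose leading String is
        // a captured receiver, i.e. numCaptures == 1.
        MethodType delegate = MethodType.methodType(String.class, String.class, int.class, int.class);
        int numCaptures = 1;

        // The factory takes only the captures and returns the interface...
        MethodType factory = MethodType.methodType(BiFunction.class,
                delegate.dropParameterTypes(numCaptures, delegate.parameterCount()));
        // ...while the delegate type no longer lists them.
        MethodType bootstrapDelegate = delegate.dropParameterTypes(0, numCaptures);

        System.out.println(factory);           // (String)BiFunction
        System.out.println(bootstrapDelegate); // (int,int)String
    }
}
// --------------------------------------------------------------------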
- */ - public FunctionRef(Class expected, - PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) { - this.interfaceMethodName = interfaceMethod.javaMethod.getName(); - this.factoryMethodType = MethodType.methodType(expected, - delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); - this.interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); - - this.delegateClassName = CLASS_NAME; - this.delegateInvokeType = H_INVOKESTATIC; - this.delegateMethodName = delegateMethodName; - this.delegateMethodType = delegateMethodType.dropParameterTypes(0, numCaptures); - this.isDelegateInterface = false; + if (interfaceMethod == null) { + throw new IllegalArgumentException("cannot convert function reference [" + typeName + "::" + methodName + "] " + + "to a non-functional interface [" + targetClassName + "]"); + } - this.interfaceMethod = null; - this.delegateTypeParameters = null; - this.delegateReturnType = null; + String interfaceMethodName = interfaceMethod.javaMethod.getName(); + MethodType interfaceMethodType = interfaceMethod.methodType.dropParameterTypes(0, 1); + String delegateClassName; + boolean isDelegateInterface; + int delegateInvokeType; + String delegateMethodName; + MethodType delegateMethodType; + + Class delegateMethodReturnType; + List> delegateMethodParameters; + int interfaceTypeParametersSize = interfaceMethod.typeParameters.size(); + + if ("this".equals(typeName)) { + Objects.requireNonNull(localMethods); + + if (numberOfCaptures < 0) { + throw new IllegalStateException("internal error"); + } + + String localMethodKey = Locals.buildLocalMethodKey(methodName, numberOfCaptures + interfaceTypeParametersSize); + LocalMethod localMethod = localMethods.get(localMethodKey); + + if (localMethod == null) { + throw new IllegalArgumentException("function reference [this::" + localMethodKey + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found" + (localMethodKey.contains("$") ? 
" due to an incorrect number of arguments" : "") + ); + } + + delegateClassName = CLASS_NAME; + isDelegateInterface = false; + delegateInvokeType = H_INVOKESTATIC; + delegateMethodName = localMethod.name; + delegateMethodType = localMethod.methodType; + + delegateMethodReturnType = localMethod.returnType; + delegateMethodParameters = localMethod.typeParameters; + } else if ("new".equals(methodName)) { + if (numberOfCaptures != 0) { + throw new IllegalStateException("internal error"); + } + + PainlessConstructor painlessConstructor = painlessLookup.lookupPainlessConstructor(typeName, interfaceTypeParametersSize); + + if (painlessConstructor == null) { + throw new IllegalArgumentException("function reference [" + typeName + "::new/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found"); + } + + delegateClassName = painlessConstructor.javaConstructor.getDeclaringClass().getName(); + isDelegateInterface = false; + delegateInvokeType = H_NEWINVOKESPECIAL; + delegateMethodName = PainlessLookupUtility.CONSTRUCTOR_NAME; + delegateMethodType = painlessConstructor.methodType; + + delegateMethodReturnType = painlessConstructor.javaConstructor.getDeclaringClass(); + delegateMethodParameters = painlessConstructor.typeParameters; + } else { + if (numberOfCaptures != 0 && numberOfCaptures != 1) { + throw new IllegalStateException("internal error"); + } + + boolean captured = numberOfCaptures == 1; + PainlessMethod painlessMethod = + painlessLookup.lookupPainlessMethod(typeName, true, methodName, interfaceTypeParametersSize); + + if (painlessMethod == null) { + painlessMethod = painlessLookup.lookupPainlessMethod(typeName, false, methodName, + captured ? interfaceTypeParametersSize : interfaceTypeParametersSize - 1); + + if (painlessMethod == null) { + throw new IllegalArgumentException( + "function reference " + "[" + typeName + "::" + methodName + "/" + interfaceTypeParametersSize + "] " + + "matching [" + targetClassName + ", " + interfaceMethodName + "/" + interfaceTypeParametersSize + "] " + + "not found"); + } + } else if (captured) { + throw new IllegalStateException("internal error"); + } + + delegateClassName = painlessMethod.javaMethod.getDeclaringClass().getName(); + isDelegateInterface = painlessMethod.javaMethod.getDeclaringClass().isInterface(); + + if (Modifier.isStatic(painlessMethod.javaMethod.getModifiers())) { + delegateInvokeType = H_INVOKESTATIC; + } else if (isDelegateInterface) { + delegateInvokeType = H_INVOKEINTERFACE; + } else { + delegateInvokeType = H_INVOKEVIRTUAL; + } + + delegateMethodName = painlessMethod.javaMethod.getName(); + delegateMethodType = painlessMethod.methodType; + + delegateMethodReturnType = painlessMethod.returnType; + + if (delegateMethodType.parameterList().size() > painlessMethod.typeParameters.size()) { + delegateMethodParameters = new ArrayList<>(painlessMethod.typeParameters); + delegateMethodParameters.add(0, delegateMethodType.parameterType(0)); + } else { + delegateMethodParameters = painlessMethod.typeParameters; + } + } - this.factoryDescriptor = null; - this.interfaceType = null; - this.delegateType = null; - } + if (location != null) { + for (int typeParameter = 0; typeParameter < interfaceTypeParametersSize; ++typeParameter) { + Class from = interfaceMethod.typeParameters.get(typeParameter); + Class to = delegateMethodParameters.get(numberOfCaptures + typeParameter); + AnalyzerCaster.getLegalCast(location, from, to, false, true); + } - /** 
- * Looks up {@code type} from the whitelist, and returns a matching constructor. - */ - private static PainlessConstructor lookup(PainlessLookup painlessLookup, Class expected, String type) { - // check its really a functional interface - // for e.g. Comparable - PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::new] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } + if (interfaceMethod.returnType != void.class) { + AnalyzerCaster.getLegalCast(location, delegateMethodReturnType, interfaceMethod.returnType, false, true); + } + } - // lookup requested constructor - PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type)); - PainlessConstructor impl = struct.constructors.get(PainlessLookupUtility.buildPainlessConstructorKey(method.typeParameters.size())); + MethodType factoryMethodType = MethodType.methodType(targetClass, + delegateMethodType.dropParameterTypes(numberOfCaptures, delegateMethodType.parameterCount())); + delegateMethodType = delegateMethodType.dropParameterTypes(0, numberOfCaptures); + + return new FunctionRef(interfaceMethodName, interfaceMethodType, + delegateClassName, isDelegateInterface, delegateInvokeType, delegateMethodName, delegateMethodType, + factoryMethodType + ); + } catch (IllegalArgumentException iae) { + if (location != null) { + throw location.createError(iae); + } - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::new] matching [" + expected + "]"); + throw iae; } - - return impl; } - /** - * Looks up {@code type::call} from the whitelist, and returns a matching method. - */ - private static PainlessMethod lookup(PainlessLookup painlessLookup, Class expected, - String type, String call, boolean receiverCaptured) { - // check its really a functional interface - // for e.g. 
Comparable - PainlessMethod method = painlessLookup.lookupPainlessClass(expected).functionalMethod; - if (method == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } + /** functional interface method name */ + public final String interfaceMethodName; + /** functional interface method signature */ + public final MethodType interfaceMethodType; + /** class of the delegate method to be called */ + public final String delegateClassName; + /** whether a call is made on a delegate interface */ + public final boolean isDelegateInterface; + /** the invocation type of the delegate method */ + public final int delegateInvokeType; + /** the name of the delegate method */ + public final String delegateMethodName; + /** delegate method signature */ + public final MethodType delegateMethodType; + /** factory (CallSite) method signature */ + public final MethodType factoryMethodType; - // lookup requested method - PainlessClass struct = painlessLookup.lookupPainlessClass(painlessLookup.canonicalTypeNameToType(type)); - final PainlessMethod impl; - // look for a static impl first - PainlessMethod staticImpl = - struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.typeParameters.size())); - if (staticImpl == null) { - // otherwise a virtual impl - final int arity; - if (receiverCaptured) { - // receiver captured - arity = method.typeParameters.size(); - } else { - // receiver passed - arity = method.typeParameters.size() - 1; - } - impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); - } else { - impl = staticImpl; - } - if (impl == null) { - throw new IllegalArgumentException("Unknown reference [" + type + "::" + call + "] matching " + - "[" + expected + "]"); - } - return impl; + private FunctionRef( + String interfaceMethodName, MethodType interfaceMethodType, + String delegateClassName, boolean isDelegateInterface, + int delegateInvokeType, String delegateMethodName, MethodType delegateMethodType, + MethodType factoryMethodType) { + + this.interfaceMethodName = interfaceMethodName; + this.interfaceMethodType = interfaceMethodType; + this.delegateClassName = delegateClassName; + this.isDelegateInterface = isDelegateInterface; + this.delegateInvokeType = delegateInvokeType; + this.delegateMethodName = delegateMethodName; + this.delegateMethodType = delegateMethodType; + this.factoryMethodType = factoryMethodType; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index f2c7e02c637c1..e07c016ddd0e3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -32,6 +32,9 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToJavaType; /** * Tracks user defined methods and variables across compilation phases. @@ -74,7 +77,10 @@ public LocalMethod(String name, Class returnType, List> typeParamete /** Creates a new local variable scope (e.g. 
loop) inside the current scope */ public static Locals newLocalScope(Locals currentScope) { - return new Locals(currentScope); + Locals locals = new Locals(currentScope); + locals.methods = currentScope.methods; + + return locals; } /** @@ -82,9 +88,13 @@ public static Locals newLocalScope(Locals currentScope) { *
<p>
        * This is just like {@link #newFunctionScope}, except the captured parameters are made read-only. */ - public static Locals newLambdaScope(Locals programScope, Class returnType, List parameters, + public static Locals newLambdaScope(Locals programScope, String name, Class returnType, List parameters, int captureCount, int maxLoopCounter) { Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS); + locals.methods = programScope.methods; + List> typeParameters = parameters.stream().map(parameter -> typeToJavaType(parameter.clazz)).collect(Collectors.toList()); + locals.methods.put(buildLocalMethodKey(name, parameters.size()), new LocalMethod(name, returnType, typeParameters, + MethodType.methodType(typeToJavaType(returnType), typeParameters))); for (int i = 0; i < parameters.size(); i++) { Parameter parameter = parameters.get(i); // TODO: allow non-captures to be r/w: @@ -104,6 +114,7 @@ public static Locals newLambdaScope(Locals programScope, Class returnType, Li /** Creates a new function scope inside the current scope */ public static Locals newFunctionScope(Locals programScope, Class returnType, List parameters, int maxLoopCounter) { Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS); + locals.methods = programScope.methods; for (Parameter parameter : parameters) { locals.addVariable(parameter.location, parameter.clazz, parameter.name, false); } @@ -118,6 +129,7 @@ public static Locals newFunctionScope(Locals programScope, Class returnType, public static Locals newMainMethodScope(ScriptClassInfo scriptClassInfo, Locals programScope, int maxLoopCounter) { Locals locals = new Locals( programScope, programScope.painlessLookup, scriptClassInfo.getExecuteMethodReturnType(), KEYWORDS); + locals.methods = programScope.methods; // This reference. Internal use only. locals.defineVariable(null, Object.class, THIS, true); @@ -136,6 +148,7 @@ public static Locals newMainMethodScope(ScriptClassInfo scriptClassInfo, Locals /** Creates a new program scope: the list of methods. It is the parent for all methods */ public static Locals newProgramScope(PainlessLookup painlessLookup, Collection methods) { Locals locals = new Locals(null, painlessLookup, null, null); + locals.methods = new HashMap<>(); for (LocalMethod method : methods) { locals.addMethod(method); } @@ -167,15 +180,8 @@ public Variable getVariable(Location location, String name) { } /** Looks up a method. Returns null if the method does not exist. */ - public LocalMethod getMethod(String key) { - LocalMethod method = lookupMethod(key); - if (method != null) { - return method; - } - if (parent != null) { - return parent.getMethod(key); - } - return null; + public LocalMethod getMethod(String methodName, int methodArity) { + return methods.get(buildLocalMethodKey(methodName, methodArity)); } /** Creates a new variable. Throws IAE if the variable has already been defined (even in a parent) or reserved. */ @@ -260,15 +266,10 @@ private Variable lookupVariable(Location location, String name) { return variables.get(name); } - /** Looks up a method at this scope only. Returns null if the method does not exist. */ - private LocalMethod lookupMethod(String key) { - if (methods == null) { - return null; - } - return methods.get(key); + public Map getMethods() { + return Collections.unmodifiableMap(methods); } - /** Defines a variable at this scope internally. 
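// --------------------------------------------------------------------
// Illustrative sketch, not part of this patch: the Locals changes above
// replace the per-scope, lazily created method map with one table built
// in the program scope and shared by reference with every nested scope.
// That is why getMethod is now a flat map hit instead of a parent walk,
// and why newLambdaScope can register the lambda itself for later
// this::name resolution. A miniature of the design, assuming the
// name/arity key format of buildLocalMethodKey:

import java.util.HashMap;
import java.util.Map;

final class MiniLocals {
    final MiniLocals parent;
    final Map<String, Integer> variables = new HashMap<>(); // per-scope: name -> slot
    Map<String, String> methods;                            // shared: "name/arity" -> descriptor

    static MiniLocals programScope() {
        MiniLocals scope = new MiniLocals(null);
        scope.methods = new HashMap<>(); // created exactly once, here
        return scope;
    }

    static MiniLocals localScope(MiniLocals current) {
        MiniLocals scope = new MiniLocals(current);
        scope.methods = current.methods; // shared, never copied
        return scope;
    }

    private MiniLocals(MiniLocals parent) { this.parent = parent; }

    // No parent walk needed: every scope sees the same table.
    String getMethod(String name, int arity) {
        return methods.get(name + "/" + arity);
    }
}
// --------------------------------------------------------------------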
*/ private Variable defineVariable(Location location, Class type, String name, boolean readonly) { if (variables == null) { @@ -281,14 +282,9 @@ private Variable defineVariable(Location location, Class type, String name, b } private void addMethod(LocalMethod method) { - if (methods == null) { - methods = new HashMap<>(); - } methods.put(buildLocalMethodKey(method.name, method.typeParameters.size()), method); - // TODO: check result } - private int getNextSlot() { return nextSlotNumber; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index 72435562a3bd0..dca638b3dddac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -56,6 +56,7 @@ import static org.elasticsearch.painless.WriterConstants.DEF_TO_SHORT_IMPLICIT; import static org.elasticsearch.painless.WriterConstants.DEF_UTIL_TYPE; import static org.elasticsearch.painless.WriterConstants.INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; +import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; import static org.elasticsearch.painless.WriterConstants.MAX_INDY_STRING_CONCAT_ARGS; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.STRINGBUILDER_APPEND_BOOLEAN; @@ -134,52 +135,52 @@ public void writeLoopCounter(int slot, int count, Location location) { public void writeCast(PainlessCast cast) { if (cast != null) { - if (cast.from == char.class && cast.to == String.class) { + if (cast.originalType == char.class && cast.targetType == String.class) { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); - } else if (cast.from == String.class && cast.to == char.class) { + } else if (cast.originalType == String.class && cast.targetType == char.class) { invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); - } else if (cast.unboxFrom != null) { - unbox(getType(cast.unboxFrom)); - writeCast(cast.from, cast.to); - } else if (cast.unboxTo != null) { - if (cast.from == def.class) { - if (cast.explicit) { - if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); - else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); - else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); - else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); - else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); - else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); - else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); - else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); + } else if (cast.unboxOriginalType != null) { + unbox(getType(cast.unboxOriginalType)); + writeCast(cast.originalType, cast.targetType); + } else if (cast.unboxTargetType != null) { + if (cast.originalType == def.class) { + if (cast.explicitCast) { + if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT); + else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT); + else if (cast.targetType == 
Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT); else { throw new IllegalStateException("Illegal tree structure."); } } else { - if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); - else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); - else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); - else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); - else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); - else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); - else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); - else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); + if (cast.targetType == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN); + else if (cast.targetType == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT); + else if (cast.targetType == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT); + else if (cast.targetType == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT); + else if (cast.targetType == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT); + else if (cast.targetType == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT); + else if (cast.targetType == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT); + else if (cast.targetType == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT); else { throw new IllegalStateException("Illegal tree structure."); } } } else { - writeCast(cast.from, cast.to); - unbox(getType(cast.unboxTo)); + writeCast(cast.originalType, cast.targetType); + unbox(getType(cast.unboxTargetType)); } - } else if (cast.boxFrom != null) { - box(getType(cast.boxFrom)); - writeCast(cast.from, cast.to); - } else if (cast.boxTo != null) { - writeCast(cast.from, cast.to); - box(getType(cast.boxTo)); + } else if (cast.boxOriginalType != null) { + box(getType(cast.boxOriginalType)); + writeCast(cast.originalType, cast.targetType); + } else if (cast.boxTargetType != null) { + writeCast(cast.originalType, cast.targetType); + box(getType(cast.boxTargetType)); } else { - writeCast(cast.from, cast.to); + writeCast(cast.originalType, cast.targetType); } } } @@ -439,4 +440,18 @@ public void invokeMethodCall(PainlessMethod painlessMethod) { invokeVirtual(type, method); } } + + public void invokeLambdaCall(FunctionRef functionRef) { + invokeDynamic( + functionRef.interfaceMethodName, + functionRef.factoryMethodType.toMethodDescriptorString(), + LAMBDA_BOOTSTRAP_HANDLE, + Type.getMethodType(functionRef.interfaceMethodType.toMethodDescriptorString()), + functionRef.delegateClassName, + functionRef.delegateInvokeType, + functionRef.delegateMethodName, + Type.getMethodType(functionRef.delegateMethodType.toMethodDescriptorString()), + functionRef.isDelegateInterface ? 
1 : 0 + ); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index db3aeff0483f6..9c3d991080d26 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -120,7 +120,7 @@ public final class WriterConstants { DEF_BOOTSTRAP_METHOD.getDescriptor(), false); public static final Type DEF_BOOTSTRAP_DELEGATE_TYPE = Type.getType(DefBootstrap.class); public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", PainlessLookup.class, - MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); + Map.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); public static final Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index 2440fb45d4dfb..f87f8a134b8c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -22,46 +22,55 @@ public class PainlessCast { /** Create a standard cast with no boxing/unboxing. */ - public static PainlessCast standard(Class from, Class to, boolean explicit) { - return new PainlessCast(from, to, explicit, null, null, null, null); + public static PainlessCast originalTypetoTargetType(Class originalType, Class targetType, boolean explicitCast) { + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null); } - /** Create a cast where the from type will be unboxed, and then the cast will be performed. */ - public static PainlessCast unboxFrom(Class from, Class to, boolean explicit, Class unboxFrom) { - return new PainlessCast(from, to, explicit, unboxFrom, null, null, null); + /** Create a cast where the original type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxOriginalType( + Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + + return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null); } - /** Create a cast where the to type will be unboxed, and then the cast will be performed. */ - public static PainlessCast unboxTo(Class from, Class to, boolean explicit, Class unboxTo) { - return new PainlessCast(from, to, explicit, null, unboxTo, null, null); + /** Create a cast where the target type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxTargetType( + Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null); } - /** Create a cast where the from type will be boxed, and then the cast will be performed. 
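// --------------------------------------------------------------------
// Illustrative sketch, not part of this patch: invokeLambdaCall above
// passes the FunctionRef values as invokedynamic static arguments and
// re-derives the ASM method types from the stored MethodType descriptor
// strings (Type.getMethodType(methodType.toMethodDescriptorString())).
// Painless ships its own LambdaBootstrap because it must handle captures
// and def types, but the factory/interface/delegate triple mirrors the
// JDK's LambdaMetafactory, shown here in a runnable JDK-only analogy:

import java.lang.invoke.*;
import java.util.function.Function;

public class MetafactoryAnalogy {
    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        MethodHandle delegate = lookup.findStatic(Integer.class, "parseInt",
                MethodType.methodType(int.class, String.class));
        CallSite site = LambdaMetafactory.metafactory(
                lookup,
                "apply",                                             // interface method name
                MethodType.methodType(Function.class),               // factory type (no captures)
                MethodType.methodType(Object.class, Object.class),   // erased interface method type
                delegate,                                            // delegate method
                MethodType.methodType(Integer.class, String.class)); // instantiated method type
        @SuppressWarnings("unchecked")
        Function<String, Integer> parse = (Function<String, Integer>) site.getTarget().invoke();
        System.out.println(parse.apply("42")); // 42
    }
}
// --------------------------------------------------------------------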
*/ - public static PainlessCast boxFrom(Class from, Class to, boolean explicit, Class boxFrom) { - return new PainlessCast(from, to, explicit, null, null, boxFrom, null); + /** Create a cast where the original type will be boxed, and then the cast will be performed. */ + public static PainlessCast boxOriginalType( + Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null); } - /** Create a cast where the to type will be boxed, and then the cast will be performed. */ - public static PainlessCast boxTo(Class from, Class to, boolean explicit, Class boxTo) { - return new PainlessCast(from, to, explicit, null, null, null, boxTo); + /** Create a cast where the target type will be boxed, and then the cast will be performed. */ + public static PainlessCast boxTargetType( + Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } - public final Class from; - public final Class to; - public final boolean explicit; - public final Class unboxFrom; - public final Class unboxTo; - public final Class boxFrom; - public final Class boxTo; + public final Class originalType; + public final Class targetType; + public final boolean explicitCast; + public final Class unboxOriginalType; + public final Class unboxTargetType; + public final Class boxOriginalType; + public final Class boxTargetType; + + private PainlessCast(Class originalType, Class targetType, boolean explicitCast, + Class unboxOriginalType, Class unboxTargetType, Class boxOriginalType, Class boxTargetType) { - private PainlessCast(Class from, Class to, boolean explicit, - Class unboxFrom, Class unboxTo, Class boxFrom, Class boxTo) { - this.from = from; - this.to = to; - this.explicit = explicit; - this.unboxFrom = unboxFrom; - this.unboxTo = unboxTo; - this.boxFrom = boxFrom; - this.boxTo = boxTo; + this.originalType = originalType; + this.targetType = targetType; + this.explicitCast = explicitCast; + this.unboxOriginalType = unboxOriginalType; + this.unboxTargetType = unboxTargetType; + this.boxOriginalType = boxOriginalType; + this.boxTargetType = boxTargetType; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 835bfb5c505a4..50bb79dcfbdf5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -35,13 +35,13 @@ public final class PainlessClass { public final Map getterMethodHandles; public final Map setterMethodHandles; - public final PainlessMethod functionalMethod; + public final PainlessMethod functionalInterfaceMethod; PainlessClass(Map constructors, Map staticMethods, Map methods, Map staticFields, Map fields, Map getterMethodHandles, Map setterMethodHandles, - PainlessMethod functionalMethod) { + PainlessMethod functionalInterfaceMethod) { this.constructors = Collections.unmodifiableMap(constructors); @@ -54,6 +54,6 @@ public final class PainlessClass { this.getterMethodHandles = Collections.unmodifiableMap(getterMethodHandles); this.setterMethodHandles = Collections.unmodifiableMap(setterMethodHandles); - this.functionalMethod = functionalMethod; + this.functionalInterfaceMethod = 
functionalInterfaceMethod; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 866f711ba0f3e..a61215e9ed749 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -35,7 +35,7 @@ final class PainlessClassBuilder { final Map getterMethodHandles; final Map setterMethodHandles; - PainlessMethod functionalMethod; + PainlessMethod functionalInterfaceMethod; PainlessClassBuilder() { constructors = new HashMap<>(); @@ -49,11 +49,11 @@ final class PainlessClassBuilder { getterMethodHandles = new HashMap<>(); setterMethodHandles = new HashMap<>(); - functionalMethod = null; + functionalInterfaceMethod = null; } PainlessClass build() { return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, - getterMethodHandles, setterMethodHandles, functionalMethod); + getterMethodHandles, setterMethodHandles, functionalInterfaceMethod); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 786248f726982..16b8ac14f14f2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -19,15 +19,17 @@ package org.elasticsearch.painless.lookup; +import java.lang.invoke.MethodHandle; import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessConstructorKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessFieldKey; import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; -import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToBoxedType; public final class PainlessLookup { @@ -48,10 +50,10 @@ public boolean isValidCanonicalClassName(String canonicalClassName) { return canonicalClassNamesToClasses.containsKey(canonicalClassName); } - public Class canonicalTypeNameToType(String painlessType) { - Objects.requireNonNull(painlessType); + public Class canonicalTypeNameToType(String canonicalTypeName) { + Objects.requireNonNull(canonicalTypeName); - return PainlessLookupUtility.canonicalTypeNameToType(painlessType, canonicalClassNamesToClasses); + return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses); } public Set> getClasses() { @@ -62,6 +64,18 @@ public PainlessClass lookupPainlessClass(Class targetClass) { return classesToPainlessClasses.get(targetClass); } + public PainlessConstructor lookupPainlessConstructor(String targetCanonicalClassName, int constructorArity) { + Objects.requireNonNull(targetCanonicalClassName); + + Class targetClass = canonicalTypeNameToType(targetCanonicalClassName); + + if (targetClass == null) { + return null; + } + + return lookupPainlessConstructor(targetClass, constructorArity); + } + public PainlessConstructor lookupPainlessConstructor(Class targetClass, int constructorArity) 
{ Objects.requireNonNull(targetClass); @@ -69,46 +83,60 @@ public PainlessConstructor lookupPainlessConstructor(Class targetClass, int c String painlessConstructorKey = buildPainlessConstructorKey(constructorArity); if (targetPainlessClass == null) { - throw new IllegalArgumentException("target class [" + typeToCanonicalTypeName(targetClass) + "] " + - "not found for constructor [" + painlessConstructorKey + "]"); + return null; } PainlessConstructor painlessConstructor = targetPainlessClass.constructors.get(painlessConstructorKey); if (painlessConstructor == null) { - throw new IllegalArgumentException( - "constructor [" + typeToCanonicalTypeName(targetClass) + ", " + painlessConstructorKey + "] not found"); + return null; } return painlessConstructor; } + public PainlessMethod lookupPainlessMethod(String targetCanonicalClassName, boolean isStatic, String methodName, int methodArity) { + Objects.requireNonNull(targetCanonicalClassName); + + Class targetClass = canonicalTypeNameToType(targetCanonicalClassName); + + if (targetClass == null) { + return null; + } + + return lookupPainlessMethod(targetClass, isStatic, methodName, methodArity); + } + public PainlessMethod lookupPainlessMethod(Class targetClass, boolean isStatic, String methodName, int methodArity) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); if (targetClass.isPrimitive()) { - targetClass = PainlessLookupUtility.typeToBoxedType(targetClass); + targetClass = typeToBoxedType(targetClass); } PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); if (targetPainlessClass == null) { - throw new IllegalArgumentException( - "target class [" + typeToCanonicalTypeName(targetClass) + "] not found for method [" + painlessMethodKey + "]"); + return null; } - PainlessMethod painlessMethod = isStatic ? + return isStatic ? targetPainlessClass.staticMethods.get(painlessMethodKey) : targetPainlessClass.methods.get(painlessMethodKey); + } - if (painlessMethod == null) { - throw new IllegalArgumentException( - "method [" + typeToCanonicalTypeName(targetClass) + ", " + painlessMethodKey + "] not found"); + public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) { + Objects.requireNonNull(targetCanonicalClassName); + + Class targetClass = canonicalTypeNameToType(targetCanonicalClassName); + + if (targetClass == null) { + return null; } - return painlessMethod; + return lookupPainlessField(targetClass, isStatic, fieldName); } public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, String fieldName) { @@ -119,8 +147,7 @@ public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, String painlessFieldKey = buildPainlessFieldKey(fieldName); if (targetPainlessClass == null) { - throw new IllegalArgumentException( - "target class [" + typeToCanonicalTypeName(targetClass) + "] not found for field [" + painlessFieldKey + "]"); + return null; } PainlessField painlessField = isStatic ? 
@@ -128,10 +155,85 @@ public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, targetPainlessClass.fields.get(painlessFieldKey); if (painlessField == null) { - throw new IllegalArgumentException( - "field [" + typeToCanonicalTypeName(targetClass) + ", " + painlessFieldKey + "] not found"); + return null; } return painlessField; } + + public PainlessMethod lookupFunctionalInterfacePainlessMethod(Class targetClass) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetClass); + + if (targetPainlessClass == null) { + return null; + } + + return targetPainlessClass.functionalInterfaceMethod; + } + + public PainlessMethod lookupRuntimePainlessMethod(Class originalTargetClass, String methodName, int methodArity) { + Objects.requireNonNull(originalTargetClass); + Objects.requireNonNull(methodName); + + String painlessMethodKey = buildPainlessMethodKey(methodName, methodArity); + Function objectLookup = targetPainlessClass -> targetPainlessClass.methods.get(painlessMethodKey); + + return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + } + + public MethodHandle lookupRuntimeGetterMethodHandle(Class originalTargetClass, String getterName) { + Objects.requireNonNull(originalTargetClass); + Objects.requireNonNull(getterName); + + Function objectLookup = targetPainlessClass -> targetPainlessClass.getterMethodHandles.get(getterName); + + return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + } + + public MethodHandle lookupRuntimeSetterMethodHandle(Class originalTargetClass, String setterName) { + Objects.requireNonNull(originalTargetClass); + Objects.requireNonNull(setterName); + + Function objectLookup = targetPainlessClass -> targetPainlessClass.setterMethodHandles.get(setterName); + + return lookupRuntimePainlessObject(originalTargetClass, objectLookup); + } + + private T lookupRuntimePainlessObject(Class originalTargetClass, Function objectLookup) { + Class currentTargetClass = originalTargetClass; + + while (currentTargetClass != null) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(currentTargetClass); + + if (targetPainlessClass != null) { + T painlessObject = objectLookup.apply(targetPainlessClass); + + if (painlessObject != null) { + return painlessObject; + } + } + + currentTargetClass = currentTargetClass.getSuperclass(); + } + + currentTargetClass = originalTargetClass; + + while (currentTargetClass != null) { + for (Class targetInterface : currentTargetClass.getInterfaces()) { + PainlessClass targetPainlessClass = classesToPainlessClasses.get(targetInterface); + + if (targetPainlessClass != null) { + T painlessObject = objectLookup.apply(targetPainlessClass); + + if (painlessObject != null) { + return painlessObject; + } + } + } + + currentTargetClass = currentTargetClass.getSuperclass(); + } + + return null; + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index a0cab7f1a5bda..e644453a4c1ba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -166,35 +166,35 @@ public static PainlessLookup buildFromWhitelists(List whitelists) { try { for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistStructs) { + for (WhitelistClass 
whitelistClass : whitelist.whitelistClasses) { origin = whitelistClass.origin; painlessLookupBuilder.addPainlessClass( - whitelist.javaClassLoader, whitelistClass.javaClassName, whitelistClass.onlyFQNJavaClassName == false); + whitelist.classLoader, whitelistClass.javaClassName, whitelistClass.noImport == false); } } for (Whitelist whitelist : whitelists) { - for (WhitelistClass whitelistClass : whitelist.whitelistStructs) { + for (WhitelistClass whitelistClass : whitelist.whitelistClasses) { String targetCanonicalClassName = whitelistClass.javaClassName.replace('$', '.'); for (WhitelistConstructor whitelistConstructor : whitelistClass.whitelistConstructors) { origin = whitelistConstructor.origin; painlessLookupBuilder.addPainlessConstructor( - targetCanonicalClassName, whitelistConstructor.painlessParameterTypeNames); + targetCanonicalClassName, whitelistConstructor.canonicalTypeNameParameters); } for (WhitelistMethod whitelistMethod : whitelistClass.whitelistMethods) { origin = whitelistMethod.origin; painlessLookupBuilder.addPainlessMethod( - whitelist.javaClassLoader, targetCanonicalClassName, whitelistMethod.javaAugmentedClassName, - whitelistMethod.javaMethodName, whitelistMethod.painlessReturnTypeName, - whitelistMethod.painlessParameterTypeNames); + whitelist.classLoader, targetCanonicalClassName, whitelistMethod.augmentedCanonicalClassName, + whitelistMethod.methodName, whitelistMethod.returnCanonicalTypeName, + whitelistMethod.canonicalTypeNameParameters); } for (WhitelistField whitelistField : whitelistClass.whitelistFields) { origin = whitelistField.origin; painlessLookupBuilder.addPainlessField( - targetCanonicalClassName, whitelistField.javaFieldName, whitelistField.painlessFieldTypeName); + targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter); } } } @@ -220,8 +220,12 @@ private Class canonicalTypeNameToType(String canonicalTypeName) { return PainlessLookupUtility.canonicalTypeNameToType(canonicalTypeName, canonicalClassNamesToClasses); } - private void validateType(Class type) { - PainlessLookupUtility.validateType(type, classesToPainlessClassBuilders.keySet()); + private boolean isValidType(Class type) { + while (type.getComponentType() != null) { + type = type.getComponentType(); + } + + return classesToPainlessClassBuilders.containsKey(type); } public void addPainlessClass(ClassLoader classLoader, String javaClassName, boolean importClassName) { @@ -289,7 +293,7 @@ public void addPainlessClass(Class clazz, boolean importClassName) { if (canonicalClassName.equals(importedCanonicalClassName)) { if (importClassName == true) { - throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); + throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package"); } } else { Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); @@ -297,7 +301,8 @@ public void addPainlessClass(Class clazz, boolean importClassName) { if (importedPainlessClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException( + "inconsistent no_import parameters found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); @@ -306,32 +311,33 @@ public void addPainlessClass(Class clazz, 
boolean importClassName) { throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]"); } } } - public void addPainlessConstructor(String targetCanonicalClassName, List typeNameParameters) { + public void addPainlessConstructor(String targetCanonicalClassName, List canonicalTypeNameParameters) { Objects.requireNonNull(targetCanonicalClassName); - Objects.requireNonNull(typeNameParameters); + Objects.requireNonNull(canonicalTypeNameParameters); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found" + - "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]"); + "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); } - List> typeParameters = new ArrayList<>(typeNameParameters.size()); + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); - for (String typeNameParameter : typeNameParameters) { - try { - Class typeParameter = canonicalTypeNameToType(typeNameParameter); - typeParameters.add(typeParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + typeNameParameters + "]", iae); + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + + "for constructor [[" + targetCanonicalClassName + "], " + canonicalTypeNameParameters + "]"); } + + typeParameters.add(typeParameter); } addPainlessConstructor(targetClass, typeParameters); @@ -357,11 +363,9 @@ public void addPainlessConstructor(Class targetClass, List> typePara List> javaTypeParameters = new ArrayList<>(typeParametersSize); for (Class typeParameter : typeParameters) { - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + "for constructor [[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); } javaTypeParameters.add(typeToJavaType(typeParameter)); @@ -406,19 +410,19 @@ public void addPainlessConstructor(Class targetClass, List> typePara } public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, - String methodName, String returnCanonicalTypeName, List typeNameParameters) { + String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { Objects.requireNonNull(classLoader); Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(methodName); 
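// --------------------------------------------------------------------
// Illustrative sketch, not part of this patch: in PainlessLookup above,
// lookupRuntimePainlessMethod and the runtime getter/setter handles all
// funnel into lookupRuntimePainlessObject, which searches the receiver's
// superclass chain first and then the direct interfaces of each class in
// that chain, returning null on a miss like the other new lookups. A
// stand-alone model of the walk, with invented names:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class HierarchyLookup {
    static <T> T find(Map<Class<?>, T> whitelisted, Class<?> receiverClass) {
        for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
            T hit = whitelisted.get(clazz);
            if (hit != null) {
                return hit;
            }
        }
        for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) {
            for (Class<?> iface : clazz.getInterfaces()) {
                T hit = whitelisted.get(iface);
                if (hit != null) {
                    return hit;
                }
            }
        }
        return null; // absent: callers now build the contextual error message
    }

    public static void main(String[] args) {
        Map<Class<?>, String> whitelisted = new HashMap<>();
        whitelisted.put(List.class, "allowed as List");
        System.out.println(find(whitelisted, ArrayList.class)); // found via the interface pass
    }
}
// --------------------------------------------------------------------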
Objects.requireNonNull(returnCanonicalTypeName); - Objects.requireNonNull(typeNameParameters); + Objects.requireNonNull(canonicalTypeNameParameters); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]"); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } Class augmentedClass = null; @@ -428,29 +432,28 @@ public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalCla augmentedClass = Class.forName(augmentedCanonicalClassName, true, classLoader); } catch (ClassNotFoundException cnfe) { throw new IllegalArgumentException("augmented class [" + augmentedCanonicalClassName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", cnfe); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]", cnfe); } } - List> typeParameters = new ArrayList<>(typeNameParameters.size()); + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); - for (String typeNameParameter : typeNameParameters) { - try { - Class typeParameter = canonicalTypeNameToType(typeNameParameter); - typeParameters.add(typeParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("parameter type [" + typeNameParameter + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("parameter type [" + canonicalTypeNameParameter + "] not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } + + typeParameters.add(typeParameter); } - Class returnType; + Class returnType = canonicalTypeNameToType(returnCanonicalTypeName); - try { - returnType = canonicalTypeNameToType(returnCanonicalTypeName); - } catch (IllegalArgumentException iae) { + if (returnType == null) { throw new IllegalArgumentException("parameter type [" + returnCanonicalTypeName + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typeNameParameters + "]", iae); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); } addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters); @@ -490,22 +493,18 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, Str } for (Class typeParameter : typeParameters) { - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + "not found for method [[" + targetCanonicalClassName + "], [" + methodName + "], " + - typesToCanonicalTypeNames(typeParameters) + "]", iae); + typesToCanonicalTypeNames(typeParameters) + "]"); } javaTypeParameters.add(typeToJavaType(typeParameter)); } - try { - validateType(returnType); - } catch (IllegalArgumentException iae) { + if (isValidType(returnType) == false) { throw new 
IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for method " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); } Method javaMethod; @@ -570,7 +569,6 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, Str PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); if (painlessMethod == null) { - org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); MethodHandle methodHandle; if (augmentedClass == null) { @@ -610,10 +608,10 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, Str } } - public void addPainlessField(String targetCanonicalClassName, String fieldName, String typeNameParameter) { + public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) { Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(fieldName); - Objects.requireNonNull(typeNameParameter); + Objects.requireNonNull(canonicalTypeNameParameter); Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); @@ -621,12 +619,10 @@ public void addPainlessField(String targetCanonicalClassName, String fieldName, throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - Class typeParameter; + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); - try { - typeParameter = canonicalTypeNameToType(typeNameParameter); - } catch (IllegalArgumentException iae) { - throw new IllegalArgumentException("type parameter [" + typeNameParameter + "] not found " + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found " + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); } @@ -657,11 +653,9 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); } - try { - validateType(typeParameter); - } catch (IllegalArgumentException iae) { + if (isValidType(typeParameter) == false) { throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] not found " + - "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", iae); + "for field [[" + targetCanonicalClassName + "], [" + fieldName + "]"); } Field javaField; @@ -876,7 +870,7 @@ private void setFunctionalInterfaceMethod(Class targetClass, PainlessClassBui } else if (javaMethods.size() == 1) { java.lang.reflect.Method javaMethod = javaMethods.get(0); String painlessMethodKey = buildPainlessMethodKey(javaMethod.getName(), javaMethod.getParameterCount()); - painlessClassBuilder.functionalMethod = painlessClassBuilder.methods.get(painlessMethodKey); + painlessClassBuilder.functionalInterfaceMethod = painlessClassBuilder.methods.get(painlessMethodKey); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index 0a181c5f1b02d..f2eb434516961 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -20,7 +20,6 @@ package org.elasticsearch.painless.lookup; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Objects; @@ -101,45 +100,47 @@ public static Class canonicalTypeNameToType(String canonicalTypeName, Map type) { String canonicalTypeName = type.getCanonicalName(); - if (canonicalTypeName.startsWith(def.class.getCanonicalName())) { + if (canonicalTypeName == null) { + canonicalTypeName = ANONYMOUS_CLASS_NAME; + } else if (canonicalTypeName.startsWith(def.class.getCanonicalName())) { canonicalTypeName = canonicalTypeName.replace(def.class.getCanonicalName(), DEF_CLASS_NAME); } @@ -252,22 +255,6 @@ public static Class typeToJavaType(Class type) { return type; } - /** - * Ensures a type exists based on the terminology specified as part of {@link PainlessLookupUtility}. Throws an - * {@link IllegalArgumentException} if the type does not exist. - */ - public static void validateType(Class type, Collection> classes) { - String canonicalTypeName = typeToCanonicalTypeName(type); - - while (type.getComponentType() != null) { - type = type.getComponentType(); - } - - if (classes.contains(type) == false) { - throw new IllegalArgumentException("type [" + canonicalTypeName + "] not found"); - } - } - /** * Converts a type to its boxed type equivalent if one exists based on the terminology specified as part of * {@link PainlessLookupUtility}. Otherwise, this behaves as an identity function. @@ -357,6 +344,11 @@ public static String buildPainlessFieldKey(String fieldName) { return fieldName; } + /** + * The name for an anonymous class. + */ + public static final String ANONYMOUS_CLASS_NAME = "$anonymous"; + /** * The def type name as specified in the source for a script. 
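 *
 * <p>Editor's note (illustrative, not part of this diff): typeToCanonicalTypeName
 * above now tolerates classes whose getCanonicalName() is null, mapping them to
 * the new ANONYMOUS_CLASS_NAME sentinel:
 * <pre>{@code
 * Class<?> anon = new Object() { }.getClass();
 * assert anon.getCanonicalName() == null; // anonymous classes have no canonical name
 * }</pre>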
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index 7605a0c9f7f40..1f9973df19224 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -24,7 +24,6 @@ import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.commons.Method; import java.util.List; @@ -59,8 +58,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); - method = locals.getMethod(methodKey); + method = locals.getMethod(name, arguments.size()); if (method == null) { throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e78b3c67210b8..a649fa7611c65 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; @@ -35,8 +34,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a capturing function reference. */ @@ -76,23 +73,8 @@ void analyze(Locals locals) { defPointer = null; // static case if (captured.clazz != def.class) { - try { - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, - PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < ref.interfaceMethod.typeParameters.size(); ++i) { - Class from = ref.interfaceMethod.typeParameters.get(i); - Class to = ref.delegateTypeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (ref.interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, ref.delegateReturnType, ref.interfaceMethod.returnType, false, true); - } - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, + expected, PainlessLookupUtility.typeToCanonicalTypeName(captured.clazz), call, 1); } actual = expected; } @@ -114,17 +96,7 @@ void write(MethodWriter writer, Globals globals) { } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 
1 : 0 - ); + writer.invokeLambdaCall(ref); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index b07613714b8ef..08236a965fe52 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -63,6 +63,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.to), child); + return singleLineToString(PainlessLookupUtility.typeToCanonicalTypeName(cast.targetType), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index c58d51e45cb4d..3ad3018c61e34 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -49,9 +49,9 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().canonicalTypeNameToType(type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index ead2e0c5f7070..c97cc66c7c7ca 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -19,22 +19,16 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.lookup.PainlessLookupUtility; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Type; import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Represents a function reference. */ @@ -63,39 +57,7 @@ void analyze(Locals locals) { defPointer = "S" + type + "." 
+ call + ",0"; } else { defPointer = null; - try { - if ("this".equals(type)) { - // user's own function - PainlessMethod interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod; - if (interfaceMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface"); - } - LocalMethod delegateMethod = locals.getMethod(Locals.buildLocalMethodKey(call, interfaceMethod.typeParameters.size())); - if (delegateMethod == null) { - throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], function not found"); - } - ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = delegateMethod.typeParameters.get(i); - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, delegateMethod.returnType, interfaceMethod.returnType, false, true); - } - } else { - // whitelist lookup - ref = FunctionRef.resolveFromLookup(locals.getPainlessLookup(), expected, type, call, 0); - } - - } catch (IllegalArgumentException e) { - throw createError(e); - } + ref = FunctionRef.create(locals.getPainlessLookup(), locals.getMethods(), location, expected, type, call, 0); actual = expected; } } @@ -104,17 +66,7 @@ void analyze(Locals locals) { void write(MethodWriter writer, Globals globals) { if (ref != null) { writer.writeDebugInfo(location); - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 
1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // TODO: don't do this: its just to cutover :) writer.push((String)null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 8585b7fc0bb54..73e4f176ea1ba 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -54,12 +54,11 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - Class clazz; // ensure the specified type is part of the definition - try { - clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); + + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index e84ab0065011a..af906416ca7bc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -19,11 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; -import org.elasticsearch.painless.Locals.LocalMethod; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; @@ -40,8 +38,6 @@ import java.util.Objects; import java.util.Set; -import static org.elasticsearch.painless.WriterConstants.LAMBDA_BOOTSTRAP_HANDLE; - /** * Lambda expression node. *
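 *
 * <p>Editor's note (hedged, inferred from this diff): ELambda, EFunctionRef, and
 * ECapturingFunctionRef now share one resolution path. The interface-method
 * lookup, delegate lookup, and per-parameter cast checks each node previously
 * performed by hand move into FunctionRef.create(...), and bytecode emission
 * collapses to writer.invokeLambdaCall(ref).
 */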
        @@ -119,9 +115,10 @@ void analyze(Locals locals) { actualParamTypeStrs.add(type); } } + } else { // we know the method statically, infer return type and any unknown/def types - interfaceMethod = locals.getPainlessLookup().lookupPainlessClass(expected).functionalMethod; + interfaceMethod = locals.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(expected); if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); @@ -173,7 +170,7 @@ void analyze(Locals locals) { desugared = new SFunction(reserved, location, PainlessLookupUtility.typeToCanonicalTypeName(returnType), name, paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); - desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, + desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), desugared.name, returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); // setup method reference to synthetic method @@ -183,25 +180,8 @@ void analyze(Locals locals) { defPointer = "Sthis." + name + "," + captures.size(); } else { defPointer = null; - try { - LocalMethod localMethod = - new LocalMethod(desugared.name, desugared.returnType, desugared.typeParameters, desugared.methodType); - ref = new FunctionRef(expected, interfaceMethod, localMethod, captures.size()); - } catch (IllegalArgumentException e) { - throw createError(e); - } - - // check casts between the interface method and the delegate method are legal - for (int i = 0; i < interfaceMethod.typeParameters.size(); ++i) { - Class from = interfaceMethod.typeParameters.get(i); - Class to = desugared.parameters.get(i + captures.size()).clazz; - AnalyzerCaster.getLegalCast(location, from, to, false, true); - } - - if (interfaceMethod.returnType != void.class) { - AnalyzerCaster.getLegalCast(location, desugared.returnType, interfaceMethod.returnType, false, true); - } - + ref = FunctionRef.create( + locals.getPainlessLookup(), locals.getMethods(), location, expected, "this", desugared.name, captures.size()); actual = expected; } } @@ -217,17 +197,7 @@ void write(MethodWriter writer, Globals globals) { writer.visitVarInsn(MethodWriter.getType(capture.clazz).getOpcode(Opcodes.ILOAD), capture.getSlot()); } - writer.invokeDynamic( - ref.interfaceMethodName, - ref.factoryDescriptor, - LAMBDA_BOOTSTRAP_HANDLE, - ref.interfaceType, - ref.delegateClassName, - ref.delegateInvokeType, - ref.delegateMethodName, - ref.delegateType, - ref.isDelegateInterface ? 1 : 0 - ); + writer.invokeLambdaCall(ref); } else { // placeholder writer.push((String)null); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index bd931558b620d..8c9154aaaf304 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -33,6 +33,8 @@ import java.util.List; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a list initialization shortcut. 
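 *
 * <p>Editor's sketch (assumed helper names, matching the convention below):
 * lookups report failures keyed by owner class plus name and arity, e.g.
 * <pre>{@code
 * PainlessMethod add = lookup.lookupPainlessMethod(ArrayList.class, false, "add", 1);
 *
 * if (add == null) {
 *     throw new IllegalArgumentException(
 *             "method [" + typeToCanonicalTypeName(ArrayList.class) + ", add/1] not found");
 * }
 * }</pre>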
*/ @@ -63,16 +65,17 @@ void analyze(Locals locals) { actual = ArrayList.class; - try { - constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); - } catch (IllegalArgumentException iae) { - throw createError(iae); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); + + if (constructor == null) { + throw createError(new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(actual) + ", /0] not found")); } - try { - method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1); - } catch (IllegalArgumentException iae) { - throw createError(iae); + method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1); + + if (method == null) { + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", add/1] not found")); } for (int index = 0; index < values.size(); ++index) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index 91332672c0510..11c12b2cd0a96 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -33,6 +33,8 @@ import java.util.List; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a map initialization shortcut. */ @@ -69,16 +71,17 @@ void analyze(Locals locals) { actual = HashMap.class; - try { - constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); - } catch (IllegalArgumentException iae) { - throw createError(iae); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, 0); + + if (constructor == null) { + throw createError(new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(actual) + ", /0] not found")); } - try { - method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2); - } catch (IllegalArgumentException iae) { - throw createError(iae); + method = locals.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2); + + if (method == null) { + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", put/2] not found")); } if (keys.size() != values.size()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index e0a49ebd6158e..cef005de9c3bc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -54,15 +54,13 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - if (!read) { - throw createError(new IllegalArgumentException("A newly created array must be read from.")); + if (!read) { + throw createError(new IllegalArgumentException("A newly created array must be read from.")); } - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 55ba60feb3e77..9423ed5d109de 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -32,6 +32,8 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents an object instantiation. */ @@ -58,16 +60,17 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(this.type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - try { - constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size()); - } catch (IllegalArgumentException iae) { - throw createError(iae); + constructor = locals.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size()); + + if (constructor == null) { + throw createError(new IllegalArgumentException( + "constructor [" + typeToCanonicalTypeName(actual) + ", /" + arguments.size() + "] not found")); } Class[] types = new Class[constructor.typeParameters.size()]; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index e5909d93e9dc2..0d8c94db0f1fc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -47,9 +47,9 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - try { - actual = locals.getPainlessLookup().canonicalTypeNameToType(type); - } catch (IllegalArgumentException exception) { + actual = locals.getPainlessLookup().canonicalTypeNameToType(type); + + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 9406b4ca41127..25ae1ed97742a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -30,6 +30,8 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a method call and defers to a child subnode.
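 *
 * <p>Editor's note (hedged, drawn from the hunk below): a receiver typed as def
 * defers method resolution to runtime, while any other receiver resolves
 * statically and fails fast:
 * <pre>{@code
 * if (prefix.actual == def.class) {
 *     sub = new PSubDefCall(location, name, arguments); // dynamic dispatch
 * } else {
 *     // static receiver: resolve now, or throw "method [...] not found"
 * }
 * }</pre>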
*/ @@ -67,13 +69,15 @@ void analyze(Locals locals) { if (prefix.actual == def.class) { sub = new PSubDefCall(location, name, arguments); } else { - try { - PainlessMethod method = - locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size()); - sub = new PSubCallInvoke(location, method, prefix.actual, arguments); - } catch (IllegalArgumentException iae) { - throw createError(iae); + PainlessMethod method = + locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size()); + + if (method == null) { + throw createError(new IllegalArgumentException( + "method [" + typeToCanonicalTypeName(prefix.actual) + ", " + name + "/" + arguments.size() + "] not found")); } + + sub = new PSubCallInvoke(location, method, prefix.actual, arguments); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 59cbfd405b7fd..7efd6a29899c4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -23,6 +23,7 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.def; @@ -32,6 +33,8 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; + /** * Represents a field load/store and defers to a child subnode. 
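 *
 * <p>Editor's sketch (accessor names assumed): when no whitelisted field
 * matches, the refactor below falls back to JavaBeans-style accessors,
 * preferring getX and then isX for the read side:
 * <pre>{@code
 * PainlessMethod getter = lookup.lookupPainlessMethod(owner, false, "getValue", 0);
 *
 * if (getter == null) {
 *     getter = lookup.lookupPainlessMethod(owner, false, "isValue", 0);
 * }
 * }</pre>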
*/ @@ -65,31 +68,22 @@ void analyze(Locals locals) { } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { - try { - sub = new PSubField(location, - locals.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value)); - } catch (IllegalArgumentException fieldIAE) { + PainlessField field = locals.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value); + + if (field == null) { PainlessMethod getter; PainlessMethod setter; - try { + getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, + "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); + + if (getter == null) { getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, - "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); - } catch (IllegalArgumentException getIAE) { - try { - getter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, - "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); - } catch (IllegalArgumentException isIAE) { - getter = null; - } + "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); } - try { - setter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, - "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); - } catch (IllegalArgumentException setIAE) { - setter = null; - } + setter = locals.getPainlessLookup().lookupPainlessMethod(prefix.actual, false, + "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0); if (getter != null || setter != null) { sub = new PSubShortcut(location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter); @@ -107,8 +101,11 @@ void analyze(Locals locals) { } if (sub == null) { - throw createError(fieldIAE); + throw createError(new IllegalArgumentException( + "field [" + typeToCanonicalTypeName(prefix.actual) + ", " + value + "] not found")); } + } else { + sub = new PSubField(location, field); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 838756fcc67b4..3bc4913fde940 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -57,12 +57,8 @@ void extractVariables(Set variables) { void analyze(Locals locals) { String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); - try { - getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); - setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "set", 2); - } catch (IllegalArgumentException iae) { - throw createError(iae); - } + getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); + setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "set", 2); if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1 || getter.typeParameters.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 27a3f69775aa9..0a0f099bd6841 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -56,12 +56,8 @@ void extractVariables(Set variables) { void analyze(Locals locals) { String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass); - try { - getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); - setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "put", 2); - } catch (IllegalArgumentException iae) { - throw createError(iae); - } + getter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1); + setter = locals.getPainlessLookup().lookupPainlessMethod(targetClass, false, "put", 2); if (getter != null && (getter.returnType == void.class || getter.typeParameters.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + canonicalClassName + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 04b0462b53383..0c8ba5de6b2cf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -64,11 +64,9 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index f3774885cfd58..7ead673c70b7a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -59,11 +59,9 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index a83f501df3292..cf41105c4fe36 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -68,11 +68,9 @@ void analyze(Locals locals) { expression.expected = expression.actual; expression = expression.cast(locals); - Class clazz; + Class clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - try { - clazz = locals.getPainlessLookup().canonicalTypeNameToType(this.type); - } catch (IllegalArgumentException exception) { + if (clazz == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 8230b5436979f..6fe09627f9dfd 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -20,20 +20,16 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Constant; -import org.elasticsearch.painless.Def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.node.SSource.Reserved; import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.Handle; import org.objectweb.asm.Opcodes; import java.lang.invoke.MethodType; @@ -46,7 +42,6 @@ import static java.util.Collections.emptyList; import static java.util.Collections.unmodifiableSet; -import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; /** * Represents a user-defined function. @@ -120,9 +115,9 @@ void extractVariables(Set variables) { } void generateSignature(PainlessLookup painlessLookup) { - try { - returnType = painlessLookup.canonicalTypeNameToType(rtnTypeStr); - } catch (IllegalArgumentException exception) { + returnType = painlessLookup.canonicalTypeNameToType(rtnTypeStr); + + if (returnType == null) { throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "].")); } @@ -134,16 +129,16 @@ void generateSignature(PainlessLookup painlessLookup) { List> paramTypes = new ArrayList<>(); for (int param = 0; param < this.paramTypeStrs.size(); ++param) { - try { Class paramType = painlessLookup.canonicalTypeNameToType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); - paramTypes.add(paramType); - parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); - } catch (IllegalArgumentException exception) { + if (paramType == null) { throw createError(new IllegalArgumentException( "Illegal parameter type [" + this.paramTypeStrs.get(param) + "] for function [" + name + "].")); } + + paramClasses[param] = PainlessLookupUtility.typeToJavaType(paramType); + paramTypes.add(paramType); + parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } typeParameters = paramTypes; @@ -218,15 +213,6 @@ void write(MethodWriter function, Globals globals) { throw createError(new IllegalStateException("Illegal tree structure.")); } } - - String staticHandleFieldName = Def.getUserFunctionHandleFieldName(name, parameters.size()); - globals.addConstantInitializer(new Constant(location, WriterConstants.METHOD_HANDLE_TYPE, - staticHandleFieldName, this::initializeConstant)); - } - - private void initializeConstant(MethodWriter writer) { - final Handle handle = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), name, method.getDescriptor(), false); - writer.push(handle); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index e64d52739a4dc..8aa72707b16fe 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -69,6 +69,7 @@ import static org.elasticsearch.painless.WriterConstants.GET_NAME_METHOD; import static org.elasticsearch.painless.WriterConstants.GET_SOURCE_METHOD; import static org.elasticsearch.painless.WriterConstants.GET_STATEMENTS_METHOD; +import static org.elasticsearch.painless.WriterConstants.MAP_TYPE; import static org.elasticsearch.painless.WriterConstants.OUT_OF_MEMORY_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.PAINLESS_ERROR_TYPE; import static org.elasticsearch.painless.WriterConstants.PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD; @@ -166,7 +167,7 @@ void extractVariables(Set variables) { throw new IllegalStateException("Illegal tree structure."); } - public void analyze(PainlessLookup painlessLookup) { + public Map analyze(PainlessLookup painlessLookup) { Map methods = new HashMap<>(); for (SFunction function : functions) { @@ -180,7 +181,10 @@ public void analyze(PainlessLookup painlessLookup) { } } - analyze(Locals.newProgramScope(painlessLookup, methods.values())); + Locals locals = Locals.newProgramScope(painlessLookup, methods.values()); + analyze(locals); + + return locals.getMethods(); } @Override @@ -256,6 +260,7 @@ public void write() { globals.getStatements(), settings); bootstrapDef.visitCode(); bootstrapDef.getStatic(CLASS_TYPE, "$DEFINITION", DEFINITION_TYPE); + bootstrapDef.getStatic(CLASS_TYPE, "$LOCALS", MAP_TYPE); bootstrapDef.loadArgs(); bootstrapDef.invokeStatic(DEF_BOOTSTRAP_DELEGATE_TYPE, DEF_BOOTSTRAP_DELEGATE_METHOD); bootstrapDef.returnValue(); @@ -266,8 +271,9 @@ public void write() { visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$SOURCE", STRING_TYPE.getDescriptor(), null, null).visitEnd(); visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$STATEMENTS", BITSET_TYPE.getDescriptor(), null, null).visitEnd(); - // Write the static variable used by the method to bootstrap def calls + // Write the static variables used by the method to bootstrap def calls visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$DEFINITION", DEFINITION_TYPE.getDescriptor(), null, null).visitEnd(); + visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$LOCALS", MAP_TYPE.getDescriptor(), null, null).visitEnd(); org.objectweb.asm.commons.Method init; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 577d1d51d09b0..46dfa056874f2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -40,6 +40,7 @@ import static org.elasticsearch.painless.WriterConstants.ITERATOR_HASNEXT; import static org.elasticsearch.painless.WriterConstants.ITERATOR_NEXT; import static org.elasticsearch.painless.WriterConstants.ITERATOR_TYPE; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typeToCanonicalTypeName; /** * Represents a for-each loop for iterables. 
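 *
 * <p>Editor's note (hedged, inferred from the SSource changes above): the
 * generated script class now publishes a second bootstrap static, $LOCALS, a
 * map of user-defined functions that DefBootstrap consults alongside
 * $DEFINITION when linking def call sites.
 */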
@@ -76,10 +77,11 @@ void analyze(Locals locals) { if (expression.actual == def.class) { method = null; } else { - try { - method = locals.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0); - } catch (IllegalArgumentException iae) { - throw createError(iae); + method = locals.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0); + + if (method == null) { + throw createError(new IllegalArgumentException( + "method [" + typeToCanonicalTypeName(expression.actual) + ", iterator/0] not found")); } } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 9b0408b018bca..b3d9040cc6295 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -24,31 +24,31 @@ #### Primitive types -class void only_fqn { +class void no_import { } -class boolean only_fqn { +class boolean no_import { } -class byte only_fqn { +class byte no_import { } -class short only_fqn { +class short no_import { } -class char only_fqn { +class char no_import { } -class int only_fqn { +class int no_import { } -class long only_fqn { +class long no_import { } -class float only_fqn { +class float no_import { } -class double only_fqn { +class double no_import { } #### Painless debugging API @@ -138,7 +138,7 @@ class org.elasticsearch.index.mapper.IpFieldMapper$IpFieldType$IpScriptDocValues # for testing. # currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods -class org.elasticsearch.painless.FeatureTest only_fqn { +class org.elasticsearch.painless.FeatureTest no_import { int z () (int,int) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java index 34bc2c78de662..58864d73c4120 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java @@ -35,8 +35,8 @@ private static void assertCast(Class actual, Class expected, boolean mustB } PainlessCast cast = AnalyzerCaster.getLegalCast(location, actual, expected, true, false); - assertEquals(actual, cast.from); - assertEquals(expected, cast.to); + assertEquals(actual, cast.originalType); + assertEquals(expected, cast.targetType); if (mustBeExplicit) { ClassCastException error = expectThrows(ClassCastException.class, @@ -44,8 +44,8 @@ private static void assertCast(Class actual, Class expected, boolean mustB assertTrue(error.getMessage().startsWith("Cannot cast")); } else { cast = AnalyzerCaster.getLegalCast(location, actual, expected, false, false); - assertEquals(actual, cast.from); - assertEquals(expected, cast.to); + assertEquals(actual, cast.originalType); + assertEquals(expected, cast.targetType); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java new file mode 100644 index 0000000000000..15eed75bcb8df --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ContextExampleTests.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more 
contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless; + +/** + * These tests run the Painless scripts used in the context docs against + * slightly modified data designed around unit tests rather than a fully- + * running Elasticsearch server. + */ +public class ContextExampleTests extends ScriptTestCase { + + // **** Docs Generator Code **** + + /* + + import java.io.FileWriter; + import java.io.IOException; + + public class Generator { + + public final static String[] theatres = new String[] {"Down Port", "Graye", "Skyline", "Courtyard"}; + public final static String[] plays = new String[] {"Driving", "Pick It Up", "Sway and Pull", "Harriot", + "The Busline", "Ants Underground", "Exploria", "Line and Single", "Shafted", "Sunnyside Down", + "Test Run", "Auntie Jo"}; + public final static String[] actors = new String[] {"James Holland", "Krissy Smith", "Joe Muir", "Ryan Earns", + "Joel Madigan", "Jessica Brown", "Baz Knight", "Jo Hangum", "Rachel Grass", "Phoebe Miller", "Sarah Notch", + "Brayden Green", "Joshua Iller", "Jon Hittle", "Rob Kettleman", "Laura Conrad", "Simon Hower", "Nora Blue", + "Mike Candlestick", "Jacey Bell"}; + + public static void writeSeat(FileWriter writer, int id, String theatre, String play, String[] actors, + String date, String time, int row, int number, double cost, boolean sold) throws IOException { + StringBuilder builder = new StringBuilder(); + builder.append("{ \"create\" : { \"_index\" : \"seats\", \"_type\" : \"seat\", \"_id\" : \""); + builder.append(id); + builder.append("\" } }\n"); + builder.append("{ \"theatre\" : \""); + builder.append(theatre); + builder.append("\", \"play\" : \""); + builder.append(play); + builder.append("\", \"actors\": [ \""); + for (String actor : actors) { + builder.append(actor); + if (actor.equals(actors[actors.length - 1]) == false) { + builder.append("\", \""); + } + } + builder.append("\" ], \"date\": \""); + builder.append(date); + builder.append("\", \"time\": \""); + builder.append(time); + builder.append("\", \"row\": "); + builder.append(row); + builder.append(", \"number\": "); + builder.append(number); + builder.append(", \"cost\": "); + builder.append(cost); + builder.append(", \"sold\": "); + builder.append(sold ? 
"true" : "false"); + builder.append(" }\n"); + writer.write(builder.toString()); + } + + public static void main(String args[]) throws IOException { + FileWriter writer = new FileWriter("/home/jdconrad/test/seats.json"); + int id = 0; + + for (int playCount = 0; playCount < 12; ++playCount) { + String play = plays[playCount]; + String theatre; + String[] actor; + int startMonth; + int endMonth; + String time; + + if (playCount == 0) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3]}; + startMonth = 4; + endMonth = 5; + time = "3:00PM"; + } else if (playCount == 1) { + theatre = theatres[0]; + actor = new String[] {actors[4], actors[5], actors[6], actors[7], actors[8], actors[9]}; + startMonth = 4; + endMonth = 6; + time = "8:00PM"; + } else if (playCount == 2) { + theatre = theatres[0]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], + actors[4], actors[5], actors[6], actors[7]}; + startMonth = 6; + endMonth = 8; + time = "3:00 PM"; + } else if (playCount == 3) { + theatre = theatres[0]; + actor = new String[] {actors[9], actors[10], actors[11], actors[12], actors[13], actors[14], + actors[15], actors[16], actors[17], actors[18], actors[19]}; + startMonth = 7; + endMonth = 8; + time = "8:00PM"; + } else if (playCount == 4) { + theatre = theatres[0]; + actor = new String[] {actors[13], actors[14], actors[15], actors[17], actors[18], actors[19]}; + startMonth = 8; + endMonth = 10; + time = "3:00PM"; + } else if (playCount == 5) { + theatre = theatres[0]; + actor = new String[] {actors[8], actors[9], actors[10], actors[11], actors[12]}; + startMonth = 8; + endMonth = 10; + time = "8:00PM"; + } else if (playCount == 6) { + theatre = theatres[1]; + actor = new String[] {actors[10], actors[11], actors[12], actors[13], actors[14], actors[15], actors[16]}; + startMonth = 4; + endMonth = 5; + time = "11:00AM"; + } else if (playCount == 7) { + theatre = theatres[1]; + actor = new String[] {actors[17], actors[18]}; + startMonth = 6; + endMonth = 9; + time = "2:00PM"; + } else if (playCount == 8) { + theatre = theatres[1]; + actor = new String[] {actors[0], actors[1], actors[2], actors[3], actors[16]}; + startMonth = 10; + endMonth = 11; + time = "11:00AM"; + } else if (playCount == 9) { + theatre = theatres[2]; + actor = new String[] {actors[1], actors[2], actors[3], actors[17], actors[18], actors[19]}; + startMonth = 3; + endMonth = 6; + time = "4:00PM"; + } else if (playCount == 10) { + theatre = theatres[2]; + actor = new String[] {actors[2], actors[3], actors[4], actors[5]}; + startMonth = 7; + endMonth = 8; + time = "7:30PM"; + } else if (playCount == 11) { + theatre = theatres[2]; + actor = new String[] {actors[7], actors[13], actors[14], actors[15], actors[16], actors[17]}; + startMonth = 9; + endMonth = 12; + time = "5:40PM"; + } else { + throw new RuntimeException("too many plays"); + } + + int rows; + int number; + + if (playCount < 6) { + rows = 3; + number = 12; + } else if (playCount < 9) { + rows = 5; + number = 9; + } else if (playCount < 12) { + rows = 11; + number = 15; + } else { + throw new RuntimeException("too many seats"); + } + + for (int month = startMonth; month <= endMonth; ++month) { + for (int day = 1; day <= 14; ++day) { + for (int row = 1; row <= rows; ++row) { + for (int count = 1; count <= number; ++count) { + String date = "2018-" + month + "-" + day; + double cost = (25 - row) * 1.25; + + writeSeat(writer, ++id, theatre, play, actor, date, time, row, count, cost, false); + } + } + } + } + } + + 
writer.write("\n"); + writer.close(); + } + } + + */ + + // **** Initial Mappings **** + + /* + + curl -X PUT "localhost:9200/seats" -H 'Content-Type: application/json' -d' + { + "mappings": { + "seat": { + "properties": { + "theatre": { "type": "keyword" }, + "play": { "type": "text" }, + "actors": { "type": "text" }, + "row": { "type": "integer" }, + "number": { "type": "integer" }, + "cost": { "type": "double" }, + "sold": { "type": "boolean" }, + "datetime": { "type": "date" }, + "date": { "type": "keyword" }, + "time": { "type": "keyword" } + } + } + } + } + ' + + */ + + // Create Ingest to Modify Dates: + + /* + + curl -X PUT "localhost:9200/_ingest/pipeline/seats" -H 'Content-Type: application/json' -d' + { + "description": "update datetime for seats", + "processors": [ + { + "script": { + "source": "String[] split(String s, char d) { int count = 0; for (char c : s.toCharArray()) { if (c == d) { ++count; } } if (count == 0) { return new String[] {s}; } String[] r = new String[count + 1]; int i0 = 0, i1 = 0; count = 0; for (char c : s.toCharArray()) { if (c == d) { r[count++] = s.substring(i0, i1); i0 = i1 + 1; } ++i1; } r[count] = s.substring(i0, i1); return r; } String[] dateSplit = split(ctx.date, (char)\"-\"); String year = dateSplit[0].trim(); String month = dateSplit[1].trim(); if (month.length() == 1) { month = \"0\" + month; } String day = dateSplit[2].trim(); if (day.length() == 1) { day = \"0\" + day; } boolean pm = ctx.time.substring(ctx.time.length() - 2).equals(\"PM\"); String[] timeSplit = split(ctx.time.substring(0, ctx.time.length() - 2), (char)\":\"); int hours = Integer.parseInt(timeSplit[0].trim()); int minutes = Integer.parseInt(timeSplit[1].trim()); if (pm) { hours += 12; } String dts = year + \"-\" + month + \"-\" + day + \"T\" + (hours < 10 ? \"0\" + hours : \"\" + hours) + \":\" + (minutes < 10 ? \"0\" + minutes : \"\" + minutes) + \":00+08:00\"; ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME); ctx.datetime = dt.getLong(ChronoField.INSTANT_SECONDS)*1000L;" + } + } + ] + } + ' + + */ + + public void testIngestProcessorScript() { + assertEquals(1535785200000L, + exec("String[] split(String s, char d) {" + + " int count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " ++count;" + + " }" + + " }" + + " if (count == 0) {" + + " return new String[] {s};" + + " }" + + " String[] r = new String[count + 1];" + + " int i0 = 0, i1 = 0;" + + " count = 0;" + + " for (char c : s.toCharArray()) {" + + " if (c == d) {" + + " r[count++] = s.substring(i0, i1);" + + " i0 = i1 + 1;" + + " }" + + " ++i1;" + + " }" + + " r[count] = s.substring(i0, i1);" + + " return r;" + + "}" + + "def x = ['date': '2018-9-1', 'time': '3:00 PM'];" + + "String[] dateSplit = split(x.date, (char)'-');" + + "String year = dateSplit[0].trim();" + + "String month = dateSplit[1].trim();" + + "if (month.length() == 1) {" + + " month = '0' + month;" + + "}" + + "String day = dateSplit[2].trim();" + + "if (day.length() == 1) {" + + " day = '0' + day;" + + "}" + + "boolean pm = x.time.substring(x.time.length() - 2).equals('PM');" + + "String[] timeSplit = split(x.time.substring(0, x.time.length() - 2), (char)':');" + + "int hours = Integer.parseInt(timeSplit[0].trim());" + + "String minutes = timeSplit[1].trim();" + + "if (pm) {" + + " hours += 12;" + + "}" + + "String dts = year + '-' + month + '-' + day + " + + "'T' + (hours < 10 ? 
'0' + hours : '' + hours) + ':' + minutes + ':00+08:00';" + + "ZonedDateTime dt = ZonedDateTime.parse(dts, DateTimeFormatter.ISO_OFFSET_DATE_TIME);" + + "return dt.getLong(ChronoField.INSTANT_SECONDS) * 1000L" + ) + ); + } + + // Post Generated Data: + + /* + + curl -XPOST localhost:9200/seats/seat/_bulk?pipeline=seats -H "Content-Type: application/x-ndjson" --data-binary "@/home/jdconrad/test/seats.json" + + */ +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index 799be9f93f031..c4b85521e098f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -38,6 +38,7 @@ public class DefBootstrapTests extends ESTestCase { /** calls toString() on integers, twice */ public void testOneType() throws Throwable { CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -58,6 +59,7 @@ public void testOneType() throws Throwable { public void testTwoTypes() throws Throwable { CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -83,6 +85,7 @@ public void testTooManyTypes() throws Throwable { // if this changes, test must be rewritten assertEquals(5, DefBootstrap.PIC.MAX_DEPTH); CallSite site = DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -109,6 +112,7 @@ public void testTooManyTypes() throws Throwable { /** test that we revert to the megamorphic classvalue cache and that it works as expected */ public void testMegamorphic() throws Throwable { DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "size", MethodType.methodType(int.class, Object.class), @@ -130,7 +134,7 @@ public void testMegamorphic() throws Throwable { final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> { Integer.toString((int)handle.invokeExact(new Object())); }); - assertEquals("Unable to find dynamic method [size] with [0] arguments for class [java.lang.Object].", iae.getMessage()); + assertEquals("dynamic method [java.lang.Object, size/0] not found", iae.getMessage()); assertTrue("Does not fail inside ClassValue.computeValue()", Arrays.stream(iae.getStackTrace()).anyMatch(e -> { return e.getMethodName().equals("computeValue") && e.getClassName().startsWith("org.elasticsearch.painless.DefBootstrap$PIC$"); @@ -141,6 +145,7 @@ public void testMegamorphic() throws Throwable { public void testNullGuardAdd() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -153,6 +158,7 @@ public void testNullGuardAdd() throws Throwable { public void testNullGuardAddWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -166,6 +172,7 @@ public 
void testNullGuardAddWhenCached() throws Throwable { public void testNullGuardEq() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -179,6 +186,7 @@ public void testNullGuardEq() throws Throwable { public void testNullGuardEqWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -197,6 +205,7 @@ public void testNullGuardEqWhenCached() throws Throwable { public void testNoNullGuardAdd() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), @@ -211,6 +220,7 @@ public void testNoNullGuardAdd() throws Throwable { public void testNoNullGuardAddWhenCached() throws Throwable { DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, + Collections.emptyMap(), MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java index fd47db6b83d41..5829593f52441 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/FunctionRefTests.java @@ -27,7 +27,6 @@ import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; -import static org.hamcrest.Matchers.startsWith; public class FunctionRefTests extends ScriptTestCase { @@ -193,14 +192,15 @@ public void testMethodMissing() { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);"); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator")); } public void testQualifiedMethodMissing() { Exception e = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = [2, 1]; l.sort(org.joda.time.ReadableDateTime::bogus); return l.get(0);", false); }); - assertThat(e.getMessage(), startsWith("Unknown reference")); + assertThat(e.getMessage(), + containsString("function reference [org.joda.time.ReadableDateTime::bogus/2] matching [java.util.Comparator")); } public void testClassMissing() { @@ -223,11 +223,12 @@ public void testNotFunctionalInterface() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);"); }); - assertThat(expected.getMessage(), containsString("Cannot convert function reference")); + assertThat(expected.getMessage(), + containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]")); } public void testIncompatible() { - expectScriptThrows(BootstrapMethodError.class, () -> { + expectScriptThrows(ClassCastException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); 
l.sort(String::startsWith); return l.get(0);"); }); } @@ -236,28 +237,32 @@ public void testWrongArity() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("Optional.empty().orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnough() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertTrue(expected.getMessage().contains("Unknown reference")); + assertThat(expected.getMessage(), containsString( + "function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testWrongArityDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier")); } public void testWrongArityNotEnoughDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);"); }); - assertThat(expected.getMessage(), containsString("Unknown reference")); + assertThat(expected.getMessage(), + containsString("function reference [String::isEmpty/2] matching [java.util.Comparator")); } public void testReturnVoid() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index 20e257e574709..1f1a6f95b3608 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -184,7 +184,7 @@ public void testWrongArityDef() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def y = Optional.empty(); return y.orElseGet(x -> x);"); }); - assertTrue(expected.getMessage(), expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testWrongArityNotEnough() { @@ -200,7 +200,7 @@ public void testWrongArityNotEnoughDef() { exec("def l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); }); - assertTrue(expected.getMessage().contains("Incorrect number of parameters")); + assertTrue(expected.getMessage(), expected.getMessage().contains("due to an incorrect number of arguments")); } public void testLambdaInFunction() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java index 1b90d58299953..52c28799fae34 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/OverloadTests.java @@ -37,7 +37,7 @@ public void testMethodDynamic() { IllegalArgumentException expected = 
expectScriptThrows(IllegalArgumentException.class, () -> { exec("def x = 'abc123abc'; return x.indexOf('c', 3, 'bogus');"); }); - assertTrue(expected.getMessage().contains("dynamic method [indexOf]")); + assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, indexOf/3] not found")); } public void testConstructor() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index 8eeb25c9676c7..f2d93aa759d07 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java @@ -219,7 +219,7 @@ public void testIllegalDynamicMethod() { IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { exec("def x = 'test'; return x.getClass().toString()"); }); - assertTrue(expected.getMessage().contains("Unable to find dynamic method")); + assertTrue(expected.getMessage().contains("dynamic method [java.lang.String, getClass/0] not found")); } public void testDynamicNPE() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index f6ad38f997ed4..12d57fab11d98 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -162,12 +162,12 @@ public void testECapturingFunctionRef() { public void testECast() { Location l = new Location(getTestName(), 0); AExpression child = new EConstant(l, "test"); - PainlessCast cast = PainlessCast.standard(String.class, Integer.class, true); + PainlessCast cast = PainlessCast.originalTypetoTargetType(String.class, Integer.class, true); assertEquals("(ECast java.lang.Integer (EConstant String 'test'))", new ECast(l, child, cast).toString()); l = new Location(getTestName(), 1); child = new EBinary(l, Operation.ADD, new EConstant(l, "test"), new EConstant(l, 12)); - cast = PainlessCast.standard(Integer.class, Boolean.class, true); + cast = PainlessCast.originalTypetoTargetType(Integer.class, Boolean.class, true); assertEquals("(ECast java.lang.Boolean (EBinary (EConstant String 'test') + (EConstant Integer 12)))", new ECast(l, child, cast).toString()); } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml index 20047e7d4825d..f2e1cb616b980 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/15_update.yml @@ -132,7 +132,7 @@ body: script: lang: painless - source: "for (def key : params.keySet()) { ctx._source[key] = params[key]}" + source: "ctx._source.ctx = ctx" params: { bar: 'xxx' } - match: { error.root_cause.0.type: "remote_transport_exception" } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 439fe42fd6d22..a6410d4714795 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ 
b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -88,9 +88,9 @@ protected Collection> getPlugins() { } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index 30c0f55ed85fb..2fa5bbfe993bb 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -73,9 +73,9 @@ protected Collection> getPlugins() { } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java index 2e6deae470863..ea4bc4a71b648 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasChildQueryBuilderTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.join.query; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -111,9 +112,9 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put("index.version.created", Version.V_5_6_0) // multi type .build(); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java index 535276a35d0bb..8efbefce5c7cf 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyHasParentQueryBuilderTests.java @@ -70,9 +70,9 @@ protected Collection> getPlugins() { } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put("index.version.created", Version.V_5_6_0) // legacy needs multi types .build(); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java index 74f3e30d63840..c6e2dbb82a884 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java +++ 
b/modules/parent-join/src/test/java/org/elasticsearch/join/query/LegacyParentIdQueryBuilderTests.java @@ -57,9 +57,9 @@ protected Collection> getPlugins() { } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put("index.version.created", Version.V_5_6_0) // legacy needs multi type .build(); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java index 43f91fc6cd0a3..8037b0346ed8b 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/ParentIdQueryBuilderTests.java @@ -62,9 +62,9 @@ protected Collection> getPlugins() { } @Override - protected Settings indexSettings() { + protected Settings createTestIndexSettings() { return Settings.builder() - .put(super.indexSettings()) + .put(super.createTestIndexSettings()) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .build(); } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 9954095ba01dd..84ba7fa65b736 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -772,7 +772,7 @@ public BitSetProducer bitsetFilter(Query query) { @Override @SuppressWarnings("unchecked") public > IFD getForField(MappedFieldType fieldType) { - IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndexName()); + IndexFieldData.Builder builder = fieldType.fielddataBuilder(shardContext.getFullyQualifiedIndex().getName()); IndexFieldDataCache cache = new IndexFieldDataCache.None(); CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService(); return (IFD) builder.build(shardContext.getIndexSettings(), fieldType, cache, circuitBreaker, @@ -780,5 +780,4 @@ public > IFD getForField(MappedFieldType fieldType } }; } - } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 660cbf291c801..4bd3430691e24 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -255,7 +255,7 @@ public void testRequiredParameters() { public void testFromJsonNoDocumentType() throws IOException { QueryShardContext queryShardContext = createShardContext(); QueryBuilder queryBuilder = parseQuery("{\"percolate\" : { \"document\": {}, \"field\":\"" + queryField + "\"}}"); - if (indexVersionCreated.before(Version.V_6_0_0_alpha1)) { + if (indexSettings().getIndexVersionCreated().before(Version.V_6_0_0_alpha1)) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> queryBuilder.toQuery(queryShardContext)); assertThat(e.getMessage(), equalTo("[percolate] query is missing required [document_type] parameter")); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index f1302c5318ab4..009642767842e 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -49,9 +49,9 @@ import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.UpdateScript; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -772,7 +772,7 @@ public abstract static class ScriptApplier implements BiFunction params; - private ExecutableScript executable; + private UpdateScript executable; private Map context; public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, @@ -792,7 +792,7 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi return request; } if (executable == null) { - ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT); + UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT); executable = factory.newInstance(params); } if (context == null) { @@ -815,8 +815,7 @@ public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hi OpType oldOpType = OpType.INDEX; context.put("op", oldOpType.toString()); - executable.setNextVar("ctx", context); - executable.run(); + executable.execute(context); String newOp = (String) context.remove("op"); if (newOp == null) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java index 955c99568c7d2..94f375e9333ed 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollActionScriptTestCase.java @@ -26,8 +26,10 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.UpdateScript; import org.junit.Before; +import java.util.Collections; import java.util.Map; import java.util.function.Consumer; @@ -54,10 +56,16 @@ public void setupScriptService() { protected T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); - ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); - ExecutableScript.Factory factory = params -> executableScript; - when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(factory); - when(scriptService.compile(any(), eq(ExecutableScript.UPDATE_CONTEXT))).thenReturn(factory); + UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) { + @Override + public void execute(Map ctx) { + scriptBody.accept(ctx); + } + }; + UpdateScript.Factory factory = params -> updateScript; + ExecutableScript simpleExecutableScript = new 
SimpleExecutableScript(scriptBody); + when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript); + when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); AbstractAsyncBulkByScrollAction action = action(scriptService, request().setScript(mockScript(""))); RequestWrapper result = action.buildScriptApplier().apply(AbstractAsyncBulkByScrollAction.wrap(index), doc); return (result != null) ? (T) result.self() : null; diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index ad5a7b7c57b61..1883e3bf1b9d6 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -30,9 +30,9 @@ forbiddenApis { dependencies { compile "org.apache.lucene:lucene-analyzers-icu:${versions.lucene}" - compile 'com.ibm.icu:icu4j:62.1' + compile "com.ibm.icu:icu4j:${versions.icu4j}" } dependencyLicenses { mapping from: /lucene-.*/, to: 'lucene' -} \ No newline at end of file +} diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..1e79e1e70ef8f --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +a010e852be8d56efe1906e6da5292e4541239724 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 5b6947a9c7578..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a37816def72a748416c4ae8b0f6817e30efb99f \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..2d9669e436229 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +88e0ed90d433a9088528485cd4f59311735d92a4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d39638c188466..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ca7437178cdbf7b8bfe0d75c75e3c8eb93925724 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..f7b8fdd4bc187 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +0daec9ac3c4bba5f91b1bc413c651b7a98313982 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 21c25d2bb2404..0000000000000 --- 
a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f5dec44f380d6d58bc1c8aec51964fcb5390b60 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..80cf627011b4e --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +f5af81eec04c1da0d6969cff18f360ff379b1bf7 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index f58c597eadd6d..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -453bf1d60df0415439095624e0b3e42492ad4716 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..14be684b96f3d --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +9e649088ee298293aa95a05391dff9cb0582648e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 8ccec8dbf3786..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -70095a45257bca9f46629b5fb6cedf9eff5e2b07 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..ea55c790537f4 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +47fb370054ba7413d050f13c177edf01180c31ca \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ec9c33119f556..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7199d6962d268b7877f7b5160e98e4ff21cce5c7 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..2d6f580c35a23 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +bc0708acbac195772b67b5ad2e9c4683d27ff450 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 
b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index ba9148ef1b32a..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -12aff508d39d206a1aead5013ecd11882062eb06 \ No newline at end of file diff --git a/plugins/ingest-geoip/licenses/jackson-annotations-2.8.10.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-annotations-2.8.10.jar.sha1 deleted file mode 100644 index 16addfe35076c..0000000000000 --- a/plugins/ingest-geoip/licenses/jackson-annotations-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e924646d6f893bc9036939c5f2b4ecaee85e5da \ No newline at end of file diff --git a/plugins/ingest-geoip/licenses/jackson-annotations-2.8.11.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-annotations-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..30e7d1a7b1a74 --- /dev/null +++ b/plugins/ingest-geoip/licenses/jackson-annotations-2.8.11.jar.sha1 @@ -0,0 +1 @@ +391de20b4e29cb3fb07d2454ace64be2c82ac91f \ No newline at end of file diff --git a/plugins/ingest-geoip/licenses/jackson-databind-2.8.10.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-databind-2.8.10.jar.sha1 deleted file mode 100644 index f8c5539424553..0000000000000 --- a/plugins/ingest-geoip/licenses/jackson-databind-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7b83cb2bc4b88d53961e749e1ad32f49ef017b7 \ No newline at end of file diff --git a/plugins/ingest-geoip/licenses/jackson-databind-2.8.11.jar.sha1 b/plugins/ingest-geoip/licenses/jackson-databind-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..53d6c1fa20834 --- /dev/null +++ b/plugins/ingest-geoip/licenses/jackson-databind-2.8.11.jar.sha1 @@ -0,0 +1 @@ +0569a9f220273024523799dba9dd358121b0ee09 \ No newline at end of file diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 304e0f4ae0e1f..a02e748d180c8 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -565,7 +565,6 @@ thirdPartyAudit.excludes = [ // we are not pulling in slf4j-ext, this is okay, Log4j will fallback gracefully 'org.slf4j.ext.EventData', - 'org.apache.log4j.AppenderSkeleton', 'org.apache.log4j.AsyncAppender', 'org.apache.log4j.helpers.ISO8601DateFormat', 'org.apache.log4j.spi.ThrowableInformation', diff --git a/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.11.1.jar.sha1 b/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..6178556b31848 --- /dev/null +++ b/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.11.1.jar.sha1 @@ -0,0 +1 @@ +4b41b53a3a2d299ce381a69d165381ca19f62912 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.9.1.jar.sha1 b/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.9.1.jar.sha1 deleted file mode 100644 index 66119e87e211f..0000000000000 --- a/plugins/repository-hdfs/licenses/log4j-slf4j-impl-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0a97a849b18b3798c4af1a2ca5b10c66cef17e3a \ No newline at end of file diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 6001ed570652e..7f0ca209db797 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -132,7 +132,7 @@ if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3T final String minioVersion = 'RELEASE.2018-06-22T23-48-46Z' final String minioBinDir = "${buildDir}/minio/bin" 
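The repository-s3 build change just below replaces Minio's hard-coded port 60920 with a probe over the 60920-60939 range. As a minimal standalone sketch of that probing idea in plain Java (illustrative only; the class and method names here are hypothetical and not part of this change):

import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import javax.net.ServerSocketFactory;

class FreePortProbe {
    // Try each candidate port in order; a successful bind-and-close marks it free.
    static int findFreePort(InetAddress address, int from, int to) throws IOException {
        for (int port = from; port < to; port++) {
            try {
                // bind with a backlog of 1, then close immediately so the real
                // process (Minio here) can claim the port right afterwards
                ServerSocketFactory.getDefault().createServerSocket(port, 1, address).close();
                return port;
            } catch (BindException e) {
                // already taken, try the next candidate
            }
        }
        throw new IOException("no free port found in [" + from + ", " + to + ")");
    }
}

Probing like this is inherently racy, since the port can be taken between the close() and Minio's own bind, which is presumably why the change also asserts later that the port Minio reports matches the one that was probed.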
final String minioDataDir = "${buildDir}/minio/data" -final String minioAddress = "127.0.0.1:60920" +final String minioAddress = "127.0.0.1" final String minioDistribution final String minioCheckSum @@ -187,15 +187,30 @@ if (useFixture && minioDistribution) { dependsOn installMinio ext.minioPid = 0L + ext.minioPort = 0 doLast { + // get free port + for (int port = 60920; port < 60940; port++) { + try { + javax.net.ServerSocketFactory.getDefault().createServerSocket(port, 1, InetAddress.getByName(minioAddress)).close() + minioPort = port + break + } catch (BindException e) { + logger.info("Port " + port + " for Minio process is already taken", e) + } + } + if (minioPort == 0) { + throw new GradleException("Could not find a free port for Minio") + } + new File("${minioDataDir}/${s3PermanentBucket}").mkdirs() // we skip these tests on Windows so we do not need to worry about compatibility here final ProcessBuilder minio = new ProcessBuilder( "${minioBinDir}/${minioFileName}", "server", "--address", - minioAddress, + minioAddress + ":" + minioPort, minioDataDir) minio.environment().put('MINIO_ACCESS_KEY', s3PermanentAccessKey) minio.environment().put('MINIO_SECRET_KEY', s3PermanentSecretKey) @@ -227,6 +242,7 @@ if (useFixture && minioDistribution) { final int index = line.lastIndexOf(":") assert index >= 0 httpPort = Integer.parseInt(line.substring(index + 1)) + assert httpPort == minioPort : "Port mismatch, expected ${minioPort} but was ${httpPort}" final File script = new File(project.buildDir, "minio/minio.killer.sh") script.setText( @@ -269,10 +285,15 @@ if (useFixture && minioDistribution) { project.afterEvaluate { ClusterConfiguration cluster = project.extensions.getByName('integTestMinioCluster') as ClusterConfiguration cluster.dependsOn(project.bundlePlugin) + cluster.dependsOn(startMinio) // otherwise we don't know the Minio port cluster.keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey cluster.keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey - cluster.setting 's3.client.integration_test_permanent.endpoint', "http://${minioAddress}" + Closure minioAddressAndPort = { + assert startMinio.minioPort > 0 + return 'http://' + minioAddress + ':' + startMinio.minioPort + } + cluster.setting 's3.client.integration_test_permanent.endpoint', "${ -> minioAddressAndPort.call()}" Task restIntegTestTask = project.tasks.getByName('integTestMinio') restIntegTestTask.clusterConfig.plugin(project.path) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index ff9e1cb4ded19..ede61da1369f5 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -27,7 +27,9 @@ import org.apache.logging.log4j.core.appender.ConsoleAppender; import org.apache.logging.log4j.core.appender.CountingNoOpAppender; import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.spi.ExtendedLogger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.util.Constants; import org.elasticsearch.cli.UserException; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.Randomness; @@ -298,8 +300,8 @@ public void testFindAppender() throws IOException, UserException { public void testPrefixLogger() throws IOException, 
IllegalAccessException, UserException { setupLogging("prefix"); - final String prefix = randomBoolean() ? null : randomAlphaOfLength(16); - final Logger logger = Loggers.getLogger("prefix", prefix); + final String prefix = randomAlphaOfLength(16); + final Logger logger = new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix_test"), "prefix_test", prefix); logger.info("test"); logger.info("{}", "test"); final Exception e = new Exception("exception"); @@ -319,13 +321,8 @@ public void testPrefixLogger() throws IOException, IllegalAccessException, UserE final int expectedLogLines = 3; assertThat(events.size(), equalTo(expectedLogLines + stackTraceLength)); for (int i = 0; i < expectedLogLines; i++) { - if (prefix == null) { - assertThat("Contents of [" + path + "] are wrong", - events.get(i), startsWith("[" + getTestName() + "] test")); - } else { - assertThat("Contents of [" + path + "] are wrong", - events.get(i), startsWith("[" + getTestName() + "][" + prefix + "] test")); - } + assertThat("Contents of [" + path + "] are wrong", + events.get(i), startsWith("[" + getTestName() + "]" + prefix + " test")); } } @@ -334,8 +331,8 @@ public void testPrefixLoggerMarkersCanBeCollected() throws IOException, UserExce final int prefixes = 1 << 19; // to ensure enough markers that the GC should collect some when we force a GC below for (int i = 0; i < prefixes; i++) { - Loggers.getLogger("prefix" + i, "prefix" + i); // this has the side effect of caching a marker with this prefix - + // this has the side effect of caching a marker with this prefix + new PrefixLogger((ExtendedLogger) LogManager.getLogger("prefix" + i), "prefix" + i, "prefix" + i); } System.gc(); // this will free the weakly referenced keys in the marker cache @@ -360,7 +357,6 @@ public void testProperties() throws IOException, UserException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32546") public void testNoNodeNameWarning() throws IOException, UserException { setupLogging("no_node_name"); @@ -376,7 +372,11 @@ public void testNoNodeNameWarning() throws IOException, UserException { + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users " + "who customize log4j2.properties but will stop this behavior in 7.0. 
You should manually replace " + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:"); - assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + if (Constants.WINDOWS) { + assertThat(events.get(1), endsWith("no_node_name\\log4j2.properties")); + } else { + assertThat(events.get(1), endsWith("no_node_name/log4j2.properties")); + } } private void setupLogging(final String config) throws IOException, UserException { diff --git a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats index cb9e6658d3d99..a7628d08bbaff 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats @@ -189,7 +189,10 @@ setup() { @test "[SYSTEMD] start Elasticsearch with custom JVM options" { assert_file_exist $ESENVFILE - local temp=`mktemp -d` + # The custom config directory is not under /tmp or /var/tmp because + # systemd's private temp directory functionality means different + # processes can have different views of what's in these directories + local temp=`mktemp -p /etc -d` cp "$ESCONFIG"/elasticsearch.yml "$temp" cp "$ESCONFIG"/log4j2.properties "$temp" touch "$temp/jvm.options" diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 6e543444079d1..f377d710dbc0e 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -91,11 +91,14 @@ fi @test "[$GROUP] install a sample plugin with a symlinked plugins path" { # Clean up after the last time this test was run - rm -rf /tmp/plugins.* - rm -rf /tmp/old_plugins.* + rm -rf /var/plugins.* + rm -rf /var/old_plugins.* rm -rf "$ESPLUGINS" - local es_plugins=$(mktemp -d -t 'plugins.XXXX') + # The custom plugins directory is not under /tmp or /var/tmp because + # systemd's private temp directory functionality means different + # processes can have different views of what's in these directories + local es_plugins=$(mktemp -p /var -d -t 'plugins.XXXX') chown -R elasticsearch:elasticsearch "$es_plugins" ln -s "$es_plugins" "$ESPLUGINS" diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index 72b3552184422..83ea26f6a159e 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -502,7 +502,10 @@ run_elasticsearch_tests() { # Move the config directory to another directory and properly chown it. 
move_config() { local oldConfig="$ESCONFIG" - export ESCONFIG="${1:-$(mktemp -d -t 'config.XXXX')}" + # The custom config directory is not under /tmp or /var/tmp because + # systemd's private temp directory functionality means different + # processes can have different views of what's in these directories + export ESCONFIG="${1:-$(mktemp -p /etc -d -t 'config.XXXX')}" echo "Moving configuration directory from $oldConfig to $ESCONFIG" # Move configuration files to the new configuration directory diff --git a/qa/wildfly/build.gradle b/qa/wildfly/build.gradle index 0bab0c70d147d..b55aa68573c1c 100644 --- a/qa/wildfly/build.gradle +++ b/qa/wildfly/build.gradle @@ -84,7 +84,7 @@ task deploy(type: Copy) { } task writeElasticsearchProperties { - onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) && !inFipsJvm } + onlyIf { !Os.isFamily(Os.FAMILY_WINDOWS) } dependsOn 'integTestCluster#wait', deploy doLast { final File elasticsearchProperties = file("${wildflyInstall}/standalone/configuration/elasticsearch.properties") @@ -177,7 +177,7 @@ task stopWildfly(type: LoggedExec) { commandLine "${wildflyInstall}/bin/jboss-cli.sh", "--controller=localhost:${-> managementPort}", "--connect", "command=shutdown" } -if (!Os.isFamily(Os.FAMILY_WINDOWS) && !inFipsJvm) { +if (!Os.isFamily(Os.FAMILY_WINDOWS)) { integTestRunner.dependsOn(configureTransportClient) final TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() { @Override diff --git a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java index 46fafebeb4e2d..1da713ea9bcd9 100644 --- a/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java +++ b/qa/wildfly/src/test/java/org/elasticsearch/wildfly/WildflyIT.java @@ -28,6 +28,7 @@ import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.util.EntityUtils; import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.TestRuleLimitSysouts; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterModule; @@ -50,6 +51,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +@TestRuleLimitSysouts.Limit(bytes = 14000) public class WildflyIT extends LuceneTestCase { public void testTransportClient() throws URISyntaxException, IOException { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml index 76bd3a1ba7286..012a3f52a542f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -1,7 +1,13 @@ setup: - do: indices.create: - index: test + index: test + body: + mappings: + test: + properties: + numeric_group: { type: integer } + - do: index: index: test @@ -387,3 +393,33 @@ setup: - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._version: 55 } - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._id: "4" } - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.1._version: 44 } + +--- +"field collapsing on a field alias": + - skip: + version: " - 6.3.99" + reason: Field aliases were introduced in 6.4.0. 
+ - do: + indices.put_mapping: + index: test + type: test + body: + test: + properties: + group_alias: { type: alias, path: numeric_group } + - do: + search: + index: test + body: + collapse: { field: group_alias, inner_hits: { name: sub_hits } } + sort: [{ sort: desc }] + + - match: { hits.total: 6 } + - length: { hits.hits: 3 } + + - match: { hits.hits.0.fields.group_alias: [3] } + - match: { hits.hits.0.inner_hits.sub_hits.hits.total: 1} + - match: { hits.hits.1.fields.group_alias: [1] } + - match: { hits.hits.1.inner_hits.sub_hits.hits.total: 3} + - match: { hits.hits.2.fields.group_alias: [25] } + - match: { hits.hits.2.inner_hits.sub_hits.hits.total: 2} diff --git a/server/build.gradle b/server/build.gradle index 6d8aa672ae1b0..24018a40ae217 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -230,7 +230,6 @@ thirdPartyAudit.excludes = [ 'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule', 'com.fasterxml.jackson.dataformat.xml.XmlMapper', 'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter', - 'com.fasterxml.jackson.databind.node.JsonNodeFactory', 'com.fasterxml.jackson.databind.node.ObjectNode', 'org.fusesource.jansi.Ansi', 'org.fusesource.jansi.AnsiRenderer$Code', @@ -272,12 +271,6 @@ thirdPartyAudit.excludes = [ 'javax.mail.internet.MimeMultipart', 'javax.mail.internet.MimeUtility', 'javax.mail.util.ByteArrayDataSource', - 'javax.persistence.AttributeConverter', - 'javax.persistence.EntityManager', - 'javax.persistence.EntityManagerFactory', - 'javax.persistence.EntityTransaction', - 'javax.persistence.Persistence', - 'javax.persistence.PersistenceException', 'org.apache.commons.compress.compressors.CompressorStreamFactory', 'org.apache.commons.compress.utils.IOUtils', 'org.apache.commons.csv.CSVFormat', @@ -321,6 +314,16 @@ thirdPartyAudit.excludes = [ 'com.google.common.geometry.S2LatLng', ] +if (JavaVersion.current() <= JavaVersion.VERSION_1_8) { + // Used by Log4J 2.11.1 + thirdPartyAudit.excludes += [ + 'java.io.ObjectInputFilter', + 'java.io.ObjectInputFilter$Config', + 'java.io.ObjectInputFilter$FilterInfo', + 'java.io.ObjectInputFilter$Status' + ] +} + if (JavaVersion.current() > JavaVersion.VERSION_1_8) { thirdPartyAudit.excludes += ['javax.xml.bind.DatatypeConverter'] } diff --git a/server/licenses/log4j-1.2-api-2.11.1.jar.sha1 b/server/licenses/log4j-1.2-api-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..575d75dbda8c5 --- /dev/null +++ b/server/licenses/log4j-1.2-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +3aba3398fe064a3eab4331f88161c7480e848418 \ No newline at end of file diff --git a/server/licenses/log4j-1.2-api-2.9.1.jar.sha1 b/server/licenses/log4j-1.2-api-2.9.1.jar.sha1 deleted file mode 100644 index 0b5acc62b7a13..0000000000000 --- a/server/licenses/log4j-1.2-api-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -894f96d677880d4ab834a1356f62b875e579caaa \ No newline at end of file diff --git a/server/licenses/log4j-api-2.11.1.jar.sha1 b/server/licenses/log4j-api-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..4b1bfffac179f --- /dev/null +++ b/server/licenses/log4j-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +268f0fe4df3eefe052b57c87ec48517d64fb2a10 \ No newline at end of file diff --git a/server/licenses/log4j-api-2.9.1.jar.sha1 b/server/licenses/log4j-api-2.9.1.jar.sha1 deleted file mode 100644 index e1a89fadfed95..0000000000000 --- a/server/licenses/log4j-api-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a2999229464e7a324aa503c0a52ec0f05efe7bd \ No newline at end of file diff --git a/server/licenses/log4j-core-2.11.1.jar.sha1 
b/server/licenses/log4j-core-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..2fb8589380a03 --- /dev/null +++ b/server/licenses/log4j-core-2.11.1.jar.sha1 @@ -0,0 +1 @@ +592a48674c926b01a9a747c7831bcd82a9e6d6e4 \ No newline at end of file diff --git a/server/licenses/log4j-core-2.9.1.jar.sha1 b/server/licenses/log4j-core-2.9.1.jar.sha1 deleted file mode 100644 index 990ea322a7613..0000000000000 --- a/server/licenses/log4j-core-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c041978c686866ee8534f538c6220238db3bb6be \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..2cbf39687624c --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +c547b30525ad80d0ceeaa40c2d3a901c7e76fd46 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 8b2a098a3a2eb..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d27958843ca118db2ffd2c242ae3761bd5a47328 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..9e2473361f033 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +9c327295d54d5abd2684e00c3aefe58aa1caace7 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d8496a0a86ae2..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ea220ba8e4accb8b04e280463042ad470e23bc0 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..fdedaf3fc5756 --- /dev/null +++ b/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +73dd7703a94ec2357581f65ee7c1c4d618ff310f \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d38fb392c350b..0000000000000 --- a/server/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -471096d6e92338b208aa91f3a85feb2f9cfc4afd \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..4e555692b0f9a --- /dev/null +++ b/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +1c3802fa30990a1758f2df19d17fe2c95fc45870 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 7f83082fa0c1d..0000000000000 --- a/server/licenses/lucene-grouping-7.5.0-snapshot-608f0277b0.jar.sha1 +++ 
/dev/null @@ -1 +0,0 @@ -f0af947c60d24f779c22f774e81ebd7dd91cc932 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..73b6c15f332f9 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +8d7abdbb7900d7e6a76c391d8be07217c0d882ca \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 6b9f2cb724dd0..0000000000000 --- a/server/licenses/lucene-highlighter-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fbc83ac5a0139ed7e7faf6c95a2718f46f28c641 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..23414b8e8e134 --- /dev/null +++ b/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +011f78ae9d9a386fcf20ceea29ba30e75fb512e8 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index a085943140ec2..0000000000000 --- a/server/licenses/lucene-join-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -30adfe493982b0db059dc243e269eea38d850d46 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..d227ebaf46368 --- /dev/null +++ b/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +c3dd461a7cebdcacc77304660218513e10f89adb \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 7acc70be15182..0000000000000 --- a/server/licenses/lucene-memory-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -656f304261d9aad05070fb68593beffafe9147e3 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..a892f3a2272ba --- /dev/null +++ b/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +d63101181708d78eccc441b0d1193dd91d1a0bf1 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index e46b138ba7b21..0000000000000 --- a/server/licenses/lucene-misc-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8bf22ad81a7480c255b55bada401eb131bfdb4df \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..5d0fead48cbc9 --- /dev/null +++ b/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +22e56fbd44d6a47d7dddbdda3c17ce22ad0a6680 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 
b/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index a7114feef6282..0000000000000 --- a/server/licenses/lucene-queries-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -edb3de4d68a34c1e1ca08f79fe4d103b10e98ad1 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..8be3d6447b0bb --- /dev/null +++ b/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +36b38a1d71045f5bee5dc40526f8d57084dbdc00 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index cf3011c9a45d0..0000000000000 --- a/server/licenses/lucene-queryparser-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ece30d5f1e18d96f61644451c858c3d9960558f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..6d968f5400c52 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +21eb8b111bcb94f4abb8c6402dfd10f51ecc0b38 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 30513e58bf6bb..0000000000000 --- a/server/licenses/lucene-sandbox-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad3bd0c2ed96556193c7215bef328e689d0b157f \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..b6aec2eae1dda --- /dev/null +++ b/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +d60081c5641ed21aea82d5d0976b40e1f184c8e5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 6146b055c13f0..0000000000000 --- a/server/licenses/lucene-spatial-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8a6bd97e39ee5af60126adbe8c8375dc41b1ea8e \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..6999baccc89e9 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +2d42b373546aa8923d25e4e9a673dd186064f9bd \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index c812f0449271d..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -07e748d2d80000a7a213f3405b82b6e26b452948 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 
0000000000000..b866b1985568b --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +7f31607959e5a2ed84ab2d9a007a3f76e9a2d38c \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index b5ad83ac9fe13..0000000000000 --- a/server/licenses/lucene-spatial3d-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fd737bd5562f3943618ee7e73a0aaffb6319fdb2 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..55e1c5990de63 --- /dev/null +++ b/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +f7619348f0619867c52f4801531c70358f49873a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index 452b96420f8d7..0000000000000 --- a/server/licenses/lucene-suggest-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ff3f260d1dc8c18bc67f3c33aa84a0ad290daac5 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 04fd7702aa02a..d57012012f61d 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -279,13 +279,8 @@ private static class GroupBy { } } this.index = indexName; - if (cause == null) { - this.reason = failure.reason(); - this.causeType = null; - } else { - this.reason = cause.getMessage(); - this.causeType = cause.getClass(); - } + this.reason = cause.getMessage(); + this.causeType = cause.getClass(); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ShardOperationFailedException.java b/server/src/main/java/org/elasticsearch/action/ShardOperationFailedException.java index 013bf06d2f8b2..08a97d4d993a6 100644 --- a/server/src/main/java/org/elasticsearch/action/ShardOperationFailedException.java +++ b/server/src/main/java/org/elasticsearch/action/ShardOperationFailedException.java @@ -19,39 +19,70 @@ package org.elasticsearch.action; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.rest.RestStatus; +import java.util.Objects; + /** * An exception indicating that a failure occurred performing an operation on the shard. 
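* It exposes the index, shard id, reason, status and cause of the failure.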
* - * */ -public interface ShardOperationFailedException extends Streamable, ToXContent { +public abstract class ShardOperationFailedException implements Streamable, ToXContent { + + protected String index; + protected int shardId; + protected String reason; + protected RestStatus status; + protected Throwable cause; + + protected ShardOperationFailedException() { + + } + + protected ShardOperationFailedException(@Nullable String index, int shardId, String reason, RestStatus status, Throwable cause) { + this.index = index; + this.shardId = shardId; + this.reason = Objects.requireNonNull(reason, "reason cannot be null"); + this.status = Objects.requireNonNull(status, "status cannot be null"); + this.cause = Objects.requireNonNull(cause, "cause cannot be null"); + } /** * The index the operation failed on. Might return {@code null} if it can't be derived. */ - String index(); + @Nullable + public final String index() { + return index; + } /** * The shard id the operation failed on. Might return {@code -1} if it can't be derived. */ - int shardId(); + public final int shardId() { + return shardId; + } /** * The reason of the failure. */ - String reason(); + public final String reason() { + return reason; + } /** * The status of the failure. */ - RestStatus status(); + public final RestStatus status() { + return status; + } /** * The cause of this failure */ - Throwable getCause(); + public final Throwable getCause() { + return cause; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java index 6cf160897482c..72aeb7f757528 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresResponse.java @@ -21,7 +21,6 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; @@ -248,7 +247,7 @@ public String nodeId() { return nodeId; } - public static Failure readFailure(StreamInput in) throws IOException { + static Failure readFailure(StreamInput in) throws IOException { Failure failure = new Failure(); failure.readFrom(in); return failure; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java deleted file mode 100644 index 3e7ee41b914b7..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResultHolder.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied.
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.bulk; - -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.VersionConflictEngineException; - -/** - * A struct-like holder for a bulk items reponse, result, and the resulting - * replica operation to be executed. - */ -class BulkItemResultHolder { - public final @Nullable DocWriteResponse response; - public final @Nullable Engine.Result operationResult; - public final BulkItemRequest replicaRequest; - - BulkItemResultHolder(@Nullable DocWriteResponse response, - @Nullable Engine.Result operationResult, - BulkItemRequest replicaRequest) { - this.response = response; - this.operationResult = operationResult; - this.replicaRequest = replicaRequest; - } - - public boolean isVersionConflict() { - return operationResult == null ? false : - operationResult.getFailure() instanceof VersionConflictEngineException; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java new file mode 100644 index 0000000000000..5f61d90d500e7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContext.java @@ -0,0 +1,345 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.action.support.replication.TransportWriteAction; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.translog.Translog; + +import java.util.Arrays; + +/** + * This is a utility class that holds the per request state needed to perform bulk operations on the primary. + * More specifically, it maintains an index to the current executing bulk item, which allows execution + * to stop and wait for external events such as mapping updates. + */ +class BulkPrimaryExecutionContext { + + enum ItemProcessingState { + /** Item execution is ready to start, no operations have been performed yet */ + INITIAL, + /** + * The incoming request has been translated to a request that can be executed on the shard. + * This is used to convert update requests to a fully specified index or delete requests. 
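+ * For example, an update request is typically rewritten by the update helper into a fully specified index request (carrying the merged document source) or a delete request.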
+ */ + TRANSLATED, + /** + * The request cannot execute with the current mapping and should wait for a new mapping + * to arrive from the master. A mapping request for the needed changes has already been + * submitted + */ + WAIT_FOR_MAPPING_UPDATE, + /** + * The request should be executed again, but there is no need to wait for an external event. + * This is needed to support retry on conflicts during updates. + */ + IMMEDIATE_RETRY, + /** The request has been executed on the primary shard (successfully or not) */ + EXECUTED, + /** + * No further handling of current request is needed. The result has been converted to a user response + * and execution can continue to the next item (if available). + */ + COMPLETED + } + + private final BulkShardRequest request; + private final IndexShard primary; + private Translog.Location locationToSync = null; + private int currentIndex = -1; + + private ItemProcessingState currentItemState; + private DocWriteRequest requestToExecute; + private BulkItemResponse executionResult; + private int retryCounter; + + + BulkPrimaryExecutionContext(BulkShardRequest request, IndexShard primary) { + this.request = request; + this.primary = primary; + advance(); + } + + + private int findNextNonAborted(int startIndex) { + final int length = request.items().length; + while (startIndex < length && isAborted(request.items()[startIndex].getPrimaryResponse())) { + startIndex++; + } + return startIndex; + } + + private static boolean isAborted(BulkItemResponse response) { + return response != null && response.isFailed() && response.getFailure().isAborted(); + } + + /** move to the next item to execute */ + private void advance() { + assert currentItemState == ItemProcessingState.COMPLETED || currentIndex == -1 : + "moving to next but current item wasn't completed (state: " + currentItemState + ")"; + currentItemState = ItemProcessingState.INITIAL; + currentIndex = findNextNonAborted(currentIndex + 1); + retryCounter = 0; + requestToExecute = null; + executionResult = null; + assert assertInvariants(ItemProcessingState.INITIAL); + } + + /** gets the current, untranslated item request */ + public DocWriteRequest getCurrent() { + return getCurrentItem().request(); + } + + public BulkShardRequest getBulkShardRequest() { + return request; + } + + /** returns the result of the request that has been executed on the shard */ + public BulkItemResponse getExecutionResult() { + assert assertInvariants(ItemProcessingState.EXECUTED); + return executionResult; + } + + /** returns the number of times the current operation has been retried */ + public int getRetryCounter() { + return retryCounter; + } + + /** returns true if the current request has been executed on the primary */ + public boolean isOperationExecuted() { + return currentItemState == ItemProcessingState.EXECUTED; + } + + /** returns true if the request needs to wait for a mapping update to arrive from the master */ + public boolean requiresWaitingForMappingUpdate() { + return currentItemState == ItemProcessingState.WAIT_FOR_MAPPING_UPDATE; + } + + /** returns true if the current request should be retried without waiting for an external event */ + public boolean requiresImmediateRetry() { + return currentItemState == ItemProcessingState.IMMEDIATE_RETRY; + } + + /** + * returns true if the current request has been completed and its result translated to a user + * facing response + */ + public boolean isCompleted() { + return currentItemState == ItemProcessingState.COMPLETED; + } + + /** + * returns true if the current
request is in INITIAL state + */ + public boolean isInitial() { + return currentItemState == ItemProcessingState.INITIAL; + } + + /** + * returns true if {@link #advance()} has not yet moved the current item beyond the + * end of the {@link BulkShardRequest#items()} array. + */ + public boolean hasMoreOperationsToExecute() { + return currentIndex < request.items().length; + } + + + /** returns the name of the index the current request used */ + public String getConcreteIndex() { + return getCurrentItem().index(); + } + + /** returns any primary response that was set by a previous primary */ + public BulkItemResponse getPreviousPrimaryResponse() { + return getCurrentItem().getPrimaryResponse(); + } + + /** returns a translog location that needs to be synced in order to persist all operations executed so far */ + public Translog.Location getLocationToSync() { + assert hasMoreOperationsToExecute() == false; + // we always get to the end of the list by using advance, which in turn sets the state to INITIAL + assert assertInvariants(ItemProcessingState.INITIAL); + return locationToSync; + } + + private BulkItemRequest getCurrentItem() { + return request.items()[currentIndex]; + } + + /** returns the primary shard */ + public IndexShard getPrimary() { + return primary; + } + + /** + * sets the request that should actually be executed on the primary. This can be different than the request + * received from the user (specifically, an update request is translated to an indexing or delete request). + */ + public void setRequestToExecute(DocWriteRequest writeRequest) { + assert assertInvariants(ItemProcessingState.INITIAL); + requestToExecute = writeRequest; + currentItemState = ItemProcessingState.TRANSLATED; + assert assertInvariants(ItemProcessingState.TRANSLATED); + } + + /** returns the request that should be executed on the shard.
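+ * Callers are expected to know the concrete type from the translation step, e.g. {@code final IndexRequest request = context.getRequestToExecute();} after an update was translated into an indexing operation.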
*/ + public <T extends DocWriteRequest<T>> T getRequestToExecute() { + assert assertInvariants(ItemProcessingState.TRANSLATED); + return (T) requestToExecute; + } + + /** indicates that the current operation cannot be completed and needs to wait for a new mapping from the master */ + public void markAsRequiringMappingUpdate() { + assert assertInvariants(ItemProcessingState.TRANSLATED); + currentItemState = ItemProcessingState.WAIT_FOR_MAPPING_UPDATE; + requestToExecute = null; + assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE); + } + + /** resets the current item state, preparing for a new execution */ + public void resetForExecutionForRetry() { + assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE, ItemProcessingState.EXECUTED); + currentItemState = ItemProcessingState.INITIAL; + requestToExecute = null; + executionResult = null; + assertInvariants(ItemProcessingState.INITIAL); + } + + /** completes the operation without doing anything on the primary */ + public void markOperationAsNoOp(DocWriteResponse response) { + assertInvariants(ItemProcessingState.INITIAL); + executionResult = new BulkItemResponse(getCurrentItem().id(), getCurrentItem().request().opType(), response); + currentItemState = ItemProcessingState.EXECUTED; + assertInvariants(ItemProcessingState.EXECUTED); + } + + /** indicates that the operation needs to be failed as the required mapping didn't arrive in time */ + public void failOnMappingUpdate(Exception cause) { + assert assertInvariants(ItemProcessingState.WAIT_FOR_MAPPING_UPDATE); + currentItemState = ItemProcessingState.EXECUTED; + final DocWriteRequest docWriteRequest = getCurrentItem().request(); + executionResult = new BulkItemResponse(getCurrentItem().id(), docWriteRequest.opType(), + // Make sure to use getCurrentItem().index() here, if you use docWriteRequest.index() it will use the + // concrete index instead of an alias if used! + new BulkItemResponse.Failure(getCurrentItem().index(), docWriteRequest.type(), docWriteRequest.id(), cause)); + markAsCompleted(executionResult); + } + + /** the current operation has been executed on the primary with the specified result */ + public void markOperationAsExecuted(Engine.Result result) { + assertInvariants(ItemProcessingState.TRANSLATED); + final BulkItemRequest current = getCurrentItem(); + DocWriteRequest docWriteRequest = getRequestToExecute(); + switch (result.getResultType()) { + case SUCCESS: + final DocWriteResponse response; + if (result.getOperationType() == Engine.Operation.TYPE.INDEX) { + Engine.IndexResult indexResult = (Engine.IndexResult) result; + response = new IndexResponse(primary.shardId(), requestToExecute.type(), requestToExecute.id(), + result.getSeqNo(), result.getTerm(), indexResult.getVersion(), indexResult.isCreated()); + } else if (result.getOperationType() == Engine.Operation.TYPE.DELETE) { + Engine.DeleteResult deleteResult = (Engine.DeleteResult) result; + response = new DeleteResponse(primary.shardId(), requestToExecute.type(), requestToExecute.id(), + deleteResult.getSeqNo(), result.getTerm(), deleteResult.getVersion(), deleteResult.isFound()); + + } else { + throw new AssertionError("unknown result type :" + result.getResultType()); + } + executionResult = new BulkItemResponse(current.id(), current.request().opType(), response); + // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though.
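+ // (the real per-item ShardInfo is presumably filled in via BulkShardResponse#setShardInfo once replication completes)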
+ executionResult.getResponse().setShardInfo(new ReplicationResponse.ShardInfo()); + locationToSync = TransportWriteAction.locationToSync(locationToSync, result.getTranslogLocation()); + break; + case FAILURE: + executionResult = new BulkItemResponse(current.id(), docWriteRequest.opType(), + // Make sure to use request.index() here, if you + // use docWriteRequest.index() it will use the + // concrete index instead of an alias if used! + new BulkItemResponse.Failure(request.index(), docWriteRequest.type(), docWriteRequest.id(), + result.getFailure(), result.getSeqNo())); + break; + default: + throw new AssertionError("unknown result type for " + getCurrentItem() + ": " + result.getResultType()); + } + currentItemState = ItemProcessingState.EXECUTED; + } + + /** finishes the execution of the current request, with the response that should be returned to the user */ + public void markAsCompleted(BulkItemResponse translatedResponse) { + assertInvariants(ItemProcessingState.EXECUTED); + assert executionResult != null && translatedResponse.getItemId() == executionResult.getItemId(); + assert translatedResponse.getItemId() == getCurrentItem().id(); + + if (translatedResponse.isFailed() == false && requestToExecute != null && requestToExecute != getCurrent()) { + request.items()[currentIndex] = new BulkItemRequest(request.items()[currentIndex].id(), requestToExecute); + } + getCurrentItem().setPrimaryResponse(translatedResponse); + currentItemState = ItemProcessingState.COMPLETED; + advance(); + } + + /** builds the bulk shard response to return to the user */ + public BulkShardResponse buildShardResponse() { + assert hasMoreOperationsToExecute() == false; + return new BulkShardResponse(request.shardId(), + Arrays.stream(request.items()).map(BulkItemRequest::getPrimaryResponse).toArray(BulkItemResponse[]::new)); + } + + private boolean assertInvariants(ItemProcessingState... 
expectedCurrentState) { + assert Arrays.asList(expectedCurrentState).contains(currentItemState): + "expected current state [" + currentItemState + "] to be one of " + Arrays.toString(expectedCurrentState); + assert currentIndex >= 0 : currentIndex; + assert retryCounter >= 0 : retryCounter; + switch (currentItemState) { + case INITIAL: + assert requestToExecute == null : requestToExecute; + assert executionResult == null : executionResult; + break; + case TRANSLATED: + assert requestToExecute != null; + assert executionResult == null : executionResult; + break; + case WAIT_FOR_MAPPING_UPDATE: + assert requestToExecute == null; + assert executionResult == null : executionResult; + break; + case IMMEDIATE_RETRY: + assert requestToExecute != null; + assert executionResult == null : executionResult; + break; + case EXECUTED: + // requestToExecute can be null if the update ended up as NOOP + assert executionResult != null; + break; + case COMPLETED: + assert requestToExecute != null; + assert executionResult != null; + assert getCurrentItem().getPrimaryResponse() != null; + break; + } + return true; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 7ac9f92c6df8c..de17adf09b186 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -29,30 +29,34 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.replication.ReplicationOperation; -import org.elasticsearch.action.support.replication.ReplicationResponse.ShardInfo; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.support.replication.TransportWriteAction; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.SourceToParse; import 
org.elasticsearch.index.seqno.SequenceNumbers; @@ -60,12 +64,14 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.node.NodeClosedException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.Map; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; @@ -106,174 +112,167 @@ protected boolean resolveIndex() { } @Override - public WritePrimaryResult<BulkShardRequest, BulkShardResponse> shardOperationOnPrimary( - BulkShardRequest request, IndexShard primary) throws Exception { - return performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, new ConcreteMappingUpdatePerformer()); + protected WritePrimaryResult<BulkShardRequest, BulkShardResponse> shardOperationOnPrimary(BulkShardRequest request, IndexShard primary) + throws Exception { + ClusterStateObserver observer = new ClusterStateObserver(clusterService, request.timeout(), logger, threadPool.getThreadContext()); + CheckedRunnable<Exception> waitForMappingUpdate = () -> { + PlainActionFuture<Void> waitingFuture = new PlainActionFuture<>(); + observer.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + waitingFuture.onResponse(null); + } + + @Override + public void onClusterServiceClose() { + waitingFuture.onFailure(new NodeClosedException(clusterService.localNode())); + } + + @Override + public void onTimeout(TimeValue timeout) { + waitingFuture.onFailure( + new MapperException("timed out while waiting for a dynamic mapping update")); + } + }); + waitingFuture.get(); + }; + return performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, + new ConcreteMappingUpdatePerformer(), waitForMappingUpdate); } public static WritePrimaryResult<BulkShardRequest, BulkShardResponse> performOnPrimary( - BulkShardRequest request, - IndexShard primary, - UpdateHelper updateHelper, - LongSupplier nowInMillisSupplier, - MappingUpdatePerformer mappingUpdater) throws Exception { - final IndexMetaData metaData = primary.indexSettings().getIndexMetaData(); - Translog.Location location = null; - for (int requestIndex = 0; requestIndex < request.items().length; requestIndex++) { - if (isAborted(request.items()[requestIndex].getPrimaryResponse()) == false) { - location = executeBulkItemRequest(metaData, primary, request, location, requestIndex, - updateHelper, nowInMillisSupplier, mappingUpdater); - } - } - BulkItemResponse[] responses = new BulkItemResponse[request.items().length]; - BulkItemRequest[] items = request.items(); - for (int i = 0; i < items.length; i++) { - responses[i] = items[i].getPrimaryResponse(); - } - BulkShardResponse response = new BulkShardResponse(request.shardId(), responses); - return new WritePrimaryResult<>(request, response, location, null, primary, logger); + BulkShardRequest request, + IndexShard primary, + UpdateHelper updateHelper, + LongSupplier nowInMillisSupplier, + MappingUpdatePerformer mappingUpdater, + CheckedRunnable<Exception> waitForMappingUpdate) throws Exception { + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(request, primary); + return performOnPrimary(context, updateHelper, nowInMillisSupplier, mappingUpdater, waitForMappingUpdate); } - private static BulkItemResultHolder executeIndexRequest(final IndexRequest indexRequest, - final BulkItemRequest bulkItemRequest, - final
IndexShard primary, - final MappingUpdatePerformer mappingUpdater) throws Exception { - Engine.IndexResult indexResult = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater); - switch (indexResult.getResultType()) { - case SUCCESS: - IndexResponse response = new IndexResponse(primary.shardId(), indexRequest.type(), indexRequest.id(), - indexResult.getSeqNo(), indexResult.getTerm(), indexResult.getVersion(), indexResult.isCreated()); - return new BulkItemResultHolder(response, indexResult, bulkItemRequest); - case FAILURE: - return new BulkItemResultHolder(null, indexResult, bulkItemRequest); - default: - throw new AssertionError("unknown result type for " + indexRequest + ": " + indexResult.getResultType()); + private static WritePrimaryResult<BulkShardRequest, BulkShardResponse> performOnPrimary( + BulkPrimaryExecutionContext context, UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, + MappingUpdatePerformer mappingUpdater, CheckedRunnable<Exception> waitForMappingUpdate) throws Exception { + + while (context.hasMoreOperationsToExecute()) { + executeBulkItemRequest(context, updateHelper, nowInMillisSupplier, mappingUpdater, waitForMappingUpdate); + assert context.isInitial(); // either completed and moved to next or reset } + return new WritePrimaryResult<>(context.getBulkShardRequest(), context.buildShardResponse(), context.getLocationToSync(), + null, context.getPrimary(), logger); } - private static BulkItemResultHolder executeDeleteRequest(final DeleteRequest deleteRequest, - final BulkItemRequest bulkItemRequest, - final IndexShard primary, - final MappingUpdatePerformer mappingUpdater) throws Exception { - Engine.DeleteResult deleteResult = executeDeleteRequestOnPrimary(deleteRequest, primary, mappingUpdater); - switch (deleteResult.getResultType()) { - case SUCCESS: - DeleteResponse response = new DeleteResponse(primary.shardId(), deleteRequest.type(), deleteRequest.id(), - deleteResult.getSeqNo(), deleteResult.getTerm(), deleteResult.getVersion(), deleteResult.isFound()); - return new BulkItemResultHolder(response, deleteResult, bulkItemRequest); - case FAILURE: - return new BulkItemResultHolder(null, deleteResult, bulkItemRequest); - case MAPPING_UPDATE_REQUIRED: - throw new AssertionError("delete operation leaked a mapping update " + deleteRequest); - default: - throw new AssertionError("unknown result type for " + deleteRequest + ": " + deleteResult.getResultType()); + /** Executes bulk item requests and handles request execution exceptions */ + static void executeBulkItemRequest(BulkPrimaryExecutionContext context, UpdateHelper updateHelper, LongSupplier nowInMillisSupplier, + MappingUpdatePerformer mappingUpdater, CheckedRunnable<Exception> waitForMappingUpdate) + throws Exception { + final DocWriteRequest.OpType opType = context.getCurrent().opType(); + + final UpdateHelper.Result updateResult; + if (opType == DocWriteRequest.OpType.UPDATE) { + final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent(); + try { + updateResult = updateHelper.prepare(updateRequest, context.getPrimary(), nowInMillisSupplier); + } catch (Exception failure) { + // we may fail translating an update to index or delete operation + // we use index result to communicate failure while translating update request + final Engine.Result result = new Engine.IndexResult(failure, updateRequest.version(), SequenceNumbers.UNASSIGNED_SEQ_NO); + context.setRequestToExecute(updateRequest); + context.markOperationAsExecuted(result); + context.markAsCompleted(context.getExecutionResult()); + return; + } + // execute translated update
request + switch (updateResult.getResponseResult()) { + case CREATED: + case UPDATED: + IndexRequest indexRequest = updateResult.action(); + IndexMetaData metaData = context.getPrimary().indexSettings().getIndexMetaData(); + MappingMetaData mappingMd = metaData.mappingOrDefault(indexRequest.type()); + indexRequest.process(metaData.getCreationVersion(), mappingMd, updateRequest.concreteIndex()); + context.setRequestToExecute(indexRequest); + break; + case DELETED: + context.setRequestToExecute(updateResult.action()); + break; + case NOOP: + context.markOperationAsNoOp(updateResult.action()); + context.markAsCompleted(context.getExecutionResult()); + return; + default: + throw new IllegalStateException("Illegal update operation " + updateResult.getResponseResult()); + } + } else { + context.setRequestToExecute(context.getCurrent()); + updateResult = null; } - } - static Translog.Location calculateTranslogLocation(final Translog.Location originalLocation, - final BulkItemResultHolder bulkItemResult) { - final Engine.Result operationResult = bulkItemResult.operationResult; - if (operationResult != null && operationResult.getResultType() == Engine.Result.Type.SUCCESS) { - return locationToSync(originalLocation, operationResult.getTranslogLocation()); + assert context.getRequestToExecute() != null; // also checks that we're in TRANSLATED state + + if (context.getRequestToExecute().opType() == DocWriteRequest.OpType.DELETE) { + executeDeleteRequestOnPrimary(context, mappingUpdater); } else { - return originalLocation; + executeIndexRequestOnPrimary(context, mappingUpdater); + } + + if (context.requiresWaitingForMappingUpdate()) { + try { + waitForMappingUpdate.run(); + context.resetForExecutionForRetry(); + } catch (Exception e) { + context.failOnMappingUpdate(e); + } + return; + } + + assert context.isOperationExecuted(); + + if (opType == DocWriteRequest.OpType.UPDATE && + context.getExecutionResult().isFailed() && + isConflictException(context.getExecutionResult().getFailure().getCause())) { + final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent(); + if (context.getRetryCounter() < updateRequest.retryOnConflict()) { + context.resetForExecutionForRetry(); + return; + } } + + finalizePrimaryOperationOnCompletion(context, opType, updateResult); } - // Visible for unit testing - /** - * Creates a BulkItemResponse for the primary operation and returns it. If no bulk response is - * needed (because one already exists and the operation failed), then return null. - */ - static BulkItemResponse createPrimaryResponse(BulkItemResultHolder bulkItemResult, - final DocWriteRequest.OpType opType, - BulkShardRequest request) { - final Engine.Result operationResult = bulkItemResult.operationResult; - final DocWriteResponse response = bulkItemResult.response; - final BulkItemRequest replicaRequest = bulkItemResult.replicaRequest; - - if (operationResult == null) { // in case of noop update operation - assert response.getResult() == DocWriteResponse.Result.NOOP : "only noop updates can have a null operation"; - return new BulkItemResponse(replicaRequest.id(), opType, response); - - } else if (operationResult.getResultType() == Engine.Result.Type.SUCCESS) { - BulkItemResponse primaryResponse = new BulkItemResponse(replicaRequest.id(), opType, response); - // set a blank ShardInfo so we can safely send it to the replicas. We won't use it in the real response though. 
- primaryResponse.getResponse().setShardInfo(new ShardInfo()); - return primaryResponse; - - } else if (operationResult.getResultType() == Engine.Result.Type.FAILURE) { - DocWriteRequest docWriteRequest = replicaRequest.request(); - Exception failure = operationResult.getFailure(); - if (isConflictException(failure)) { + private static void finalizePrimaryOperationOnCompletion(BulkPrimaryExecutionContext context, DocWriteRequest.OpType opType, + UpdateHelper.Result updateResult) { + final BulkItemResponse executionResult = context.getExecutionResult(); + if (opType == DocWriteRequest.OpType.UPDATE) { + final UpdateRequest updateRequest = (UpdateRequest) context.getCurrent(); + context.markAsCompleted( + processUpdateResponse(updateRequest, context.getConcreteIndex(), executionResult, updateResult)); + } else if (executionResult.isFailed()) { + final Exception failure = executionResult.getFailure().getCause(); + final DocWriteRequest docWriteRequest = context.getCurrent(); + if (TransportShardBulkAction.isConflictException(failure)) { logger.trace(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", - request.shardId(), docWriteRequest.opType().getLowercase(), request), failure); + context.getPrimary().shardId(), docWriteRequest.opType().getLowercase(), docWriteRequest), failure); } else { logger.debug(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}", - request.shardId(), docWriteRequest.opType().getLowercase(), request), failure); + context.getPrimary().shardId(), docWriteRequest.opType().getLowercase(), docWriteRequest), failure); } + final BulkItemResponse primaryResponse; // if it's a conflict failure, and we already executed the request on a primary (and we execute it // again, due to primary relocation and only processing up to N bulk items when the shard gets closed) // then just use the response we got from the failed execution - if (replicaRequest.getPrimaryResponse() == null || isConflictException(failure) == false) { - return new BulkItemResponse(replicaRequest.id(), docWriteRequest.opType(), - // Make sure to use request.index() here, if you - // use docWriteRequest.index() it will use the - // concrete index instead of an alias if used! 
- new BulkItemResponse.Failure(request.index(), docWriteRequest.type(), docWriteRequest.id(), - failure, operationResult.getSeqNo())); + if (TransportShardBulkAction.isConflictException(failure) && context.getPreviousPrimaryResponse() != null) { + primaryResponse = context.getPreviousPrimaryResponse(); } else { - assert replicaRequest.getPrimaryResponse() != null : "replica request must have a primary response"; - return null; + primaryResponse = executionResult; } + context.markAsCompleted(primaryResponse); } else { - throw new AssertionError("unknown result type for " + request + ": " + operationResult.getResultType()); - } - } - - /** Executes bulk item requests and handles request execution exceptions */ - static Translog.Location executeBulkItemRequest(IndexMetaData metaData, IndexShard primary, - BulkShardRequest request, Translog.Location location, - int requestIndex, UpdateHelper updateHelper, - LongSupplier nowInMillisSupplier, - final MappingUpdatePerformer mappingUpdater) throws Exception { - final DocWriteRequest itemRequest = request.items()[requestIndex].request(); - final DocWriteRequest.OpType opType = itemRequest.opType(); - final BulkItemResultHolder responseHolder; - switch (itemRequest.opType()) { - case CREATE: - case INDEX: - responseHolder = executeIndexRequest((IndexRequest) itemRequest, - request.items()[requestIndex], primary, mappingUpdater); - break; - case UPDATE: - responseHolder = executeUpdateRequest((UpdateRequest) itemRequest, primary, metaData, request, - requestIndex, updateHelper, nowInMillisSupplier, mappingUpdater); - break; - case DELETE: - responseHolder = executeDeleteRequest((DeleteRequest) itemRequest, request.items()[requestIndex], primary, mappingUpdater); - break; - default: throw new IllegalStateException("unexpected opType [" + itemRequest.opType() + "] found"); + context.markAsCompleted(executionResult); } - - final BulkItemRequest replicaRequest = responseHolder.replicaRequest; - - // update the bulk item request because update request execution can mutate the bulk item request - request.items()[requestIndex] = replicaRequest; - - // Retrieve the primary response, and update the replica request with the primary's response - BulkItemResponse primaryResponse = createPrimaryResponse(responseHolder, opType, request); - if (primaryResponse != null) { - replicaRequest.setPrimaryResponse(primaryResponse); - } - - // Update the translog with the new location, if needed - return calculateTranslogLocation(location, responseHolder); - } - - private static boolean isAborted(BulkItemResponse response) { - return response != null && response.isFailed() && response.getFailure().isAborted(); + assert context.isInitial(); } private static boolean isConflictException(final Exception e) { @@ -283,151 +282,50 @@ private static boolean isConflictException(final Exception e) { /** * Creates a new bulk item result from the given requests and result of performing the update operation on the shard. 
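* For index-backed updates the get-result is extracted from the new document source; for delete-backed updates it comes from the source map captured during translation.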
*/ - static BulkItemResultHolder processUpdateResponse(final UpdateRequest updateRequest, final String concreteIndex, - final Engine.Result result, final UpdateHelper.Result translate, - final IndexShard primary, final int bulkReqId) { - assert result.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO : "failed result should not have a sequence number"; - - Engine.Operation.TYPE opType = result.getOperationType(); - - final UpdateResponse updateResponse; - final BulkItemRequest replicaRequest; - - // enrich update response and set translated update (index/delete) request for replica execution in bulk items - if (opType == Engine.Operation.TYPE.INDEX) { - assert result instanceof Engine.IndexResult : result.getClass(); - final IndexRequest updateIndexRequest = translate.action(); - final IndexResponse indexResponse = new IndexResponse(primary.shardId(), updateIndexRequest.type(), updateIndexRequest.id(), - result.getSeqNo(), result.getTerm(), result.getVersion(), ((Engine.IndexResult) result).isCreated()); - updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), - indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(), - indexResponse.getResult()); - - if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) || + static BulkItemResponse processUpdateResponse(final UpdateRequest updateRequest, final String concreteIndex, + BulkItemResponse operationResponse, + final UpdateHelper.Result translate) { + + final BulkItemResponse response; + DocWriteResponse.Result translatedResult = translate.getResponseResult(); + if (operationResponse.isFailed()) { + response = new BulkItemResponse(operationResponse.getItemId(), DocWriteRequest.OpType.UPDATE, operationResponse.getFailure()); + } else { + final UpdateResponse updateResponse; + if (translatedResult == DocWriteResponse.Result.CREATED || translatedResult == DocWriteResponse.Result.UPDATED) { + final IndexRequest updateIndexRequest = translate.action(); + final IndexResponse indexResponse = operationResponse.getResponse(); + updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), + indexResponse.getType(), indexResponse.getId(), indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), + indexResponse.getVersion(), indexResponse.getResult()); + + if ((updateRequest.fetchSource() != null && updateRequest.fetchSource().fetchSource()) || (updateRequest.fields() != null && updateRequest.fields().length > 0)) { - final BytesReference indexSourceAsBytes = updateIndexRequest.source(); - final Tuple<XContentType, Map<String, Object>> sourceAndContent = + final BytesReference indexSourceAsBytes = updateIndexRequest.source(); + final Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType()); - updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, - indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); - } - // set translated request as replica request - replicaRequest = new BulkItemRequest(bulkReqId, updateIndexRequest); - - } else if (opType == Engine.Operation.TYPE.DELETE) { - assert result instanceof Engine.DeleteResult : result.getClass(); - final DeleteRequest updateDeleteRequest = translate.action(); - - final DeleteResponse deleteResponse = new DeleteResponse(primary.shardId(), updateDeleteRequest.type(), updateDeleteRequest.id(), - result.getSeqNo(), result.getTerm(),
result.getVersion(), ((Engine.DeleteResult) result).isFound()); - - updateResponse = new UpdateResponse(deleteResponse.getShardInfo(), deleteResponse.getShardId(), + updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, + indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); + } + } else if (translatedResult == DocWriteResponse.Result.DELETED) { + final DeleteResponse deleteResponse = operationResponse.getResponse(); + updateResponse = new UpdateResponse(deleteResponse.getShardInfo(), deleteResponse.getShardId(), deleteResponse.getType(), deleteResponse.getId(), deleteResponse.getSeqNo(), deleteResponse.getPrimaryTerm(), deleteResponse.getVersion(), deleteResponse.getResult()); - final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex, deleteResponse.getVersion(), + final GetResult getResult = UpdateHelper.extractGetResult(updateRequest, concreteIndex, deleteResponse.getVersion(), translate.updatedSourceAsMap(), translate.updateSourceContentType(), null); - updateResponse.setGetResult(getResult); - // set translated request as replica request - replicaRequest = new BulkItemRequest(bulkReqId, updateDeleteRequest); - - } else { - throw new IllegalArgumentException("unknown operation type: " + opType); - } - - return new BulkItemResultHolder(updateResponse, result, replicaRequest); - } - - /** - * Executes update request once, delegating to a index or delete operation after translation. - * NOOP updates are indicated by returning a null operation in {@link BulkItemResultHolder} - */ - static BulkItemResultHolder executeUpdateRequestOnce(UpdateRequest updateRequest, IndexShard primary, - IndexMetaData metaData, String concreteIndex, - UpdateHelper updateHelper, LongSupplier nowInMillis, - BulkItemRequest primaryItemRequest, int bulkReqId, - final MappingUpdatePerformer mappingUpdater) throws Exception { - final UpdateHelper.Result translate; - // translate update request - try { - translate = updateHelper.prepare(updateRequest, primary, nowInMillis); - } catch (Exception failure) { - // we may fail translating a update to index or delete operation - // we use index result to communicate failure while translating update request - final Engine.Result result = primary.getFailedIndexResult(failure, updateRequest.version()); - return new BulkItemResultHolder(null, result, primaryItemRequest); - } - - final Engine.Result result; - // execute translated update request - switch (translate.getResponseResult()) { - case CREATED: - case UPDATED: - IndexRequest indexRequest = translate.action(); - MappingMetaData mappingMd = metaData.mappingOrDefault(indexRequest.type()); - indexRequest.process(metaData.getCreationVersion(), mappingMd, concreteIndex); - result = executeIndexRequestOnPrimary(indexRequest, primary, mappingUpdater); - break; - case DELETED: - DeleteRequest deleteRequest = translate.action(); - result = executeDeleteRequestOnPrimary(deleteRequest, primary, mappingUpdater); - break; - case NOOP: - primary.noopUpdate(updateRequest.type()); - result = null; - break; - default: throw new IllegalStateException("Illegal update operation " + translate.getResponseResult()); - } - - if (result == null) { - // this is a noop operation - final UpdateResponse updateResponse = translate.action(); - return new BulkItemResultHolder(updateResponse, result, primaryItemRequest); - } else if (result.getResultType() == Engine.Result.Type.FAILURE) { - // There was a result, and the result was a failure - 
return new BulkItemResultHolder(null, result, primaryItemRequest); - } else if (result.getResultType() == Engine.Result.Type.SUCCESS) { - // It was successful, we need to construct the response and return it - return processUpdateResponse(updateRequest, concreteIndex, result, translate, primary, bulkReqId); - } else { - throw new AssertionError("unknown result type for " + updateRequest + ": " + result.getResultType()); - } - } - - /** - * Executes update request, delegating to a index or delete operation after translation, - * handles retries on version conflict and constructs update response - * NOOP updates are indicated by returning a null operation - * in {@link BulkItemResultHolder} - */ - private static BulkItemResultHolder executeUpdateRequest(UpdateRequest updateRequest, IndexShard primary, - IndexMetaData metaData, BulkShardRequest request, - int requestIndex, UpdateHelper updateHelper, - LongSupplier nowInMillis, - final MappingUpdatePerformer mappingUpdater) throws Exception { - BulkItemRequest primaryItemRequest = request.items()[requestIndex]; - assert primaryItemRequest.request() == updateRequest - : "expected bulk item request to contain the original update request, got: " + - primaryItemRequest.request() + " and " + updateRequest; - - BulkItemResultHolder holder = null; - // There must be at least one attempt - int maxAttempts = Math.max(1, updateRequest.retryOnConflict()); - for (int attemptCount = 0; attemptCount < maxAttempts; attemptCount++) { - - holder = executeUpdateRequestOnce(updateRequest, primary, metaData, request.index(), updateHelper, - nowInMillis, primaryItemRequest, request.items()[requestIndex].id(), mappingUpdater); - - // It was either a successful request, or it was a non-conflict failure - if (holder.isVersionConflict() == false) { - return holder; + updateResponse.setGetResult(getResult); + } else { + throw new IllegalArgumentException("unknown operation type: " + translatedResult); } + response = new BulkItemResponse(operationResponse.getItemId(), DocWriteRequest.OpType.UPDATE, updateResponse); } - // We ran out of tries and haven't returned a valid bulk item response, so return the last one generated - return holder; + return response; } + /** Modes for executing item request on replica depending on corresponding primary execution result */ public enum ReplicaItemExecutionMode { @@ -450,6 +348,7 @@ public enum ReplicaItemExecutionMode { /** * Determines whether a bulk item request should be executed on the replica. + * * @return {@link ReplicaItemExecutionMode#NORMAL} upon normal primary execution with no failures * {@link ReplicaItemExecutionMode#FAILURE} upon primary execution failure after sequence no generation * {@link ReplicaItemExecutionMode#NOOP} upon primary execution failure before sequence no generation or @@ -460,8 +359,8 @@ static ReplicaItemExecutionMode replicaItemExecutionMode(final BulkItemRequest r assert primaryResponse != null : "expected primary response to be set for item [" + index + "] request [" + request.request() + "]"; if (primaryResponse.isFailed()) { return primaryResponse.getFailure().getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO - ? ReplicaItemExecutionMode.FAILURE // we have a seq no generated with the failure, replicate as no-op - : ReplicaItemExecutionMode.NOOP; // no seq no generated, ignore replication + ? 
ReplicaItemExecutionMode.FAILURE // we have a seq no generated with the failure, replicate as no-op + : ReplicaItemExecutionMode.NOOP; // no seq no generated, ignore replication } else { // TODO: once we know for sure that every operation that has been processed on the primary is assigned a seq# // (i.e., all nodes on the cluster are on v6.0.0 or higher) we can use the existence of a seq# to indicate whether @@ -469,8 +368,8 @@ static ReplicaItemExecutionMode replicaItemExecutionMode(final BulkItemRequest r // ReplicaItemExecutionMode enum and have a simple boolean check for seq != UNASSIGNED_SEQ_NO which will work for // both failures and indexing operations. return primaryResponse.getResponse().getResult() != DocWriteResponse.Result.NOOP - ? ReplicaItemExecutionMode.NORMAL // execution successful on primary - : ReplicaItemExecutionMode.NOOP; // ignore replication + ? ReplicaItemExecutionMode.NORMAL // execution successful on primary + : ReplicaItemExecutionMode.NOOP; // ignore replication } } @@ -527,7 +426,7 @@ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse break; case DELETE: DeleteRequest deleteRequest = (DeleteRequest) docWriteRequest; - result = replica.applyDeleteOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(), + result = replica.applyDeleteOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(), deleteRequest.type(), deleteRequest.id(), deleteRequest.versionType().versionTypeForReplicationAndRecovery()); break; default: @@ -550,56 +449,62 @@ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse } /** Executes index operation on primary shard after updates mapping if dynamic mappings are found */ - static Engine.IndexResult executeIndexRequestOnPrimary(IndexRequest request, IndexShard primary, - MappingUpdatePerformer mappingUpdater) throws Exception { + private static void executeIndexRequestOnPrimary(BulkPrimaryExecutionContext context, + MappingUpdatePerformer mappingUpdater) throws Exception { + final IndexRequest request = context.getRequestToExecute(); + final IndexShard primary = context.getPrimary(); final SourceToParse sourceToParse = SourceToParse.source(request.index(), request.type(), request.id(), request.source(), request.getContentType()) .routing(request.routing()).parent(request.parent()); - return executeOnPrimaryWhileHandlingMappingUpdates(primary.shardId(), request.type(), + executeOnPrimaryWhileHandlingMappingUpdates(context, () -> primary.applyIndexOperationOnPrimary(request.version(), request.versionType(), sourceToParse, request.getAutoGeneratedTimestamp(), request.isRetry()), e -> primary.getFailedIndexResult(e, request.version()), - mappingUpdater); + context::markOperationAsExecuted, + mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); } - private static Engine.DeleteResult executeDeleteRequestOnPrimary(DeleteRequest request, IndexShard primary, - MappingUpdatePerformer mappingUpdater) throws Exception { - return executeOnPrimaryWhileHandlingMappingUpdates(primary.shardId(), request.type(), + private static void executeDeleteRequestOnPrimary(BulkPrimaryExecutionContext context, + MappingUpdatePerformer mappingUpdater) throws Exception { + final DeleteRequest request = context.getRequestToExecute(); + final IndexShard primary = context.getPrimary(); + executeOnPrimaryWhileHandlingMappingUpdates(context, () -> primary.applyDeleteOperationOnPrimary(request.version(), request.type(), request.id(), 
request.versionType()), e -> primary.getFailedDeleteResult(e, request.version()), - mappingUpdater); + context::markOperationAsExecuted, + mapping -> mappingUpdater.updateMappings(mapping, primary.shardId(), request.type())); } - private static <T extends Engine.Result> T executeOnPrimaryWhileHandlingMappingUpdates(ShardId shardId, String type, - CheckedSupplier<T, IOException> toExecute, - Function<Exception, T> onError, - MappingUpdatePerformer mappingUpdater) + private static <T extends Engine.Result> void executeOnPrimaryWhileHandlingMappingUpdates( + BulkPrimaryExecutionContext context, CheckedSupplier<T, IOException> toExecute, + Function<Exception, T> exceptionToResult, Consumer<T> onComplete, Consumer<Mapping> mappingUpdater) throws IOException { T result = toExecute.get(); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { // try to update the mappings and try again. try { - mappingUpdater.updateMappings(result.getRequiredMappingUpdate(), shardId, type); + mappingUpdater.accept(result.getRequiredMappingUpdate()); } catch (Exception e) { // failure to update the mapping should translate to a failure of specific requests. Other requests // still need to be executed and replicated. - return onError.apply(e); + onComplete.accept(exceptionToResult.apply(e)); + return; } + // TODO - we can fall back to a wait for cluster state update but I'm keeping the logic the same for now result = toExecute.get(); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { // double mapping update. We assume that the successful mapping update wasn't yet processed on the node // and retry the entire request again. - throw new ReplicationOperation.RetryOnPrimaryException(shardId, - "Dynamic mappings are not available on the node that holds the primary yet"); + context.markAsRequiringMappingUpdate(); + } else { + onComplete.accept(result); } + } else { + onComplete.accept(result); } - assert result.getFailure() instanceof ReplicationOperation.RetryOnPrimaryException == false : - "IndexShard shouldn't use RetryOnPrimaryException.
got " + result.getFailure(); - return result; - } class ConcreteMappingUpdatePerformer implements MappingUpdatePerformer { diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index ba1dd5d385fea..f19ab12c8d356 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -20,9 +20,10 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; @@ -36,8 +37,7 @@ public DeletePipelineRequestBuilder newRequestBuilder(ElasticsearchClient client return new DeletePipelineRequestBuilder(client, this); } - @Override - public WritePipelineResponse newResponse() { - return new WritePipelineResponse(); + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java index 90cbce135af2c..f938bf6cb14d0 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java @@ -20,9 +20,10 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -public class DeletePipelineRequestBuilder extends ActionRequestBuilder { +public class DeletePipelineRequestBuilder extends ActionRequestBuilder { public DeletePipelineRequestBuilder(ElasticsearchClient client, DeletePipelineAction action) { super(client, action, new DeletePipelineRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java index 45cb83634f84f..d3cd052ecad1e 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineTransportAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -34,7 +35,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -public class DeletePipelineTransportAction extends TransportMasterNodeAction { +public class DeletePipelineTransportAction extends TransportMasterNodeAction { private final PipelineStore pipelineStore; private final ClusterService clusterService; @@ -54,12 +55,12 @@ protected String executor() { } @Override - protected WritePipelineResponse newResponse() { - 
return new WritePipelineResponse(); + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); } @Override - protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener listener) throws Exception { + protected void masterOperation(DeletePipelineRequest request, ClusterState state, ActionListener listener) throws Exception { pipelineStore.delete(clusterService, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index cdf0191e85fe7..17bc6456274dc 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -20,9 +20,10 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.Action; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; @@ -37,7 +38,7 @@ public PutPipelineRequestBuilder newRequestBuilder(ElasticsearchClient client) { } @Override - public WritePipelineResponse newResponse() { - return new WritePipelineResponse(); + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java index c03b3b84f8b5b..b0542387ba2da 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java @@ -20,11 +20,12 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentType; -public class PutPipelineRequestBuilder extends ActionRequestBuilder { +public class PutPipelineRequestBuilder extends ActionRequestBuilder { public PutPipelineRequestBuilder(ElasticsearchClient client, PutPipelineAction action) { super(client, action, new PutPipelineRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java index 7dde981804939..ea20a8e5b80b1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -43,7 +44,7 @@ import java.util.HashMap; import 
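For context, a minimal sketch of what callers see after this change: both ingest pipeline actions now resolve to the shared AcknowledgedResponse, so callers only inspect the acknowledged flag. The client wiring and pipeline id here are illustrative, not part of this diff.

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.ingest.DeletePipelineRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.Client;

    // Hypothetical caller; assumes a Client instance is in scope.
    final class DeletePipelineCaller {
        static void deletePipeline(Client client) {
            client.admin().cluster().deletePipeline(new DeletePipelineRequest("my-pipeline"),
                ActionListener.wrap(
                    (AcknowledgedResponse response) -> {
                        // AcknowledgedResponse replaces WritePipelineResponse; only the flag matters.
                        if (response.isAcknowledged() == false) {
                            throw new IllegalStateException("pipeline deletion was not acknowledged");
                        }
                    },
                    e -> { throw new RuntimeException(e); }));
        }
    }
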
diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
index 7dde981804939..ea20a8e5b80b1 100644
--- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
+++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineTransportAction.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.master.TransportMasterNodeAction;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -43,7 +44,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, WritePipelineResponse> {
+public class PutPipelineTransportAction extends TransportMasterNodeAction<PutPipelineRequest, AcknowledgedResponse> {
 
     private final PipelineStore pipelineStore;
     private final ClusterService clusterService;
@@ -66,12 +67,12 @@ protected String executor() {
     }
 
     @Override
-    protected WritePipelineResponse newResponse() {
-        return new WritePipelineResponse();
+    protected AcknowledgedResponse newResponse() {
+        return new AcknowledgedResponse();
     }
 
     @Override
-    protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<WritePipelineResponse> listener) throws Exception {
+    protected void masterOperation(PutPipelineRequest request, ClusterState state, ActionListener<AcknowledgedResponse> listener) throws Exception {
         NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
         nodesInfoRequest.clear();
         nodesInfoRequest.ingest(true);
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java
index c6e0b21dffd5d..3d3737b0638cd 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseExecutionException.java
@@ -138,8 +138,7 @@ protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("grouped", group); // notify that it's grouped
         builder.field("failed_shards");
         builder.startArray();
-        ShardOperationFailedException[] failures = params.paramAsBoolean("group_shard_failures", true) ?
-            ExceptionsHelper.groupBy(shardFailures) : shardFailures;
+        ShardOperationFailedException[] failures = group ? ExceptionsHelper.groupBy(shardFailures) : shardFailures;
         for (ShardOperationFailedException failure : failures) {
             builder.startObject();
             failure.toXContent(builder, params);
diff --git a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
index 0a8fe3c674395..98418153d501a 100644
--- a/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
+++ b/server/src/main/java/org/elasticsearch/action/search/ShardSearchFailure.java
@@ -43,7 +43,7 @@
 /**
  * Represents a failure to search on a specific shard.
  */
-public class ShardSearchFailure implements ShardOperationFailedException {
+public class ShardSearchFailure extends ShardOperationFailedException {
 
     private static final String REASON_FIELD = "reason";
     private static final String NODE_FIELD = "node";
@@ -53,9 +53,6 @@ public class ShardSearchFailure implements ShardOperationFailedException {
     public static final ShardSearchFailure[] EMPTY_ARRAY = new ShardSearchFailure[0];
 
     private SearchShardTarget shardTarget;
-    private String reason;
-    private RestStatus status;
-    private Throwable cause;
 
     private ShardSearchFailure() {
 
@@ -66,25 +63,18 @@ public ShardSearchFailure(Exception e) {
     }
 
     public ShardSearchFailure(Exception e, @Nullable SearchShardTarget shardTarget) {
+        super(shardTarget == null ? null : shardTarget.getFullyQualifiedIndexName(),
+            shardTarget == null ? -1 : shardTarget.getShardId().getId(),
+            ExceptionsHelper.detailedMessage(e),
+            ExceptionsHelper.status(ExceptionsHelper.unwrapCause(e)),
+            ExceptionsHelper.unwrapCause(e));
+
         final Throwable actual = ExceptionsHelper.unwrapCause(e);
         if (actual instanceof SearchException) {
             this.shardTarget = ((SearchException) actual).shard();
         } else if (shardTarget != null) {
             this.shardTarget = shardTarget;
         }
-        status = ExceptionsHelper.status(actual);
-        this.reason = ExceptionsHelper.detailedMessage(e);
-        this.cause = actual;
-    }
-
-    public ShardSearchFailure(String reason, SearchShardTarget shardTarget) {
-        this(reason, shardTarget, RestStatus.INTERNAL_SERVER_ERROR);
-    }
-
-    private ShardSearchFailure(String reason, SearchShardTarget shardTarget, RestStatus status) {
-        this.shardTarget = shardTarget;
-        this.reason = reason;
-        this.status = status;
     }
 
     /**
@@ -95,41 +85,6 @@ public SearchShardTarget shard() {
         return this.shardTarget;
     }
 
-    @Override
-    public RestStatus status() {
-        return this.status;
-    }
-
-    /**
-     * The index the search failed on.
-     */
-    @Override
-    public String index() {
-        if (shardTarget != null) {
-            return shardTarget.getFullyQualifiedIndexName();
-        }
-        return null;
-    }
-
-    /**
-     * The shard id the search failed on.
-     */
-    @Override
-    public int shardId() {
-        if (shardTarget != null) {
-            return shardTarget.getShardId().id();
-        }
-        return -1;
-    }
-
-    /**
-     * The reason of the failure.
-     */
-    @Override
-    public String reason() {
-        return this.reason;
-    }
-
     @Override
     public String toString() {
         return "shard [" + (shardTarget == null ? "_na" : shardTarget) + "], reason [" + reason + "], cause [" +
@@ -172,12 +127,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (shardTarget != null) {
             builder.field(NODE_FIELD, shardTarget.getNodeId());
         }
-        if (cause != null) {
-            builder.field(REASON_FIELD);
-            builder.startObject();
-            ElasticsearchException.generateThrowableXContent(builder, params, cause);
-            builder.endObject();
-        }
+        builder.field(REASON_FIELD);
+        builder.startObject();
+        ElasticsearchException.generateThrowableXContent(builder, params, cause);
+        builder.endObject();
         return builder;
     }
 
@@ -225,9 +178,4 @@ public static ShardSearchFailure fromXContent(XContentParser parser) throws IOException {
         }
         return new ShardSearchFailure(exception, searchShardTarget);
     }
-
-    @Override
-    public Throwable getCause() {
-        return cause;
-    }
 }
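This refactor assumes ShardOperationFailedException has been converted from an interface into an abstract base class that owns the shared state. A plausible shape for it, inferred from the super(...) calls and super.field assignments in this diff (the exact modifiers and interfaces are an assumption, they are not shown here):

    import org.elasticsearch.common.io.stream.Streamable;
    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.rest.RestStatus;

    // Sketch only; field names come from the subclass constructors above.
    public abstract class ShardOperationFailedException implements Streamable, ToXContent {

        protected String index;        // assigned via super.index in ReplicationResponse.Failure
        protected int shardId = -1;
        protected String reason;
        protected RestStatus status;
        protected Throwable cause;

        protected ShardOperationFailedException() {
        }

        protected ShardOperationFailedException(String index, int shardId, String reason, RestStatus status, Throwable cause) {
            this.index = index;
            this.shardId = shardId;
            this.reason = reason;
            this.status = status;
            this.cause = cause;
        }

        public final String index() { return index; }
        public final int shardId() { return shardId; }
        public final String reason() { return reason; }
        public final RestStatus status() { return status; }
        public final Throwable getCause() { return cause; }
    }
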
diff --git a/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java b/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
index 8a4a787fbe5f2..d297df478a4b8 100644
--- a/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
+++ b/server/src/main/java/org/elasticsearch/action/support/DefaultShardOperationFailedException.java
@@ -28,8 +28,6 @@
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.Index;
-import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;
 
 import java.io.IOException;
@@ -37,7 +35,7 @@
 import static org.elasticsearch.ExceptionsHelper.detailedMessage;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
-public class DefaultShardOperationFailedException implements ShardOperationFailedException {
+public class DefaultShardOperationFailedException extends ShardOperationFailedException {
 
     private static final String INDEX = "index";
     private static final String SHARD_ID = "shard";
@@ -52,56 +50,16 @@ public class DefaultShardOperationFailedException implements ShardOperationFailedException {
         PARSER.declareObject(constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), new ParseField(REASON));
     }
 
-    private String index;
-
-    private int shardId;
-
-    private Throwable reason;
-
-    private RestStatus status;
-
     protected DefaultShardOperationFailedException() {
     }
 
     public DefaultShardOperationFailedException(ElasticsearchException e) {
-        Index index = e.getIndex();
-        this.index = index == null ? null : index.getName();
-        ShardId shardId = e.getShardId();
-        this.shardId = shardId == null ? -1 : shardId.id();
-        this.reason = e;
-        this.status = e.status();
+        super(e.getIndex() == null ? null : e.getIndex().getName(), e.getShardId() == null ? -1 : e.getShardId().getId(),
+            detailedMessage(e), e.status(), e);
     }
 
-    public DefaultShardOperationFailedException(String index, int shardId, Throwable reason) {
-        this.index = index;
-        this.shardId = shardId;
-        this.reason = reason;
-        this.status = ExceptionsHelper.status(reason);
-    }
-
-    @Override
-    public String index() {
-        return this.index;
-    }
-
-    @Override
-    public int shardId() {
-        return this.shardId;
-    }
-
-    @Override
-    public String reason() {
-        return detailedMessage(reason);
-    }
-
-    @Override
-    public RestStatus status() {
-        return status;
-    }
-
-    @Override
-    public Throwable getCause() {
-        return reason;
+    public DefaultShardOperationFailedException(String index, int shardId, Throwable cause) {
+        super(index, shardId, detailedMessage(cause), ExceptionsHelper.status(cause), cause);
     }
 
     public static DefaultShardOperationFailedException readShardOperationFailed(StreamInput in) throws IOException {
@@ -112,24 +70,17 @@ public static DefaultShardOperationFailedException readShardOperationFailed(StreamInput in) throws IOException {
 
     @Override
     public void readFrom(StreamInput in) throws IOException {
-        if (in.readBoolean()) {
-            index = in.readString();
-        }
+        index = in.readOptionalString();
         shardId = in.readVInt();
-        reason = in.readException();
+        cause = in.readException();
         status = RestStatus.readFrom(in);
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        if (index == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            out.writeString(index);
-        }
+        out.writeOptionalString(index);
         out.writeVInt(shardId);
-        out.writeException(reason);
+        out.writeException(cause);
         RestStatus.writeTo(out, status);
     }
 
@@ -145,7 +96,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.field("status", status.name());
         if (reason != null) {
             builder.startObject("reason");
-            ElasticsearchException.generateThrowableXContent(builder, params, reason);
+            ElasticsearchException.generateThrowableXContent(builder, params, cause);
             builder.endObject();
         }
         return builder;
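A note on why the serialization change above is safe: writeOptionalString produces exactly the "presence flag, then value" layout that the removed hand-rolled branch wrote. A small illustration (the wrapper class is hypothetical):

    import java.io.IOException;
    import org.elasticsearch.common.io.stream.StreamOutput;

    final class OptionalStringWireFormat {
        // What the removed code did by hand...
        static void writeByHand(StreamOutput out, String index) throws IOException {
            if (index == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                out.writeString(index);
            }
        }
        // ...is byte-for-byte what out.writeOptionalString(index) writes, and
        // in.readOptionalString() returns null when the presence flag is false.
    }
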
diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java
index 594dcda8c662f..654b398e79cd4 100644
--- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedResponse.java
@@ -34,10 +34,9 @@
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
 /**
- * Abstract class that allows to mark action responses that support acknowledgements.
- * Facilitates consistency across different api.
+ * A response that indicates that a request has been acknowledged
  */
-public abstract class AcknowledgedResponse extends ActionResponse implements ToXContentObject {
+public class AcknowledgedResponse extends ActionResponse implements ToXContentObject {
 
     private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged");
 
@@ -48,11 +47,10 @@ protected static void declareAcknowledgedField(
 
     protected boolean acknowledged;
 
-    protected AcknowledgedResponse() {
-
+    public AcknowledgedResponse() {
     }
 
-    protected AcknowledgedResponse(boolean acknowledged) {
+    public AcknowledgedResponse(boolean acknowledged) {
         this.acknowledged = acknowledged;
     }
 
@@ -100,10 +98,15 @@ protected void addCustomFields(XContentBuilder builder, Params params) throws IOException {
             ObjectParser.ValueType.BOOLEAN);
     }
 
-    protected static boolean parseAcknowledged(XContentParser parser) {
+    @Deprecated
+    public static boolean parseAcknowledged(XContentParser parser) {
         return ACKNOWLEDGED_FLAG_PARSER.apply(parser, null);
     }
 
+    public static AcknowledgedResponse fromXContent(XContentParser parser) throws IOException {
+        return new AcknowledgedResponse(ACKNOWLEDGED_FLAG_PARSER.apply(parser, null));
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) {
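A minimal sketch of the new parsing entry point, assuming a JSON parser built from a raw response body (the parser setup and wrapper method are illustrative, not part of this diff):

    import java.io.IOException;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    final class AckParsing {
        static boolean parseAck() throws IOException {
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    "{\"acknowledged\":true}")) {
                // fromXContent replaces the now-deprecated parseAcknowledged helper.
                AcknowledgedResponse response = AcknowledgedResponse.fromXContent(parser);
                return response.isAcknowledged();
            }
        }
    }
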
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
index b8a5f3782bd64..bc5c696894a6a 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationResponse.java
@@ -218,13 +218,13 @@ public String toString() {
                 '}';
         }
 
-        public static ShardInfo readShardInfo(StreamInput in) throws IOException {
+        static ShardInfo readShardInfo(StreamInput in) throws IOException {
             ShardInfo shardInfo = new ShardInfo();
             shardInfo.readFrom(in);
             return shardInfo;
         }
 
-        public static class Failure implements ShardOperationFailedException, ToXContentObject {
+        public static class Failure extends ShardOperationFailedException implements ToXContentObject {
 
             private static final String _INDEX = "_index";
             private static final String _SHARD = "_shard";
@@ -235,37 +235,18 @@ public static class Failure implements ShardOperationFailedException, ToXContentObject {
 
             private ShardId shardId;
             private String nodeId;
-            private Exception cause;
-            private RestStatus status;
             private boolean primary;
 
             public Failure(ShardId shardId, @Nullable String nodeId, Exception cause, RestStatus status, boolean primary) {
+                super(shardId.getIndexName(), shardId.getId(), ExceptionsHelper.detailedMessage(cause), status, cause);
                 this.shardId = shardId;
                 this.nodeId = nodeId;
-                this.cause = cause;
-                this.status = status;
                 this.primary = primary;
             }
 
             Failure() {
             }
 
-            /**
-             * @return On what index the failure occurred.
-             */
-            @Override
-            public String index() {
-                return shardId.getIndexName();
-            }
-
-            /**
-             * @return On what shard id the failure occurred.
-             */
-            @Override
-            public int shardId() {
-                return shardId.id();
-            }
-
             public ShardId fullShardId() {
                 return shardId;
             }
@@ -278,27 +259,6 @@ public String nodeId() {
                 return nodeId;
             }
 
-            /**
-             * @return A text description of the failure
-             */
-            @Override
-            public String reason() {
-                return ExceptionsHelper.detailedMessage(cause);
-            }
-
-            /**
-             * @return The status to report if this failure was a primary failure.
-             */
-            @Override
-            public RestStatus status() {
-                return status;
-            }
-
-            @Override
-            public Throwable getCause() {
-                return cause;
-            }
-
             /**
             * @return Whether this failure occurred on a primary shard.
             * (this only reports true for delete by query)
@@ -310,6 +270,8 @@ public boolean primary() {
             @Override
             public void readFrom(StreamInput in) throws IOException {
                 shardId = ShardId.readShardId(in);
+                super.shardId = shardId.getId();
+                super.index = shardId.getIndexName();
                 nodeId = in.readOptionalString();
                 cause = in.readException();
                 status = RestStatus.readFrom(in);
diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
index 577426637eceb..5d425b16d1617 100644
--- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
+++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java
@@ -83,7 +83,7 @@ protected static Location syncOperationResultOrThrow(final Engine.Result operationResult,
         return location;
     }
 
-    protected static Location locationToSync(Location current, Location next) {
+    public static Location locationToSync(Location current, Location next) {
         /* here we are moving forward in the translog with each operation. Under the hood this might
          * cross translog files which is ok since from the user perspective the translog is like a
          * tape where only the highest location needs to be fsynced in order to sync all previous
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
index ab10aa710cce6..166b8f38dd52d 100644
--- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
+++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java
@@ -19,6 +19,10 @@
 
 package org.elasticsearch.action.update;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.LongSupplier;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.DocWriteResponse;
@@ -44,21 +48,23 @@
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.shard.ShardId;
-import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.UpdateScript;
 import org.elasticsearch.search.lookup.SourceLookup;
 
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.function.LongSupplier;
+
+import static org.elasticsearch.common.Booleans.parseBoolean;
 
 /**
  * Helper for translating an update request to an index, delete request or update response.
 */
 public class UpdateHelper extends AbstractComponent {
+
+    /** Whether scripts should add the ctx variable to the params map. */
+    private static final boolean CTX_IN_PARAMS =
+        parseBoolean(System.getProperty("es.scripting.update.ctx_in_params"), true);
+
     private final ScriptService scriptService;
 
     public UpdateHelper(Settings settings, ScriptService scriptService) {
@@ -297,10 +303,18 @@ Result prepareUpdateScriptRequest(ShardId shardId, UpdateRequest request, GetResult getResult,
 
     private Map<String, Object> executeScript(Script script, Map<String, Object> ctx) {
         try {
             if (scriptService != null) {
-                ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.UPDATE_CONTEXT);
-                ExecutableScript executableScript = factory.newInstance(script.getParams());
-                executableScript.setNextVar(ContextFields.CTX, ctx);
-                executableScript.run();
+                UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
+                final Map<String, Object> params;
+                if (CTX_IN_PARAMS) {
+                    params = new HashMap<>(script.getParams());
+                    params.put(ContextFields.CTX, ctx);
+                    deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. " +
+                        "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage.");
+                } else {
+                    params = script.getParams();
+                }
+                UpdateScript executableScript = factory.newInstance(params);
+                executableScript.execute(ctx);
             }
         } catch (Exception e) {
             throw new IllegalArgumentException("failed to execute script", e);
         }
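To make the new escape hatch concrete, here is a sketch of the two parameter shapes an update script can observe depending on the es.scripting.update.ctx_in_params system property. The helper class and names are illustrative only:

    import java.util.HashMap;
    import java.util.Map;

    final class CtxInParamsShapes {
        // Mirrors the branch in UpdateHelper#executeScript above.
        static Map<String, Object> paramsFor(Map<String, Object> scriptParams, Map<String, Object> ctx, boolean ctxInParams) {
            if (ctxInParams) {
                // Default (property unset or true): legacy behavior, `params.ctx`
                // keeps working but triggers a deprecation warning.
                Map<String, Object> params = new HashMap<>(scriptParams);
                params.put("ctx", ctx);
                return params;
            }
            // Opt-out (-Des.scripting.update.ctx_in_params=false): ctx is only
            // available as the argument passed to UpdateScript#execute(ctx).
            return scriptParams;
        }
    }
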
diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
index 9ee8b51209688..53b7c66aa5c3a 100644
--- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
+++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java
@@ -348,10 +348,7 @@ static void init(
             if (foreground && maybeConsoleAppender != null) {
                 Loggers.removeAppender(rootLogger, maybeConsoleAppender);
             }
-            Logger logger = Loggers.getLogger(Bootstrap.class);
-            if (INSTANCE.node != null) {
-                logger = Loggers.getLogger(Bootstrap.class, Node.NODE_NAME_SETTING.get(INSTANCE.node.settings()));
-            }
+            Logger logger = LogManager.getLogger(Bootstrap.class);
             // HACK, it sucks to do this, but we will run users out of disk space otherwise
             if (e instanceof CreationException) {
                 // guice: log the shortened exc to the log file
diff --git a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
index 949b0110fff20..f6c71c90f9d14 100644
--- a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
+++ b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java
@@ -113,7 +113,7 @@
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.tasks.TaskId;
@@ -574,12 +574,12 @@ public interface ClusterAdminClient extends ElasticsearchClient {
     /**
     * Stores an ingest pipeline
     */
-    void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener);
+    void putPipeline(PutPipelineRequest request, ActionListener<AcknowledgedResponse> listener);
 
     /**
     * Stores an ingest pipeline
     */
-    ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request);
+    ActionFuture<AcknowledgedResponse> putPipeline(PutPipelineRequest request);
 
     /**
     * Stores an ingest pipeline
@@ -596,12 +596,12 @@ public interface ClusterAdminClient extends ElasticsearchClient {
     /**
     * Deletes a stored ingest pipeline
     */
-    void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener);
+    void deletePipeline(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener);
 
     /**
     * Deletes a stored ingest pipeline
     */
-    ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request);
+    ActionFuture<AcknowledgedResponse> deletePipeline(DeletePipelineRequest request);
 
     /**
     * Deletes a stored ingest pipeline
diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java
index 09bc1446a3592..4d204b7cc3fa6 100644
--- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java
+++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java
@@ -308,7 +308,6 @@
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
 import org.elasticsearch.action.search.ClearScrollAction;
 import org.elasticsearch.action.search.ClearScrollRequest;
 import org.elasticsearch.action.search.ClearScrollRequestBuilder;
@@ -326,6 +325,7 @@
 import org.elasticsearch.action.search.SearchScrollRequestBuilder;
 import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.ThreadedActionListener;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
 import org.elasticsearch.action.termvectors.MultiTermVectorsRequestBuilder;
@@ -1094,12 +1094,12 @@ public SnapshotsStatusRequestBuilder prepareSnapshotStatus() {
     }
 
     @Override
-    public void putPipeline(PutPipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void putPipeline(PutPipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(PutPipelineAction.INSTANCE, request, listener);
     }
 
     @Override
-    public ActionFuture<WritePipelineResponse> putPipeline(PutPipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> putPipeline(PutPipelineRequest request) {
         return execute(PutPipelineAction.INSTANCE, request);
     }
 
@@ -1114,12 +1114,12 @@ public PutPipelineRequestBuilder preparePutPipeline(String id, BytesReference source,
     }
 
     @Override
-    public void deletePipeline(DeletePipelineRequest request, ActionListener<WritePipelineResponse> listener) {
+    public void deletePipeline(DeletePipelineRequest request, ActionListener<AcknowledgedResponse> listener) {
         execute(DeletePipelineAction.INSTANCE, request, listener);
     }
 
     @Override
-    public ActionFuture<WritePipelineResponse> deletePipeline(DeletePipelineRequest request) {
+    public ActionFuture<AcknowledgedResponse> deletePipeline(DeletePipelineRequest request) {
         return execute(DeletePipelineAction.INSTANCE, request);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
index b45b55609f595..2159014f8258f 100644
--- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
+++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java
@@ -62,14 +62,29 @@ public static Logger getLogger(String prefix, Logger logger) {
         return new PrefixLogger((ExtendedLogger)logger, logger.getName(), prefix);
     }
 
+    /**
+     * Get or build a logger.
+     * @deprecated Prefer {@link LogManager#getLogger}
+     */
+    @Deprecated
     public static Logger getLogger(Class<?> clazz) {
         return getLogger(null, clazz);
     }
 
+    /**
+     * Get or build a logger.
+     * @deprecated Prefer {@link LogManager#getLogger}
+     */
+    @Deprecated
     public static Logger getLogger(String name) {
         return getLogger(null, name);
     }
 
+    /**
+     * Get the root logger.
+     * @deprecated Prefer {@link LogManager#getRootLogger}
+     */
+    @Deprecated
     public static Logger getRootLogger() {
         return LogManager.getRootLogger();
     }
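For downstream code these deprecations amount to a mechanical migration to plain Log4j 2, as the new @deprecated javadoc suggests. A hedged before/after sketch (the caller class is hypothetical):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    final class MyComponent {
        // Before: deprecated Elasticsearch helper.
        // private static final Logger logger = Loggers.getLogger(MyComponent.class);

        // After: the stock Log4j 2 entry point.
        private static final Logger logger = LogManager.getLogger(MyComponent.class);
    }
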
diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java
index 58ffe2775316b..b2a24faf643fe 100644
--- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java
+++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java
@@ -67,11 +67,11 @@ public static Logger getLogger(Class<?> clazz, Settings settings, Index index, String... prefixes) {
     }
 
     public static Logger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
-        return Loggers.getLogger(clazz, prefixes);
+        return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
     }
 
     public static Logger getLogger(String loggerName, Settings settings, String... prefixes) {
-        return Loggers.getLogger(loggerName, prefixes);
+        return ESLoggerFactory.getLogger(formatPrefix(prefixes), loggerName);
     }
 
     public static Logger getLogger(Logger parentLogger, String s) {
@@ -82,22 +82,24 @@ public static Logger getLogger(Logger parentLogger, String s) {
         return ESLoggerFactory.getLogger(prefix, parentLogger.getName() + s);
     }
 
+    /**
+     * Get or build a logger.
+     * @deprecated Prefer {@link LogManager#getLogger}
+     */
+    @Deprecated
     public static Logger getLogger(String s) {
         return ESLoggerFactory.getLogger(s);
     }
 
+    /**
+     * Get or build a logger.
+     * @deprecated Prefer {@link LogManager#getLogger}
+     */
+    @Deprecated
     public static Logger getLogger(Class<?> clazz) {
         return ESLoggerFactory.getLogger(clazz);
     }
 
-    public static Logger getLogger(Class<?> clazz, String... prefixes) {
-        return ESLoggerFactory.getLogger(formatPrefix(prefixes), clazz);
-    }
-
-    public static Logger getLogger(String name, String... prefixes) {
-        return ESLoggerFactory.getLogger(formatPrefix(prefixes), name);
-    }
-
     private static String formatPrefix(String... prefixes) {
         String prefix = null;
         if (prefixes != null && prefixes.length > 0) {
diff --git a/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java
index df459679c22b4..31683b43ebd87 100644
--- a/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java
+++ b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java
@@ -70,4 +70,5 @@ public CompoundDateTimeFormatter withZone(ZoneId zoneId) {
     public String format(TemporalAccessor accessor) {
         return printer.format(accessor);
     }
+
 }
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index eef2ab5558789..baaad48a31855 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -52,30 +52,10 @@
 
 public class DateFormatters {
 
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder()
-        .appendZoneId()
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITHOUT_COLON = new DateTimeFormatterBuilder()
+    private static final DateTimeFormatter TIME_ZONE_FORMATTER_NO_COLON = new DateTimeFormatterBuilder()
         .appendOffset("+HHmm", "Z")
         .toFormatter(Locale.ROOT);
 
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITH_COLON = new DateTimeFormatterBuilder()
-        .appendOffset("+HH:mm", "Z")
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
-        .optionalStart().appendZoneId().optionalEnd()
-        .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
-        .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
-        .optionalStart()
-        .append(TIME_ZONE_FORMATTER)
-        .optionalEnd()
-        .toFormatter(Locale.ROOT);
-
     private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
         .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
         .appendLiteral("-")
@@ -101,7 +81,7 @@ public class DateFormatters {
             .appendFraction(MILLI_OF_SECOND, 3, 3, true)
             .optionalEnd()
             .optionalStart()
-            .append(TIME_ZONE_FORMATTER_WITHOUT_COLON)
+            .appendOffset("+HHmm", "Z")
             .optionalEnd()
         .optionalEnd()
         .toFormatter(Locale.ROOT);
@@ -115,483 +95,183 @@ public class DateFormatters {
             .appendFraction(MILLI_OF_SECOND, 3, 3, true)
             .optionalEnd()
             .optionalStart()
-            .append(TIME_ZONE_FORMATTER_WITH_COLON)
-            .optionalEnd()
-        .optionalEnd()
-        .toFormatter(Locale.ROOT);
-
-    private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder()
-        .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
-        .optionalStart()
-        .appendLiteral('T')
-        .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
-        .optionalStart()
-        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
-        .optionalEnd()
-        .optionalStart()
-        .append(TIME_ZONE_FORMATTER_ZONE_ID)
+            .appendZoneOrOffsetId()
             .optionalEnd()
         .optionalEnd()
         .toFormatter(Locale.ROOT);
 
+    /**
+     * Returns a generic ISO datetime parser where the date is mandatory and the time is optional.
+     */
     private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME =
-        new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2,
-            STRICT_DATE_OPTIONAL_TIME_FORMATTER_3);
-
-    private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
+        new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2);
+
+    /////////////////////////////////////////
+    //
+    // BEGIN basic time formatters
+    //
+    // these formatters do not have any splitting characters between hours, minutes, seconds, milliseconds
+    // this means they have to be strict with the exception of the last element
+    //
+    /////////////////////////////////////////
+
+    private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder()
         .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
         .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
         .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
-        .append(OPTIONAL_TIME_ZONE_FORMATTER)
         .toFormatter(Locale.ROOT);
 
-    private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter(BASIC_TIME_NO_MILLIS_FORMATTER);
+    /*
+     * Returns a basic formatter for a two digit hour of day, two digit minute
+     * of hour, two digit second of minute, and time zone offset (HHmmssZ).
+     */
+    private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
+        new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().append(BASIC_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
+    );
 
     private static final DateTimeFormatter BASIC_TIME_FORMATTER = new DateTimeFormatterBuilder()
         .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
         .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
         .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
         .appendFraction(MILLI_OF_SECOND, 1, 3, true)
-        .append(OPTIONAL_TIME_ZONE_FORMATTER)
         .toFormatter(Locale.ROOT);
 
-    private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter(BASIC_TIME_FORMATTER);
-
-    private static final DateTimeFormatter BASIC_T_TIME_FORMATTER = new DateTimeFormatterBuilder()
-        .appendLiteral("T")
-        .append(BASIC_TIME_FORMATTER)
+    private static final DateTimeFormatter BASIC_TIME_PRINTER = new DateTimeFormatterBuilder()
+        .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
         .toFormatter(Locale.ROOT);
 
-    private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter(BASIC_T_TIME_FORMATTER);
+    /*
+     * Returns a basic formatter for a two digit hour of day, two digit minute
+     * of hour, two digit second of minute, three digit millis, and time zone
+     * offset (HHmmss.SSSZ).
+     */
+    private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter(
+        new DateTimeFormatterBuilder().append(BASIC_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().append(BASIC_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
+    );
 
-    private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
-        .appendLiteral("T")
-        .append(BASIC_TIME_NO_MILLIS_FORMATTER)
-        .toFormatter(Locale.ROOT));
+    private static final DateTimeFormatter BASIC_T_TIME_PRINTER =
+        new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_PRINTER).toFormatter(Locale.ROOT);
+
+    private static final DateTimeFormatter BASIC_T_TIME_FORMATTER =
+        new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_FORMATTER).toFormatter(Locale.ROOT);
+
+    /*
+     * Returns a basic formatter for a two digit hour of day, two digit minute
+     * of hour, two digit second of minute, three digit millis, and time zone
+     * offset prefixed by 'T' ('T'HHmmss.SSSZ).
+     */
+    private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter(
+        new DateTimeFormatterBuilder().append(BASIC_T_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().append(BASIC_T_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
+    );
 
+    /*
+     * Returns a basic formatter for a two digit hour of day, two digit minute
+     * of hour, two digit second of minute, and time zone offset prefixed by 'T'
+     * ('T'HHmmssZ).
+     */
+    private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
+        new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
+            .appendZoneOrOffsetId().toFormatter(Locale.ROOT),
+        new DateTimeFormatterBuilder().appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE)
+            .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT)
+    );
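The pattern repeated throughout this file is one CompoundDateTimeFormatter built from several java.time formatters: the first entry is used for printing, all entries are tried in order for parsing (zone-or-offset id, then the no-colon offset variant). A minimal standalone sketch of that fallback idea, with an illustrative class name, not the actual implementation:

    import java.time.format.DateTimeFormatter;
    import java.time.format.DateTimeParseException;
    import java.time.temporal.TemporalAccessor;

    // Sketch of the parse-fallback behavior behind CompoundDateTimeFormatter.
    final class FirstMatchParser {
        private final DateTimeFormatter printer;
        private final DateTimeFormatter[] parsers;

        FirstMatchParser(DateTimeFormatter... formatters) {
            this.printer = formatters[0];   // mirrors the printer field used by format()
            this.parsers = formatters;
        }

        TemporalAccessor parse(String input) {
            for (DateTimeFormatter parser : parsers) {
                try {
                    return parser.parse(input);
                } catch (DateTimeParseException e) {
                    // fall through: e.g. "+01:00" vs "+0100" vs "Europe/Berlin"
                }
            }
            throw new DateTimeParseException("no formatter matched", input, 0);
        }

        String format(TemporalAccessor accessor) {
            return printer.format(accessor);
        }
    }
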
.toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter HOUR_MINUTE = new CompoundDateTimeFormatter(HOUR_MINUTE_FORMATTER); - - private static final DateTimeFormatter DATE_TIME_PREFIX = new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .optionalEnd() - .toFormatter(Locale.ROOT); - - // only the formatter, nothing optional here - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendZoneId() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_1 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_2 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_3 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_4 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_5 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter DATE_TIME_NO_MILLIS_6 = new DateTimeFormatterBuilder() - .append(DATE_TIME_PREFIX) - .optionalStart() - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .optionalEnd() - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_FORMATTER, - DATE_TIME_NO_MILLIS_1, DATE_TIME_NO_MILLIS_2, DATE_TIME_NO_MILLIS_3, DATE_TIME_NO_MILLIS_4, DATE_TIME_NO_MILLIS_5, - DATE_TIME_NO_MILLIS_6); - - private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer, - new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .parseLenient() - .optionalStart() - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) 
- .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .optionalEnd() - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(":") - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(HOUR_MINUTE_FORMATTER) - .appendLiteral(":") - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(HOUR_MINUTE_SECOND_MILLIS_FORMATTER); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = - new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DATE_FORMATTER) - .appendLiteral("T") - .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral('-') - .appendValue(DAY_OF_YEAR, 1, 3, SignStyle.NOT_NEGATIVE) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter ORDINAL_DATE = new CompoundDateTimeFormatter(ORDINAL_DATE_FORMATTER); - - private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .append(ORDINAL_DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(ORDINAL_DATE_FORMATTER) - .appendLiteral('T') - .append(HOUR_MINUTE_FORMATTER) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .optionalEnd() - .append(TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); - - private static final DateTimeFormatter TIME_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - 
.appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_PREFIX = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) .toFormatter(Locale.ROOT); - private static final DateTimeFormatter TIME_ZONE_ID = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_ZONE_ID) + private static final DateTimeFormatter BASIC_DATE_TIME_PRINTER = new DateTimeFormatterBuilder() + .append(BASIC_YEAR_MONTH_DAY_FORMATTER) + .append(BASIC_T_TIME_PRINTER) .toFormatter(Locale.ROOT); - private static final DateTimeFormatter TIME_ZONE_WITH_COLON = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_ZONE_WITHOUT_COLON = new DateTimeFormatterBuilder() - .append(TIME_PREFIX) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + /* + * Returns a basic formatter that combines a basic date and time, separated + * by a 'T' (yyyyMMdd'T'HHmmss.SSSZ). 
+ */ + private static final CompoundDateTimeFormatter BASIC_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_ZONE_ID) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_WITH_COLON) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter TIME = new CompoundDateTimeFormatter(TIME_ZONE_ID, TIME_ZONE_WITH_COLON, - TIME_ZONE_WITHOUT_COLON); - - private static final CompoundDateTimeFormatter TIME_NO_MILLIS = - new CompoundDateTimeFormatter(TIME_NO_MILLIS_FORMATTER_1, TIME_NO_MILLIS_FORMATTER_2, TIME_NO_MILLIS_FORMATTER_3); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_1) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_2) - .toFormatter(Locale.ROOT); - - private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(TIME_NO_MILLIS_FORMATTER_3) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = - new CompoundDateTimeFormatter(T_TIME_NO_MILLIS_FORMATTER_1, T_TIME_NO_MILLIS_FORMATTER_2, T_TIME_NO_MILLIS_FORMATTER_3); - - private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() - .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) - .appendLiteral("-W") - .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) - .appendLiteral('-') - .appendValue(DAY_OF_WEEK, 1) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(WEEK_DATE_FORMATTER); - - private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_1).toFormatter(Locale.ROOT), - new 
DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_3).toFormatter(Locale.ROOT) - ); - - private static final CompoundDateTimeFormatter WEEK_DATE_TIME = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), - new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + private static final DateTimeFormatter BASIC_DATE_T = + new DateTimeFormatterBuilder().append(BASIC_YEAR_MONTH_DAY_FORMATTER).appendLiteral("T").toFormatter(Locale.ROOT); + + /* + * Returns a basic formatter that combines a basic date and time without millis, + * separated by a 'T' (yyyyMMdd'T'HHmmssZ). + */ + private static final CompoundDateTimeFormatter BASIC_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_DATE_T).append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) ); - private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .appendLiteral("-W") - .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(WeekFields.ISO.weekBasedYear()) - .appendLiteral("-W") - .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) - .appendLiteral("-") - .appendValue(WeekFields.ISO.dayOfWeek()) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .appendLiteral("-") - .appendValue(MONTH_OF_YEAR) - .toFormatter(Locale.ROOT)); - - private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.YEAR) - .appendLiteral("-") - .appendValue(MONTH_OF_YEAR) - .appendLiteral("-") - .appendValue(DAY_OF_MONTH) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyyDDD). 
+ */ + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyyDDD", Locale.ROOT)); - private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.INSTANT_SECONDS) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyyDDD'T'HHmmss.SSSZ). + */ + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_PRINTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").append(BASIC_T_TIME_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) - private static final CompoundDateTimeFormatter EPOCH_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER) - .appendValue(ChronoField.MILLI_OF_SECOND, 3) - .toFormatter(Locale.ROOT)); + ); + + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyyDDD'T'HHmmssZ). + */ + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendPattern("yyyyDDD").appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(IsoFields.WEEK_BASED_YEAR) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) + .toFormatter(Locale.ROOT); + ///////////////////////////////////////// + // + // END basic time formatters + // + ///////////////////////////////////////// + + ///////////////////////////////////////// + // + // start strict formatters + // + ///////////////////////////////////////// private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() .parseStrict() .appendValue(IsoFields.WEEK_BASED_YEAR, 4) @@ -600,97 +280,208 @@ public class DateFormatters { .appendValue(ChronoField.DAY_OF_WEEK) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE = new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_FORMATTER); + private static final DateTimeFormatter STRICT_BASIC_WEEK_DATE_PRINTER = new DateTimeFormatterBuilder() + .parseStrict() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) + .toFormatter(Locale.ROOT); + /* + * Returns a basic formatter for a full date as four digit weekyear, two + * digit week of weekyear, and one digit day of week (xxxx'W'wwe). 
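+ * For example, Monday of week 26 of 2018 (25 June 2018) is rendered as '2018W261'.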
+ */ + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE = + new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_PRINTER, STRICT_BASIC_WEEK_DATE_FORMATTER); + + /* + * Returns a basic formatter that combines a basic weekyear date and time + * without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX). + */ private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( new DateTimeFormatterBuilder() - .append(STRICT_BASIC_WEEK_DATE_FORMATTER) - .append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) - .toFormatter(Locale.ROOT)); + .append(STRICT_BASIC_WEEK_DATE_PRINTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER).append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .toFormatter(Locale.ROOT) + ); + /* + * Returns a basic formatter that combines a basic weekyear date and time, + * separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX). + */ private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_PRINTER) + .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) + .toFormatter(Locale.ROOT), new DateTimeFormatterBuilder() .append(STRICT_BASIC_WEEK_DATE_FORMATTER) .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT) + ); + /* + * An ISO date formatter that formats or parses a date without an offset, such as '2011-12-03'. + */ private static final CompoundDateTimeFormatter STRICT_DATE = new CompoundDateTimeFormatter( DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT)); + /* + * A date formatter that formats or parses a date plus an hour without an offset, such as '2011-12-03T01'. + */ private static final CompoundDateTimeFormatter STRICT_DATE_HOUR = new CompoundDateTimeFormatter( DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT)); + /* + * A date formatter that formats or parses a date plus an hour/minute without an offset, such as '2011-12-03T01:10'. + */ private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter( DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT)); + /* + * A strict date formatter that formats or parses a date without an offset, such as '2011-12-03'. + */ private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER); + /* + * A strict formatter that formats or parses a year and a month, such as '2011-12'. + */ private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .appendLiteral("-") .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT)); + /* + * A strict formatter that formats or parses a year, such as '2011'. + */ private static final CompoundDateTimeFormatter STRICT_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .toFormatter(Locale.ROOT)); + /* + * A strict formatter that formats or parses an hour, minute and second, such as '09:43:25'.
+ */ private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER); - private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_DATE_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .appendLiteral('T') .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .optionalStart() .appendFraction(MILLI_OF_SECOND, 3, 3, true) .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() + /* + * Returns a formatter that combines a full date and time, separated by a 'T' + * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_DATE_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .appendLiteral('-') .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) .appendLiteral('T') .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_YEAR_MONTH_DAY_FORMATTER) .appendLiteral('T') .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time without millis, + * separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZZ). 
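+ * For example, '2018-06-25T12:10:30Z'.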
+ */ + private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_DATE_TIME_NO_MILLIS_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + // NOTE: this formatter is not strict, in order to retain the joda time based behaviour, even though its name suggests otherwise private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) .appendFraction(MILLI_OF_SECOND, 1, 3, true) .toFormatter(Locale.ROOT); + private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER = new DateTimeFormatterBuilder() + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and three digit fraction of + * second (HH:mm:ss.SSS). + * + * NOTE: this formatter is not strict, in order to retain the joda time based behaviour, + * even though its name suggests otherwise + */ private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS = - new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER, STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS; + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, two digit second of minute, and three digit + * fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS). + */ private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new CompoundDateTimeFormatter( new DateTimeFormatterBuilder() - .append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .appendLiteral("T") - .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + // this one is lenient as well, to retain joda time based backwards compatibility + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT) + ); private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; + /* + * Returns a formatter for a two digit hour of day. (HH) + */ private static final CompoundDateTimeFormatter STRICT_HOUR = new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH", Locale.ROOT)); + /* + * Returns a formatter for a two digit hour of day and two digit minute of + * hour.
(HH:mm) + */ private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE = new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)); - private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) .appendLiteral('-') .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) @@ -699,62 +490,139 @@ public class DateFormatters { .optionalStart() .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) .optionalEnd() - .append(OPTIONAL_TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT)); + .toFormatter(Locale.ROOT); - private static final DateTimeFormatter STRICT_TIME_FORMATTER = new DateTimeFormatterBuilder() + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_ORDINAL_DATE_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + // Note: milliseconds parsing is not strict, others are + private static final DateTimeFormatter STRICT_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .appendFraction(MILLI_OF_SECOND, 1, 3, true) - .append(TIME_ZONE_FORMATTER) - .toFormatter(Locale.ROOT); - - private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter(STRICT_TIME_FORMATTER); - - private static final DateTimeFormatter STRICT_T_TIME_FORMATTER = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(STRICT_TIME_FORMATTER) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter(STRICT_T_TIME_FORMATTER); - - private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + private static final DateTimeFormatter STRICT_TIME_PRINTER = new DateTimeFormatterBuilder() .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .append(TIME_ZONE_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 3, 3, true) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter(STRICT_TIME_NO_MILLIS_FORMATTER); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset (HH:mm:ss.SSSZZ). 
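+ * For example, '12:10:30.123Z' or '12:10:30.123+01:00'.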
+ */ + private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_FORMATTER_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral('T').append(STRICT_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final DateTimeFormatter STRICT_T_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() - .appendLiteral("T") - .append(STRICT_TIME_NO_MILLIS_FORMATTER) + private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT); - private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = - new CompoundDateTimeFormatter(STRICT_T_TIME_NO_MILLIS_FORMATTER); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset (HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(STRICT_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_WEEK_DATE); + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset prefixed + * by 'T' ('T'HH:mm:ssZZ). 
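+ * For example, 'T12:10:30Z'.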
+ */ + private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(STRICT_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + private static final DateTimeFormatter ISO_WEEK_DATE = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_WEEK, 1) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ISO_WEEK_DATE_T = new DateTimeFormatterBuilder() + .append(ISO_WEEK_DATE) + .appendLiteral('T') + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full date as four digit weekyear, two digit + * week of weekyear, and one digit day of week (xxxx-'W'ww-e). + */ + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(ISO_WEEK_DATE); + + /* + * Returns a formatter that combines a full weekyear date and time without millis, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ). + */ private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( - new DateTimeFormatterBuilder() - .append(DateTimeFormatter.ISO_WEEK_DATE) - .append(STRICT_T_TIME_NO_MILLIS_FORMATTER) - .toFormatter(Locale.ROOT)); + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T) + .append(STRICT_TIME_NO_MILLIS_BASE).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T) + .append(STRICT_TIME_NO_MILLIS_BASE).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); - private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() - .append(DateTimeFormatter.ISO_WEEK_DATE) - .append(STRICT_T_TIME_FORMATTER) - .toFormatter(Locale.ROOT)); + /* + * Returns a formatter that combines a full weekyear date and time, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_PRINTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ISO_WEEK_DATE_T).append(STRICT_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + /* + * Returns a formatter for a four digit weekyear + */ private static final CompoundDateTimeFormatter STRICT_WEEKYEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) .toFormatter(Locale.ROOT)); @@ -765,19 +633,560 @@ public class DateFormatters { .appendValue(WeekFields.ISO.weekOfWeekBasedYear(), 2, 2, SignStyle.NOT_NEGATIVE) .toFormatter(Locale.ROOT); + /* + * Returns a formatter for a four digit weekyear and two digit week of + * weekyear. 
(xxxx-'W'ww) + */ private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK_FORMATTER); + /* + * Returns a formatter for a four digit weekyear, two digit week of + * weekyear, and one digit day of week. (xxxx-'W'ww-e) + */ private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() .append(STRICT_WEEKYEAR_WEEK_FORMATTER) .appendLiteral("-") .appendValue(WeekFields.ISO.dayOfWeek()) .toFormatter(Locale.ROOT)); - private static final CompoundDateTimeFormatter BASIC_ISO_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.BASIC_ISO_DATE); - private static final CompoundDateTimeFormatter ISO_ORDINAL_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_ORDINAL_DATE); + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, and two digit second of + * minute. (yyyy-MM-dd'T'HH:mm:ss) + */ private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT)); + /* + * A basic formatter for a full date as four digit year, two digit + * month of year, and two digit day of month (yyyyMMdd). + */ + private static final CompoundDateTimeFormatter BASIC_DATE = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT).withZone(ZoneOffset.UTC), + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT).withZone(ZoneOffset.UTC) + ); + + private static final DateTimeFormatter STRICT_ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3) + .optionalStart() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyy-DDD). 
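+ * For example, 25 June 2018 is rendered as '2018-176'.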
+ */ + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE = new CompoundDateTimeFormatter(STRICT_ORDINAL_DATE_FORMATTER); + + ///////////////////////////////////////// + // + // end strict formatters + // + ///////////////////////////////////////// + + ///////////////////////////////////////// + // + // start lenient formatters + // + ///////////////////////////////////////// + + private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + /* + * a date formatter with optional time, being very lenient, format is + * yyyy-MM-dd'T'HH:mm:ss.SSSZ + */ + private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .optionalStart().appendZoneOrOffsetId().optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .optionalStart().appendOffset("+HHmm", "Z").optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 1, 3, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter ORDINAL_DATE_PRINTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date, using a four + * digit year and three digit dayOfYear (yyyy-DDD). 
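+ * Unlike the strict variant, parsing accepts a one to three digit day of year, so '2018-7' is valid input, while printing always pads it to three digits, as in '2018-007'.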
+ */ + private static final CompoundDateTimeFormatter ORDINAL_DATE = + new CompoundDateTimeFormatter(ORDINAL_DATE_PRINTER, ORDINAL_DATE_FORMATTER); + + private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER = + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_NO_MILLIS_FORMATTER).toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_PREFIX = new DateTimeFormatterBuilder() + .append(TIME_NO_MILLIS_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_WEEK, 1) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a four digit weekyear. (YYYY) + */ + private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(WeekFields.ISO.weekBasedYear()).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for a four digit year. (uuuu) + */ + private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for parsing the seconds since the epoch + */ + private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendValue(ChronoField.INSTANT_SECONDS).toFormatter(Locale.ROOT)); + + /* + * Returns a formatter for parsing the milliseconds since the epoch + */ + private static final CompoundDateTimeFormatter EPOCH_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER) + .appendValue(ChronoField.MILLI_OF_SECOND, 3) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date and two digit hour of + * day. (yyyy-MM-dd'T'HH) + */ + private static final CompoundDateTimeFormatter DATE_HOUR = new CompoundDateTimeFormatter(STRICT_DATE_HOUR.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, two digit second of minute, and three digit + * fraction of second (yyyy-MM-dd'T'HH:mm:ss.SSS). Parsing will parse up + to 3 fractional second digits.
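+ * For example, '2018-6-25T12:10:30.5' parses, and printing always emits three fractional digits, as in '2018-06-25T12:10:30.500'.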
+ */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_PRINTER) + .toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = DATE_HOUR_MINUTE_SECOND_MILLIS; + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * and two digit minute of hour. (yyyy-MM-dd'T'HH:mm) + */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(STRICT_DATE_HOUR_MINUTE.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_FORMATTER) + .toFormatter(Locale.ROOT)); + + /* + * Returns a formatter that combines a full date, two digit hour of day, + * two digit minute of hour, and two digit second of + * minute. (yyyy-MM-dd'T'HH:mm:ss) + */ + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter( + STRICT_DATE_HOUR_MINUTE_SECOND.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter DATE_TIME_FORMATTER = new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time, separated by a 'T' + * (yyyy-MM-dd'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter( + STRICT_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter for a full date as four digit weekyear, two + * digit week of weekyear, and one digit day of week (YYYY'W'wwe). + */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE = + new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE.printer, BASIC_WEEK_DATE_FORMATTER); + + /* + * Returns a formatter for a full date as four digit year, two digit month + * of year, and two digit day of month (yyyy-MM-dd). 
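+ * Parsing is lenient and also accepts single digit months and days, such as '2018-6-5', while printing always pads, as in '2018-06-05'.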
+ */ + private static final CompoundDateTimeFormatter DATE = new CompoundDateTimeFormatter(STRICT_DATE.printer, DATE_FORMATTER); + + // printer only, nothing is optional here + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_PRINTER = new DateTimeFormatterBuilder() + .append(STRICT_DATE.printer) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE.printer) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendZoneId() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_PREFIX = new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter that combines a full date and time without millis, but with an optional timezone, + * separated by a 'T' (yyyy-MM-dd'T'HH:mm:ssZ). + */ + private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_PRINTER, + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX) + .optionalStart().appendZoneOrOffsetId().optionalEnd().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(DATE_TIME_PREFIX) + .optionalStart().append(TIME_ZONE_FORMATTER_NO_COLON).optionalEnd().toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and three digit fraction of + * second (HH:mm:ss.SSS). + */ + private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FRACTION.printer, HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + + /* + * Returns a formatter for a two digit hour of day and two digit minute of + * hour. (HH:mm) + */ + private static final CompoundDateTimeFormatter HOUR_MINUTE = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE.printer, HOUR_MINUTE_FORMATTER); + + /* + * A lenient formatter that formats or parses an hour, minute and second, such as '09:43:25'. + */ + private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter( + STRICT_HOUR_MINUTE_SECOND.printer, + new DateTimeFormatterBuilder() + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day. (HH) + */ + private static final CompoundDateTimeFormatter HOUR = new CompoundDateTimeFormatter( + STRICT_HOUR.printer, + new DateTimeFormatterBuilder().appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter ORDINAL_DATE_TIME_FORMATTER_BASE = new DateTimeFormatterBuilder() + .append(ORDINAL_DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date and time, using a four + * digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ss.SSSZZ).
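+ * For example, '2018-176T12:10:30.123Z'.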
+ */ + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter( + STRICT_ORDINAL_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_FORMATTER_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS_BASE = new DateTimeFormatterBuilder() + .append(ORDINAL_DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT); + + /* + * Returns a formatter for a full ordinal date and time without millis, + * using a four digit year and three digit dayOfYear (yyyy-DDD'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_ORDINAL_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(ORDINAL_DATE_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter that combines a full weekyear date and time, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter WEEK_DATE_TIME = new CompoundDateTimeFormatter( + STRICT_WEEK_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter that combines a full weekyear date and time without millis, + * separated by a 'T' (xxxx-'W'ww-e'T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_WEEK_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter that combines a basic weekyear date and time, + * separated by a 'T' (xxxx'W'wwe'T'HHmmss.SSSX). + */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + STRICT_BASIC_WEEK_DATE_TIME.printer, + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).append(BASIC_T_TIME_FORMATTER) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a basic formatter that combines a basic weekyear date and time + * without millis, separated by a 'T' (xxxx'W'wwe'T'HHmmssX).
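+ * For example, '2018W261T121030Z'.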
+ */ + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(BASIC_WEEK_DATE_FORMATTER).appendLiteral("T").append(BASIC_TIME_NO_MILLIS_BASE) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset (HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter TIME = new CompoundDateTimeFormatter( + STRICT_TIME.printer, + new DateTimeFormatterBuilder().append(TIME_PREFIX).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(TIME_PREFIX).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset (HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, three digit fraction of second, and + * time zone offset prefixed by 'T' ('T'HH:mm:ss.SSSZZ). + */ + private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter( + STRICT_T_TIME.printer, + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX) + .appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a two digit hour of day, two digit minute of + * hour, two digit second of minute, and time zone offset prefixed + * by 'T' ('T'HH:mm:ssZZ). + */ + private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + STRICT_T_TIME_NO_MILLIS.printer, + new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).appendZoneOrOffsetId().toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(T_TIME_NO_MILLIS_FORMATTER).append(TIME_ZONE_FORMATTER_NO_COLON).toFormatter(Locale.ROOT) + ); + + /* + * A lenient formatter that formats or parses a year and a month, such as '2011-12'. + */ + private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter( + STRICT_YEAR_MONTH.printer, + new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).appendLiteral("-").appendValue(MONTH_OF_YEAR).toFormatter(Locale.ROOT) + ); + + /* + * A lenient date formatter that formats or parses a date without an offset, such as '2011-12-03'.
+ */ + private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter( + STRICT_YEAR_MONTH_DAY.printer, + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR) + .appendLiteral("-") + .appendValue(DAY_OF_MONTH) + .toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a full date as four digit weekyear, two digit + * week of weekyear, and one digit day of week (xxxx-'W'ww-e). + */ + private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(STRICT_WEEK_DATE.printer, WEEK_DATE_FORMATTER); + + /* + * Returns a formatter for a four digit weekyear and two digit week of + * weekyear. (xxxx-'W'ww) + */ + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK.printer, + new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .toFormatter(Locale.ROOT) + ); + + /* + * Returns a formatter for a four digit weekyear, two digit week of + * weekyear, and one digit day of week. (xxxx-'W'ww-e) + */ + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter( + STRICT_WEEKYEAR_WEEK_DAY.printer, + new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .appendLiteral("-") + .appendValue(WeekFields.ISO.dayOfWeek()) + .toFormatter(Locale.ROOT) + ); + + ///////////////////////////////////////// + // + // end lenient formatters + // + ///////////////////////////////////////// + public static CompoundDateTimeFormatter forPattern(String input) { return forPattern(input, Locale.ROOT); } @@ -791,7 +1200,7 @@ public static CompoundDateTimeFormatter forPattern(String input, Locale locale) } if ("basicDate".equals(input) || "basic_date".equals(input)) { - return BASIC_ISO_DATE; + return BASIC_DATE; } else if ("basicDateTime".equals(input) || "basic_date_time".equals(input)) { return BASIC_DATE_TIME; } else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) { @@ -916,7 +1325,7 @@ public static CompoundDateTimeFormatter forPattern(String input, Locale locale) } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) { return STRICT_HOUR_MINUTE_SECOND_MILLIS; } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) { - return ISO_ORDINAL_DATE; + return STRICT_ORDINAL_DATE; } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) { return STRICT_ORDINAL_DATE_TIME; } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java index bcc249f8a8a51..006973dd9b6bc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.util.CharsRef; import 
org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESSolrSynonymParser extends SolrSynonymParser { + private static final Logger logger = LogManager.getLogger(ESSolrSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser"); public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java index 3764820c4343d..ebcd84e39d7a2 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESWordnetSynonymParser.java @@ -20,19 +20,18 @@ package org.elasticsearch.index.analysis; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; public class ESWordnetSynonymParser extends WordnetSynonymParser { + private static final Logger logger = LogManager.getLogger(ESWordnetSynonymParser.class); private final boolean lenient; - private static final Logger logger = - Loggers.getLogger(ESSolrSynonymParser.class, "ESWordnetSynonymParser"); public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { super(dedup, expand, analyzer); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 3424a89e234cc..6b6ab95f2e199 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -299,7 +299,8 @@ public Mapper parse(ParseContext context) throws IOException { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), + fieldType().typeName()); } multiFields.parse(this, context); return null; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index c4e165d648255..cc17f9859c36d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -519,7 +519,8 @@ public Mapper parse(ParseContext context) throws IOException { indexShape(context, shape); } catch (Exception e) { if (ignoreMalformed.value() == false) { - throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), + fieldType().typeName()); } context.addIgnoredField(fieldType.name()); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index 01998c4b6dcc4..01d78c0456582 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -130,7 +130,7 @@ public Query existsQuery(QueryShardContext context) { */ @Override public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (isSameIndex(value, context.getFullyQualifiedIndexName())) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { return Queries.newMatchAllQuery(); } else { return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value); @@ -143,14 +143,14 @@ public Query termsQuery(List values, QueryShardContext context) { return super.termsQuery(values, context); } for (Object value : values) { - if (isSameIndex(value, context.getFullyQualifiedIndexName())) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { // No need to OR these clauses - we can only logically be // running in the context of just one of these index names. return Queries.newMatchAllQuery(); } } // None of the listed index names are this one - return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndexName() + return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.getFullyQualifiedIndex().getName() + " vs. " + values); } @@ -193,5 +193,4 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { // nothing to do } - } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index c7bea4e8b1ff3..eee2c67af4d2d 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -82,7 +82,7 @@ public class QueryShardContext extends QueryRewriteContext { private String[] types = Strings.EMPTY_ARRAY; private boolean cachable = true; private final SetOnce frozen = new SetOnce<>(); - private final String fullyQualifiedIndexName; + private final Index fullyQualifiedIndex; public void setTypes(String... 
types) { this.types = types; @@ -115,7 +115,8 @@ public QueryShardContext(int shardId, IndexSettings indexSettings, BitsetFilterC this.indexSettings = indexSettings; this.reader = reader; this.clusterAlias = clusterAlias; - this.fullyQualifiedIndexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()); + this.fullyQualifiedIndex = new Index(RemoteClusterAware.buildRemoteIndexName(clusterAlias, indexSettings.getIndex().getName()), + indexSettings.getIndex().getUUID()); } public QueryShardContext(QueryShardContext source) { @@ -162,7 +163,7 @@ public BitSetProducer bitsetFilter(Query filter) { } public > IFD getForField(MappedFieldType fieldType) { - return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndexName); + return (IFD) indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName()); } public void addNamedQuery(String name, Query query) { @@ -276,7 +277,7 @@ public Collection queryTypes() { public SearchLookup lookup() { if (lookup == null) { lookup = new SearchLookup(getMapperService(), - mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndexName), types); + mappedFieldType -> indexFieldDataService.apply(mappedFieldType, fullyQualifiedIndex.getName()), types); } return lookup; } @@ -427,9 +428,9 @@ public IndexReader getIndexReader() { } /** - * Returns the fully qualified index name including a remote cluster alias if applicable + * Returns the fully qualified index including a remote cluster alias if applicable, and the index uuid */ - public String getFullyQualifiedIndexName() { - return fullyQualifiedIndexName; + public Index getFullyQualifiedIndex() { + return fullyQualifiedIndex; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java index b52bc07ca78a7..843ff931d4fc2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardException.java @@ -37,16 +37,15 @@ public QueryShardException(QueryShardContext context, String msg, Object... args } public QueryShardException(QueryShardContext context, String msg, Throwable cause, Object... args) { - super(msg, cause, args); - setIndex(context.getFullyQualifiedIndexName()); + this(context.getFullyQualifiedIndex(), msg, cause, args); } /** * This constructor is provided for use in unit tests where a * {@link QueryShardContext} may not be available */ - public QueryShardException(Index index, String msg, Throwable cause) { - super(msg, cause); + public QueryShardException(Index index, String msg, Throwable cause, Object... args) { + super(msg, cause, args); setIndex(index); } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index e868da5e82ac6..b406621e978da 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -39,6 +39,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; +import java.util.Objects; import java.util.OptionalLong; import java.util.Set; import java.util.function.Function; @@ -127,6 +128,13 @@ public class ReplicationTracker extends AbstractIndexShardComponent implements L */ final Map checkpoints; + /** + * A callback invoked when the global checkpoint is updated. 
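+ * The consumer receives the new global checkpoint as its argument.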
For primary mode this occurs if the computed global checkpoint advances on + * the basis of state changes tracked here. For non-primary mode this occurs if the local knowledge of the global checkpoint advances + * due to an update from the primary. + */ + private final LongConsumer onGlobalCheckpointUpdated; + /** * This set contains allocation IDs for which there is a thread actively waiting for the local checkpoint to advance to at least the * current global checkpoint. @@ -391,7 +399,8 @@ public ReplicationTracker( final ShardId shardId, final String allocationId, final IndexSettings indexSettings, - final long globalCheckpoint) { + final long globalCheckpoint, + final LongConsumer onGlobalCheckpointUpdated) { super(shardId, indexSettings); assert globalCheckpoint >= SequenceNumbers.UNASSIGNED_SEQ_NO : "illegal initial global checkpoint: " + globalCheckpoint; this.shardAllocationId = allocationId; @@ -400,6 +409,7 @@ public ReplicationTracker( this.appliedClusterStateVersion = -1L; this.checkpoints = new HashMap<>(1 + indexSettings.getNumberOfReplicas()); checkpoints.put(allocationId, new CheckpointState(SequenceNumbers.UNASSIGNED_SEQ_NO, globalCheckpoint, false, false)); + this.onGlobalCheckpointUpdated = Objects.requireNonNull(onGlobalCheckpointUpdated); this.pendingInSync = new HashSet<>(); this.routingTable = null; this.replicationGroup = null; @@ -456,7 +466,10 @@ public synchronized void updateGlobalCheckpointOnReplica(final long globalCheckp updateGlobalCheckpoint( shardAllocationId, globalCheckpoint, - current -> logger.trace("updating global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason)); + current -> { + logger.trace("updated global checkpoint from [{}] to [{}] due to [{}]", current, globalCheckpoint, reason); + onGlobalCheckpointUpdated.accept(globalCheckpoint); + }); assert invariant(); } @@ -474,7 +487,7 @@ public synchronized void updateGlobalCheckpointForShard(final String allocationI allocationId, globalCheckpoint, current -> logger.trace( - "updating local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]", + "updated local knowledge for [{}] on the primary of the global checkpoint from [{}] to [{}]", allocationId, current, globalCheckpoint)); @@ -485,8 +498,8 @@ private void updateGlobalCheckpoint(final String allocationId, final long global final CheckpointState cps = checkpoints.get(allocationId); assert !this.shardAllocationId.equals(allocationId) || cps != null; if (cps != null && globalCheckpoint > cps.globalCheckpoint) { - ifUpdated.accept(cps.globalCheckpoint); cps.globalCheckpoint = globalCheckpoint; + ifUpdated.accept(cps.globalCheckpoint); } } @@ -737,8 +750,9 @@ private synchronized void updateGlobalCheckpointOnPrimary() { assert computedGlobalCheckpoint >= globalCheckpoint : "new global checkpoint [" + computedGlobalCheckpoint + "] is lower than previous one [" + globalCheckpoint + "]"; if (globalCheckpoint != computedGlobalCheckpoint) { - logger.trace("global checkpoint updated to [{}]", computedGlobalCheckpoint); cps.globalCheckpoint = computedGlobalCheckpoint; + logger.trace("updated global checkpoint to [{}]", computedGlobalCheckpoint); + onGlobalCheckpointUpdated.accept(computedGlobalCheckpoint); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java new file mode 100644 index 0000000000000..e279badec4a04 --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java @@ -0,0 +1,166 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.shard; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.Executor; + +import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; + +/** + * Represents a collection of global checkpoint listeners. This collection can be added to, and all listeners present at the time of an + * update will be notified together. All listeners will be notified when the shard is closed. + */ +public class GlobalCheckpointListeners implements Closeable { + + /** + * A global checkpoint listener consisting of a callback that is notified when the global checkpoint is updated or the shard is closed. + */ + @FunctionalInterface + public interface GlobalCheckpointListener { + /** + * Callback when the global checkpoint is updated or the shard is closed. If the shard is closed, the value of the global checkpoint + * will be set to {@link org.elasticsearch.index.seqno.SequenceNumbers#UNASSIGNED_SEQ_NO} and the exception will be non-null. If the + * global checkpoint is updated, the exception will be null. + * + * @param globalCheckpoint the updated global checkpoint + * @param e if non-null, the shard is closed + */ + void accept(long globalCheckpoint, IndexShardClosedException e); + } + + // guarded by this + private boolean closed; + private volatile List listeners; + private long lastKnownGlobalCheckpoint = UNASSIGNED_SEQ_NO; + + private final ShardId shardId; + private final Executor executor; + private final Logger logger; + + /** + * Construct a global checkpoint listeners collection. + * + * @param shardId the shard ID on which global checkpoint updates can be listened to + * @param executor the executor for listener notifications + * @param logger a shard-level logger + */ + GlobalCheckpointListeners( + final ShardId shardId, + final Executor executor, + final Logger logger) { + this.shardId = Objects.requireNonNull(shardId); + this.executor = Objects.requireNonNull(executor); + this.logger = Objects.requireNonNull(logger); + } + + /** + * Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the + * listener will be asynchronously notified on the executor used to construct this collection of global checkpoint listeners. 
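
The notification scheme described here can be modeled compactly. The following is an illustrative re-implementation of the contract, not the class in this diff: listeners registered since the last update are drained on each update and notified together on the executor, so any one registration is good for at most one event.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.Executor;

    // Illustrative model of the contract only; the real class also handles
    // shard closing and logs listener failures.
    class ListenersSketch {
        interface Listener {
            void accept(long globalCheckpoint, Exception failure);
        }

        private final Executor executor;
        private List<Listener> listeners; // drained on every update
        private long lastKnownGlobalCheckpoint = Long.MIN_VALUE; // stand-in for UNASSIGNED_SEQ_NO

        ListenersSketch(final Executor executor) {
            this.executor = executor;
        }

        synchronized void add(final long currentGlobalCheckpoint, final Listener listener) {
            if (lastKnownGlobalCheckpoint > currentGlobalCheckpoint) {
                // the caller is already behind; notify it immediately
                executor.execute(() -> listener.accept(lastKnownGlobalCheckpoint, null));
            } else {
                if (listeners == null) {
                    listeners = new ArrayList<>();
                }
                listeners.add(listener);
            }
        }

        synchronized void globalCheckpointUpdated(final long globalCheckpoint) {
            lastKnownGlobalCheckpoint = globalCheckpoint;
            if (listeners != null) {
                final List<Listener> currentListeners = listeners;
                listeners = null; // at most one notification per registration
                executor.execute(() -> currentListeners.forEach(l -> l.accept(globalCheckpoint, null)));
            }
        }
    }
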
If the + * shard is closed then the listener will be asynchronously notified on the executor used to construct this collection of global + * checkpoint listeners. The listener will only be notified of at most one event, either the global checkpoint is updated or the shard + * is closed. A listener must re-register after one of these events to receive subsequent events. + * + * @param currentGlobalCheckpoint the current global checkpoint known to the listener + * @param listener the listener + */ + synchronized void add(final long currentGlobalCheckpoint, final GlobalCheckpointListener listener) { + if (closed) { + executor.execute(() -> notifyListener(listener, UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId))); + return; + } + if (lastKnownGlobalCheckpoint > currentGlobalCheckpoint) { + // notify directly + executor.execute(() -> notifyListener(listener, lastKnownGlobalCheckpoint, null)); + return; + } else { + if (listeners == null) { + listeners = new ArrayList<>(); + } + listeners.add(listener); + } + } + + @Override + public synchronized void close() throws IOException { + closed = true; + notifyListeners(UNASSIGNED_SEQ_NO, new IndexShardClosedException(shardId)); + } + + synchronized int pendingListeners() { + return listeners == null ? 0 : listeners.size(); + } + + /** + * Invoke to notify all registered listeners of an updated global checkpoint. + * + * @param globalCheckpoint the updated global checkpoint + */ + synchronized void globalCheckpointUpdated(final long globalCheckpoint) { + assert globalCheckpoint >= NO_OPS_PERFORMED; + assert globalCheckpoint > lastKnownGlobalCheckpoint + : "updated global checkpoint [" + globalCheckpoint + "]" + + " is not more than the last known global checkpoint [" + lastKnownGlobalCheckpoint + "]"; + lastKnownGlobalCheckpoint = globalCheckpoint; + notifyListeners(globalCheckpoint, null); + } + + private void notifyListeners(final long globalCheckpoint, final IndexShardClosedException e) { + assert Thread.holdsLock(this); + assert (globalCheckpoint == UNASSIGNED_SEQ_NO && e != null) || (globalCheckpoint >= NO_OPS_PERFORMED && e == null); + if (listeners != null) { + // capture the current listeners + final List currentListeners = listeners; + listeners = null; + if (currentListeners != null) { + executor.execute(() -> { + for (final GlobalCheckpointListener listener : currentListeners) { + notifyListener(listener, globalCheckpoint, e); + } + }); + } + } + } + + private void notifyListener(final GlobalCheckpointListener listener, final long globalCheckpoint, final IndexShardClosedException e) { + try { + listener.accept(globalCheckpoint, e); + } catch (final Exception caught) { + if (globalCheckpoint != UNASSIGNED_SEQ_NO) { + logger.warn( + new ParameterizedMessage( + "error notifying global checkpoint listener of updated global checkpoint [{}]", + globalCheckpoint), + caught); + } else { + logger.warn("error notifying global checkpoint listener of closed shard", caught); + } + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index f5328520ef6f8..0615d7e31741d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -164,6 +164,8 @@ import java.util.stream.StreamSupport; import static org.elasticsearch.index.mapper.SourceToParse.source; +import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static 
org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; public class IndexShard extends AbstractIndexShardComponent implements IndicesClusterStateService.Shard { @@ -192,6 +194,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl private final SearchOperationListener searchOperationListener; + private final GlobalCheckpointListeners globalCheckpointListeners; private final ReplicationTracker replicationTracker; protected volatile ShardRouting shardRouting; @@ -297,8 +300,11 @@ public IndexShard( this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP); this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings, bigArrays); - this.replicationTracker = new ReplicationTracker(shardId, shardRouting.allocationId().getId(), indexSettings, - SequenceNumbers.UNASSIGNED_SEQ_NO); + final String aId = shardRouting.allocationId().getId(); + this.globalCheckpointListeners = new GlobalCheckpointListeners(shardId, threadPool.executor(ThreadPool.Names.LISTENER), logger); + this.replicationTracker = + new ReplicationTracker(shardId, aId, indexSettings, UNASSIGNED_SEQ_NO, globalCheckpointListeners::globalCheckpointUpdated); + // the query cache is a node-level thing, however we want the most popular filters // to be computed on a per-shard basis if (IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.get(settings)) { @@ -1229,7 +1235,7 @@ public void close(String reason, boolean flushEngine) throws IOException { } finally { // playing safe here and close the engine even if the above succeeds - close can be called multiple times // Also closing refreshListeners to prevent us from accumulating any more listeners - IOUtils.close(engine, refreshListeners); + IOUtils.close(engine, globalCheckpointListeners, refreshListeners); indexShardOperationPermits.close(); } } @@ -1488,10 +1494,10 @@ private void ensureWriteAllowed(Engine.Operation.Origin origin) throws IllegalIn } } else { if (origin == Engine.Operation.Origin.PRIMARY) { - verifyPrimary(); + assert assertPrimaryMode(); } else { assert origin == Engine.Operation.Origin.REPLICA; - verifyReplicationTarget(); + assert assertReplicationTarget(); } if (writeAllowedStates.contains(state) == false) { throw new IllegalIndexShardStateException(shardId, state, "operation only allowed when shard state is one of " + writeAllowedStates + ", origin [" + origin + "]"); @@ -1499,19 +1505,14 @@ private void ensureWriteAllowed(Engine.Operation.Origin origin) throws IllegalIn } } - private void verifyPrimary() { - if (shardRouting.primary() == false) { - throw new IllegalStateException("shard " + shardRouting + " is not a primary"); - } + private boolean assertPrimaryMode() { + assert shardRouting.primary() && replicationTracker.isPrimaryMode() : "shard " + shardRouting + " is not a primary shard in primary mode"; + return true; } - private void verifyReplicationTarget() { - final IndexShardState state = state(); - if (shardRouting.primary() && shardRouting.active() && replicationTracker.isPrimaryMode()) { - // must use exception that is not ignored by replication logic. 
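
These hunks replace hard runtime checks (verifyPrimary, verifyReplicationTarget) with the `assert assertPrimaryMode()` idiom. The shape of that idiom, shown standalone: the helper always returns true, so the outer assert compiles to nothing when assertions are disabled, while test JVMs (which typically run with -ea) still trip on a violated invariant with a useful message.

    // Standalone illustration of the `assert assertX()` idiom used below.
    class AssertIdiomSketch {
        private boolean primaryMode = true; // hypothetical state flag

        private boolean assertPrimaryMode() {
            assert primaryMode : "shard is not in primary mode";
            return true; // returning true lets callers write `assert assertPrimaryMode();`
        }

        void doPrimaryOnlyWork() {
            assert assertPrimaryMode(); // no-op in production, invariant check under -ea
            // ... actual work ...
        }
    }
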
See TransportActions.isShardNotAvailableException - throw new IllegalStateException("active primary shard " + shardRouting + " cannot be a replication target before " + - "relocation hand off, state is [" + state + "]"); - } + private boolean assertReplicationTarget() { + assert replicationTracker.isPrimaryMode() == false : "shard " + shardRouting + " in primary mode cannot be a replication target"; + return true; } private void verifyNotClosed() throws IllegalIndexShardStateException { @@ -1791,7 +1792,7 @@ public void writeIndexingBuffer() { * @param checkpoint the local checkpoint for the shard */ public void updateLocalCheckpointForShard(final String allocationId, final long checkpoint) { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); replicationTracker.updateLocalCheckpoint(allocationId, checkpoint); } @@ -1803,11 +1804,24 @@ public void updateLocalCheckpointForShard(final String allocationId, final long * @param globalCheckpoint the global checkpoint */ public void updateGlobalCheckpointForShard(final String allocationId, final long globalCheckpoint) { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); replicationTracker.updateGlobalCheckpointForShard(allocationId, globalCheckpoint); } + /** + * Add a global checkpoint listener. If the global checkpoint is above the current global checkpoint known to the listener then the + * listener will fire immediately on the calling thread. + * + * @param currentGlobalCheckpoint the current global checkpoint known to the listener + * @param listener the listener + */ + public void addGlobalCheckpointListener( + final long currentGlobalCheckpoint, + final GlobalCheckpointListeners.GlobalCheckpointListener listener) { + this.globalCheckpointListeners.add(currentGlobalCheckpoint, listener); + } + /** * Waits for all operations up to the provided sequence number to complete. * @@ -1825,7 +1839,7 @@ public void waitForOpsToComplete(final long seqNo) throws InterruptedException { * @param allocationId the allocation ID of the shard for which recovery was initiated */ public void initiateTracking(final String allocationId) { - verifyPrimary(); + assert assertPrimaryMode(); replicationTracker.initiateTracking(allocationId); } @@ -1838,7 +1852,7 @@ public void initiateTracking(final String allocationId) { * @param localCheckpoint the current local checkpoint on the shard */ public void markAllocationIdAsInSync(final String allocationId, final long localCheckpoint) throws InterruptedException { - verifyPrimary(); + assert assertPrimaryMode(); replicationTracker.markAllocationIdAsInSync(allocationId, localCheckpoint); } @@ -1873,7 +1887,7 @@ public long getLastSyncedGlobalCheckpoint() { * @return a map from allocation ID to the local knowledge of the global checkpoint for that allocation ID */ public ObjectLongMap getInSyncGlobalCheckpoints() { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); return replicationTracker.getInSyncGlobalCheckpoints(); } @@ -1883,11 +1897,12 @@ public ObjectLongMap getInSyncGlobalCheckpoints() { * primary. 
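
A hedged sketch of how a caller might drive the new IndexShard#addGlobalCheckpointListener API; the wait-until-target loop is illustrative and not part of this change. Because each registration receives at most one event, the caller re-registers until it has seen the checkpoint it wants.

    import org.elasticsearch.index.shard.IndexShard;

    // Hypothetical caller; error handling and timeouts are elided.
    class WaitForGlobalCheckpoint {
        static void await(final IndexShard shard, final long knownGlobalCheckpoint, final long target,
                          final Runnable onReached) {
            shard.addGlobalCheckpointListener(knownGlobalCheckpoint, (globalCheckpoint, e) -> {
                if (e != null) {
                    return; // shard was closed before the checkpoint advanced far enough
                }
                if (globalCheckpoint >= target) {
                    onReached.run();
                } else {
                    await(shard, globalCheckpoint, target, onReached); // re-register for the next update
                }
            });
        }
    }
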
*/ public void maybeSyncGlobalCheckpoint(final String reason) { - verifyPrimary(); verifyNotClosed(); + assert shardRouting.primary() : "only call maybeSyncGlobalCheckpoint on primary shard"; if (replicationTracker.isPrimaryMode() == false) { return; } + assert assertPrimaryMode(); // only sync if there are not operations in flight final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) { @@ -1913,7 +1928,7 @@ public void maybeSyncGlobalCheckpoint(final String reason) { * @return the replication group */ public ReplicationGroup getReplicationGroup() { - verifyPrimary(); + assert assertPrimaryMode(); verifyNotClosed(); return replicationTracker.getReplicationGroup(); } @@ -1925,7 +1940,7 @@ public ReplicationGroup getReplicationGroup() { * @param reason the reason the global checkpoint was updated */ public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) { - verifyReplicationTarget(); + assert assertReplicationTarget(); final long localCheckpoint = getLocalCheckpoint(); if (globalCheckpoint > localCheckpoint) { /* @@ -1952,8 +1967,7 @@ assert state() != IndexShardState.POST_RECOVERY && state() != IndexShardState.ST * @param primaryContext the sequence number context */ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext primaryContext) { - verifyPrimary(); - assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting; + assert shardRouting.primary() && shardRouting.isRelocationTarget() : "only primary relocation target can update allocation IDs from primary context: " + shardRouting; assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) && getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { @@ -1967,7 +1981,7 @@ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext p * @return {@code true} if there is at least one shard pending in-sync, otherwise false */ public boolean pendingInSync() { - verifyPrimary(); + assert assertPrimaryMode(); return replicationTracker.pendingInSync(); } @@ -2284,7 +2298,7 @@ private EngineConfig newEngineConfig() { */ public void acquirePrimaryOperationPermit(ActionListener onPermitAcquired, String executorOnDelay, Object debugInfo) { verifyNotClosed(); - verifyPrimary(); + assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting; indexShardOperationPermits.acquire(onPermitAcquired, executorOnDelay, false, debugInfo); } @@ -2334,7 +2348,6 @@ public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long g final ActionListener onPermitAcquired, final String executorOnDelay, final Object debugInfo) { verifyNotClosed(); - verifyReplicationTarget(); if (opPrimaryTerm > pendingPrimaryTerm) { synchronized (mutex) { if (opPrimaryTerm > pendingPrimaryTerm) { @@ -2353,8 +2366,8 @@ public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long g updateGlobalCheckpointOnReplica(globalCheckpoint, "primary term transition"); final long currentGlobalCheckpoint = getGlobalCheckpoint(); final long localCheckpoint; - if (currentGlobalCheckpoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { - localCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; + if (currentGlobalCheckpoint == UNASSIGNED_SEQ_NO) { + 
localCheckpoint = NO_OPS_PERFORMED; } else { localCheckpoint = currentGlobalCheckpoint; } @@ -2387,6 +2400,7 @@ public void onResponse(final Releasable releasable) { operationPrimaryTerm); onPermitAcquired.onFailure(new IllegalStateException(message)); } else { + assert assertReplicationTarget(); try { updateGlobalCheckpointOnReplica(globalCheckpoint, "operation"); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java index ff226ae00bef5..41c3252eab07a 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -79,7 +79,9 @@ protected final int readSize(ByteBuffer reusableBuffer, long position) throws IO final int size = reusableBuffer.getInt() + 4; final long maxSize = sizeInBytes() - position; if (size < 0 || size > maxSize) { - throw new TranslogCorruptedException("operation size is corrupted must be [0.." + maxSize + "] but was: " + size); + throw new TranslogCorruptedException( + path.toString(), + "operation size is corrupted must be [0.." + maxSize + "] but was: " + size); } return size; } @@ -103,14 +105,16 @@ protected final BufferedChecksumStreamInput checksummedStream(ByteBuffer reusabl buffer.limit(opSize); readBytes(buffer, position); buffer.flip(); - return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), reuse); + return new BufferedChecksumStreamInput(new ByteBufferStreamInput(buffer), path.toString(), reuse); } protected Translog.Operation read(BufferedChecksumStreamInput inStream) throws IOException { final Translog.Operation op = Translog.readOperation(inStream); if (op.primaryTerm() > getPrimaryTerm() && getPrimaryTerm() != TranslogHeader.UNKNOWN_PRIMARY_TERM) { - throw new TranslogCorruptedException("Operation's term is newer than translog header term; " + - "operation term[" + op.primaryTerm() + "], translog header term [" + getPrimaryTerm() + "]"); + throw new TranslogCorruptedException( + path.toString(), + "operation's term is newer than translog header term; " + + "operation term[" + op.primaryTerm() + "], translog header term [" + getPrimaryTerm() + "]"); } return op; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java b/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java index 37740b460b766..8e815d3599a6b 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java +++ b/server/src/main/java/org/elasticsearch/index/translog/BufferedChecksumStreamInput.java @@ -35,14 +35,11 @@ public final class BufferedChecksumStreamInput extends FilterStreamInput { private static final int SKIP_BUFFER_SIZE = 1024; private byte[] skipBuffer; private final Checksum digest; + private final String source; - public BufferedChecksumStreamInput(StreamInput in) { - super(in); - this.digest = new BufferedChecksum(new CRC32()); - } - - public BufferedChecksumStreamInput(StreamInput in, BufferedChecksumStreamInput reuse) { + public BufferedChecksumStreamInput(StreamInput in, String source, BufferedChecksumStreamInput reuse) { super(in); + this.source = source; if (reuse == null ) { this.digest = new BufferedChecksum(new CRC32()); } else { @@ -52,6 +49,10 @@ public BufferedChecksumStreamInput(StreamInput in, BufferedChecksumStreamInput r } } + public BufferedChecksumStreamInput(StreamInput in, 
String source) { + this(in, source, null); + } + public long getChecksum() { return this.digest.getValue(); } @@ -85,7 +86,6 @@ public boolean markSupported() { return delegate.markSupported(); } - @Override public long skip(long numBytes) throws IOException { if (numBytes < 0) { @@ -104,7 +104,6 @@ public long skip(long numBytes) throws IOException { return skipped; } - @Override public synchronized void mark(int readlimit) { delegate.mark(readlimit); @@ -114,4 +113,7 @@ public void resetDigest() { digest.reset(); } + public String getSource(){ + return source; + } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index f382eac1ed430..6eb7ab4847e50 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1484,7 +1484,7 @@ static void verifyChecksum(BufferedChecksumStreamInput in) throws IOException { long expectedChecksum = in.getChecksum(); long readChecksum = Integer.toUnsignedLong(in.readInt()); if (readChecksum != expectedChecksum) { - throw new TranslogCorruptedException("translog stream is corrupted, expected: 0x" + + throw new TranslogCorruptedException(in.getSource(), "checksum verification failed - expected: 0x" + Long.toHexString(expectedChecksum) + ", got: 0x" + Long.toHexString(readChecksum)); } } @@ -1492,10 +1492,10 @@ static void verifyChecksum(BufferedChecksumStreamInput in) throws IOException { /** * Reads a list of operations written with {@link #writeOperations(StreamOutput, List)} */ - public static List readOperations(StreamInput input) throws IOException { + public static List readOperations(StreamInput input, String source) throws IOException { ArrayList operations = new ArrayList<>(); int numOps = input.readInt(); - final BufferedChecksumStreamInput checksumStreamInput = new BufferedChecksumStreamInput(input); + final BufferedChecksumStreamInput checksumStreamInput = new BufferedChecksumStreamInput(input, source); for (int i = 0; i < numOps; i++) { operations.add(readOperation(checksumStreamInput)); } @@ -1507,7 +1507,7 @@ static Translog.Operation readOperation(BufferedChecksumStreamInput in) throws I try { final int opSize = in.readInt(); if (opSize < 4) { // 4byte for the checksum - throw new TranslogCorruptedException("operation size must be at least 4 but was: " + opSize); + throw new TranslogCorruptedException(in.getSource(), "operation size must be at least 4 but was: " + opSize); } in.resetDigest(); // size is not part of the checksum! if (in.markSupported()) { // if we can we validate the checksum first @@ -1522,17 +1522,15 @@ static Translog.Operation readOperation(BufferedChecksumStreamInput in) throws I } operation = Translog.Operation.readOperation(in); verifyChecksum(in); - } catch (TranslogCorruptedException e) { - throw e; } catch (EOFException e) { - throw new TruncatedTranslogException("reached premature end of file, translog is truncated", e); + throw new TruncatedTranslogException(in.getSource(), "reached premature end of file, translog is truncated", e); } return operation; } /** * Writes all operations in the given iterable to the given output stream including the size of the array - * use {@link #readOperations(StreamInput)} to read it back. + * use {@link #readOperations(StreamInput, String)} to read it back. 
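
The thread running through these translog hunks is that every checksummed read now carries a source label, so corruption errors can name the file or stream they came from. A minimal JDK-only illustration of the idea (these are not the Elasticsearch stream classes):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import java.util.zip.CRC32;
    import java.util.zip.CheckedInputStream;

    // JDK-only sketch: pair a checksumming stream with a source label so a
    // mismatch can report where the corrupt bytes came from.
    class SourceTaggedChecksumReader {
        private final CheckedInputStream in;
        private final String source;

        SourceTaggedChecksumReader(final byte[] payload, final String source) {
            this.in = new CheckedInputStream(new ByteArrayInputStream(payload), new CRC32());
            this.source = source;
        }

        byte[] readAndVerify(final int length, final long expectedChecksum) throws IOException {
            final byte[] buffer = new byte[length];
            new DataInputStream(in).readFully(buffer);
            if (in.getChecksum().getValue() != expectedChecksum) {
                throw new IOException(
                    "translog from source [" + source + "] is corrupted, checksum verification failed");
            }
            return buffer;
        }
    }
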
*/ public static void writeOperations(StreamOutput outStream, List toWrite) throws IOException { final ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(BigArrays.NON_RECYCLING_INSTANCE); @@ -1773,7 +1771,7 @@ private static Checkpoint readCheckpoint(Path location, String expectedTranslogU } catch (TranslogCorruptedException ex) { throw ex; // just bubble up. } catch (Exception ex) { - throw new TranslogCorruptedException("Translog at [" + location + "] is corrupted", ex); + throw new TranslogCorruptedException(location.toString(), ex); } return checkpoint; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java index 07700b3037cbb..ab1a48b216706 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogCorruptedException.java @@ -25,15 +25,27 @@ import java.io.IOException; public class TranslogCorruptedException extends ElasticsearchException { - public TranslogCorruptedException(String msg) { - super(msg); + public TranslogCorruptedException(String source, String details) { + super(corruptedMessage(source, details)); } - public TranslogCorruptedException(String msg, Throwable cause) { - super(msg, cause); + public TranslogCorruptedException(String source, Throwable cause) { + this(source, null, cause); } - public TranslogCorruptedException(StreamInput in) throws IOException{ + public TranslogCorruptedException(String source, String details, Throwable cause) { + super(corruptedMessage(source, details), cause); + } + + private static String corruptedMessage(String source, String details) { + String msg = "translog from source [" + source + "] is corrupted"; + if (details != null) { + msg += ", " + details; + } + return msg; + } + + public TranslogCorruptedException(StreamInput in) throws IOException { super(in); } } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java index 0fde24d8bb4d5..20aadf21bcb48 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogHeader.java @@ -110,13 +110,15 @@ private static int headerSizeInBytes(int version, int uuidLength) { static TranslogHeader read(final String translogUUID, final Path path, final FileChannel channel) throws IOException { // This input is intentionally not closed because closing it will close the FileChannel. final BufferedChecksumStreamInput in = - new BufferedChecksumStreamInput(new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel), channel.size())); + new BufferedChecksumStreamInput( + new InputStreamStreamInput(java.nio.channels.Channels.newInputStream(channel), channel.size()), + path.toString()); final int version; try { version = CodecUtil.checkHeader(new InputStreamDataInput(in), TRANSLOG_CODEC, VERSION_CHECKSUMS, VERSION_PRIMARY_TERM); } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException e) { tryReportOldVersionError(path, channel); - throw new TranslogCorruptedException("Translog header corrupted. 
path:" + path, e); + throw new TranslogCorruptedException(path.toString(), "translog header corrupted", e); } if (version == VERSION_CHECKSUMS) { throw new IllegalStateException("pre-2.0 translog found [" + path + "]"); @@ -124,15 +126,19 @@ static TranslogHeader read(final String translogUUID, final Path path, final Fil // Read the translogUUID final int uuidLen = in.readInt(); if (uuidLen > channel.size()) { - throw new TranslogCorruptedException("uuid length can't be larger than the translog"); + throw new TranslogCorruptedException( + path.toString(), + "UUID length can't be larger than the translog"); } final BytesRef uuid = new BytesRef(uuidLen); uuid.length = uuidLen; in.read(uuid.bytes, uuid.offset, uuid.length); final BytesRef expectedUUID = new BytesRef(translogUUID); if (uuid.bytesEquals(expectedUUID) == false) { - throw new TranslogCorruptedException("expected shard UUID " + expectedUUID + " but got: " + uuid + - " this translog file belongs to a different translog. path:" + path); + throw new TranslogCorruptedException( + path.toString(), + "expected shard UUID " + expectedUUID + " but got: " + uuid + + " this translog file belongs to a different translog"); } // Read the primary term final long primaryTerm; @@ -164,7 +170,9 @@ private static void tryReportOldVersionError(final Path path, final FileChannel // 0x00 => version 0 of the translog final byte b1 = Channels.readFromFileChannel(channel, 0, 1)[0]; if (b1 == 0x3f) { // LUCENE_CODEC_HEADER_BYTE - throw new TranslogCorruptedException("translog looks like version 1 or later, but has corrupted header. path:" + path); + throw new TranslogCorruptedException( + path.toString(), + "translog looks like version 1 or later, but has corrupted header" ); } else if (b1 == 0x00) { // UNVERSIONED_TRANSLOG_HEADER_BYTE throw new IllegalStateException("pre-1.4 translog found [" + path + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index 8bafb0bb51c31..4db758af207c2 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -201,8 +201,9 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc } else if (seenSequenceNumbers.containsKey(seqNo)) { final Tuple previous = seenSequenceNumbers.get(seqNo); if (previous.v1().equals(data) == false) { - Translog.Operation newOp = Translog.readOperation(new BufferedChecksumStreamInput(data.streamInput())); - Translog.Operation prvOp = Translog.readOperation(new BufferedChecksumStreamInput(previous.v1().streamInput())); + Translog.Operation newOp = Translog.readOperation(new BufferedChecksumStreamInput(data.streamInput(), "assertion")); + Translog.Operation prvOp = Translog.readOperation(new BufferedChecksumStreamInput(previous.v1().streamInput(), + "assertion")); // we need to exclude versionType from this check because it's removed in 7.0 final boolean sameOp; if (prvOp instanceof Translog.Index && newOp instanceof Translog.Index) { @@ -237,7 +238,8 @@ synchronized boolean assertNoSeqAbove(long belowTerm, long aboveSeqNo) { .forEach(e -> { final Translog.Operation op; try { - op = Translog.readOperation(new BufferedChecksumStreamInput(e.getValue().v1().streamInput())); + op = Translog.readOperation( + new BufferedChecksumStreamInput(e.getValue().v1().streamInput(), "assertion")); } catch (IOException ex) { throw new RuntimeException(ex); } 
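
Taken together, the exception hunks here centralize message construction: every corruption error leads with its source, and truncation becomes a detail layered on the same prefix. A compact runnable model, using plain RuntimeException stand-ins (the path in main is made up):

    // Stand-ins for TranslogCorruptedException / TruncatedTranslogException.
    class TranslogCorruptionSketch {
        static class CorruptedException extends RuntimeException {
            CorruptedException(String source, String details, Throwable cause) {
                super(corruptedMessage(source, details), cause);
            }

            private static String corruptedMessage(String source, String details) {
                final String msg = "translog from source [" + source + "] is corrupted";
                return details == null ? msg : msg + ", " + details;
            }
        }

        static class TruncatedException extends CorruptedException {
            TruncatedException(String source, String details, Throwable cause) {
                super(source, details, cause);
            }
        }

        public static void main(String[] args) {
            System.out.println(new TruncatedException(
                "/var/data/translog/translog-42.tlog", // hypothetical path
                "reached premature end of file, translog is truncated",
                null).getMessage());
            // -> translog from source [/var/data/translog/translog-42.tlog] is corrupted,
            //    reached premature end of file, translog is truncated
        }
    }
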
diff --git a/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java b/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java index e04eb58068d2f..5e0be02b7fcfc 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TruncatedTranslogException.java @@ -25,11 +25,12 @@ public class TruncatedTranslogException extends TranslogCorruptedException { - public TruncatedTranslogException(String msg, Throwable cause) { - super(msg, cause); - } - public TruncatedTranslogException(StreamInput in) throws IOException { super(in); } + + public TruncatedTranslogException(String source, String details, Throwable cause) { + super(source, details, cause); + } + } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index 4649462692006..be399e0f81fd0 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -66,7 +66,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); recoveryId = in.readLong(); shardId = ShardId.readShardId(in); - operations = Translog.readOperations(in); + operations = Translog.readOperations(in, "recovery"); totalTranslogOps = in.readVInt(); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java index c6dce0bd45b3c..9fceaf1a9a573 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineStore.java @@ -25,7 +25,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.WritePipelineResponse; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -121,13 +121,13 @@ public String getType() { /** * Deletes the pipeline specified by id in the request. */ - public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener listener) { + public void delete(ClusterService clusterService, DeletePipelineRequest request, ActionListener listener) { clusterService.submitStateUpdateTask("delete-pipeline-" + request.getId(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected WritePipelineResponse newResponse(boolean acknowledged) { - return new WritePipelineResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override @@ -169,15 +169,15 @@ ClusterState innerDelete(DeletePipelineRequest request, ClusterState currentStat * Stores the specified pipeline definition in the request. 
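
The ingest hunks swap the bespoke WritePipelineResponse for the generic AcknowledgedResponse; the bespoke type carried nothing beyond the acknowledged flag. The resulting task shape, sketched with a no-op execute:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.action.ingest.DeletePipelineRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
    import org.elasticsearch.cluster.ClusterState;

    // Sketch only: the real task removes the pipeline from cluster metadata.
    class AckedTaskSketch {
        static AckedClusterStateUpdateTask<AcknowledgedResponse> deleteTask(
                final DeletePipelineRequest request, final ActionListener<AcknowledgedResponse> listener) {
            return new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) {
                @Override
                protected AcknowledgedResponse newResponse(final boolean acknowledged) {
                    return new AcknowledgedResponse(acknowledged);
                }

                @Override
                public ClusterState execute(final ClusterState currentState) {
                    return currentState; // no-op for the sketch
                }
            };
        }
    }
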
*/ public void put(ClusterService clusterService, Map ingestInfos, PutPipelineRequest request, - ActionListener listener) throws Exception { + ActionListener listener) throws Exception { // validates the pipeline and processor configuration before submitting a cluster update task: validatePipeline(ingestInfos, request); clusterService.submitStateUpdateTask("put-pipeline-" + request.getId(), - new AckedClusterStateUpdateTask(request, listener) { + new AckedClusterStateUpdateTask(request, listener) { @Override - protected WritePipelineResponse newResponse(boolean acknowledged) { - return new WritePipelineResponse(acknowledged); + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java index c155b76cb810f..b219dfe74ce14 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestMainAction.java @@ -69,4 +69,9 @@ static BytesRestResponse convertMainResponse(MainResponse response, RestRequest response.toXContent(builder, request); return new BytesRestResponse(status, builder); } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 3a76c7ca0c952..1a859933ad3fe 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.Table; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.Index; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -380,7 +381,8 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted()); table.addCell(indexMetaData.getCreationDate()); - table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC)); + ZonedDateTime creationTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC); + table.addCell(DateFormatters.forPattern("strict_date_time").format(creationTime)); table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size()); table.addCell(primaryStats.getStore() == null ? 
null : primaryStats.getStore().size()); diff --git a/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java b/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java index 5d4a934969424..5fa8d1fbf94a4 100644 --- a/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java +++ b/server/src/main/java/org/elasticsearch/script/BucketAggregationScript.java @@ -46,7 +46,7 @@ public Map getParams() { return params; } - public abstract double execute(); + public abstract Double execute(); public interface Factory { BucketAggregationScript newInstance(Map params); diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java index 1bd4c31ebf349..d0d8020371bd1 100644 --- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java +++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java @@ -46,7 +46,4 @@ interface Factory { } ScriptContext CONTEXT = new ScriptContext<>("executable", Factory.class); - - // TODO: remove these once each has its own script interface - ScriptContext UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 0a7eb77d8fd1e..a4c0c5fa822b5 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -49,10 +49,10 @@ public class ScriptModule { SearchScript.SCRIPT_SORT_CONTEXT, SearchScript.TERMS_SET_QUERY_CONTEXT, ExecutableScript.CONTEXT, + UpdateScript.CONTEXT, BucketAggregationScript.CONTEXT, BucketAggregationSelectorScript.CONTEXT, SignificantTermsHeuristicScoreScript.CONTEXT, - ExecutableScript.UPDATE_CONTEXT, IngestScript.CONTEXT, FilterScript.CONTEXT, SimilarityScript.CONTEXT, diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index 9fff02b611f7b..ebb87dbd9602e 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java @@ -285,7 +285,7 @@ public FactoryType compile(Script script, ScriptContext CONTEXT = new ScriptContext<>("update", Factory.class); + + /** The generic runtime parameters for the script. */ + private final Map params; + + public UpdateScript(Map params) { + this.params = params; + } + + /** Return the parameters for this script. 
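
The new UpdateScript class gives update-by-script its own contract instead of piggybacking on ExecutableScript. A toy, Elasticsearch-free rendering of that contract follows; the ctx/_source layout mirrors what update scripts see, while the hard-coded script body is invented for the demo:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    // Toy contract: params at construction, execute mutates ctx in place.
    class UpdateScriptDemo {
        abstract static class Script {
            final Map<String, Object> params;

            Script(final Map<String, Object> params) {
                this.params = params;
            }

            abstract void execute(Map<String, Object> ctx);
        }

        public static void main(String[] args) {
            final Script incrementCounter = new Script(Collections.emptyMap()) {
                @Override
                @SuppressWarnings("unchecked")
                void execute(final Map<String, Object> ctx) {
                    final Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
                    source.merge("counter", 1, (a, b) -> (Integer) a + (Integer) b);
                }
            };
            final Map<String, Object> source = new HashMap<>(Collections.singletonMap("counter", 41));
            final Map<String, Object> ctx = new HashMap<>();
            ctx.put("_source", source);
            incrementCounter.execute(ctx);
            System.out.println(source); // {counter=42}
        }
    }
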
*/ + public Map getParams() { + return params; + } + + public abstract void execute(Map ctx); + + public interface Factory { + UpdateScript newInstance(Map params); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index da5467986d3b2..247c9aa47aeda 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -809,7 +809,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc throw new SearchContextException(context, "failed to create SearchContextHighlighter", e); } } - if (source.scriptFields() != null) { + if (source.scriptFields() != null && source.size() != 0) { int maxAllowedScriptFields = context.mapperService().getIndexSettings().getMaxScriptFields(); if (source.scriptFields().size() > maxAllowedScriptFields) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java index 1b4739c184d5e..26c4ec420d080 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/ParsedSignificantTerms.java @@ -175,7 +175,7 @@ static B parseSignificantTermsBucketXContent(final XCon bucket.subsetDf = value; bucket.setDocCount(value); } else if (InternalSignificantTerms.SCORE.equals(currentFieldName)) { - bucket.score = parser.longValue(); + bucket.score = parser.doubleValue(); } else if (InternalSignificantTerms.BG_COUNT.equals(currentFieldName)) { bucket.supersetDf = parser.longValue(); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java index c8117f9029bb6..042a30695c61d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketscript/BucketScriptPipelineAggregator.java @@ -110,13 +110,17 @@ public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext if (skipBucket) { newBuckets.add(bucket); } else { - double returned = factory.newInstance(vars).execute(); - final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map( + Double returned = factory.newInstance(vars).execute(); + if (returned == null) { + newBuckets.add(bucket); + } else { + final List aggs = StreamSupport.stream(bucket.getAggregations().spliterator(), false).map( (p) -> (InternalAggregation) p).collect(Collectors.toList()); - aggs.add(new InternalSimpleValue(name(), returned, formatter, new ArrayList<>(), metaData())); - InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), + aggs.add(new InternalSimpleValue(name(), returned, formatter, new ArrayList<>(), metaData())); + InternalMultiBucketAggregation.InternalBucket newBucket = originalAgg.createBucket(new InternalAggregations(aggs), bucket); - newBuckets.add(newBucket); + newBuckets.add(newBucket); + } } } return originalAgg.create(newBuckets); diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java index de5e2638c6cea..e10d5c35800fe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortPipelineAggregator.java @@ -180,7 +180,7 @@ public int compareTo(ComparableBucket that) { private static class TopNPriorityQueue extends PriorityQueue { private TopNPriorityQueue(int n) { - super(n, false); + super(n); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java index 90e35c34e28f8..ccab5e2cb93b3 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java @@ -247,6 +247,6 @@ public CollapseContext build(SearchContext context) { + field + "`, " + "only indexed field can retrieve `inner_hits`"); } - return new CollapseContext(fieldType, innerHits); + return new CollapseContext(field, fieldType, innerHits); } } diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java index 95fee901a30e1..82a7657f18079 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java @@ -25,24 +25,29 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.InnerHitBuilder; -import java.util.Collections; import java.util.List; /** * Context used for field collapsing */ public class CollapseContext { + private final String fieldName; private final MappedFieldType fieldType; private final List innerHits; - public CollapseContext(MappedFieldType fieldType, InnerHitBuilder innerHit) { + public CollapseContext(String fieldName, + MappedFieldType fieldType, + List innerHits) { + this.fieldName = fieldName; this.fieldType = fieldType; - this.innerHits = Collections.singletonList(innerHit); + this.innerHits = innerHits; } - public CollapseContext(MappedFieldType fieldType, List innerHits) { - this.fieldType = fieldType; - this.innerHits = innerHits; + /** + * The requested field name to collapse on. 
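
CollapseContext now remembers the name the request collapsed on, separate from the resolved MappedFieldType. A plausible motivation, stated as an assumption rather than taken from the PR: the requested name and the name the resolved field type reports can differ, and the doc_values fetch (next hunk) should key off the requested name. A stub sketch of the distinction:

    import java.util.Objects;

    // Stub types; the real context also carries the MappedFieldType and inner hits.
    class CollapseContextSketch {
        private final String fieldName;  // the name the search request used
        private final String mappedName; // the name the resolved field type reports

        CollapseContextSketch(final String fieldName, final String mappedName) {
            this.fieldName = Objects.requireNonNull(fieldName);
            this.mappedName = Objects.requireNonNull(mappedName);
        }

        String getFieldName() {
            return fieldName; // what doc_values fetching now uses
        }

        public static void main(String[] args) {
            final CollapseContextSketch ctx = new CollapseContextSketch("user", "user_id"); // invented names
            System.out.println(ctx.getFieldName()); // user, not user_id
        }
    }
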
+ */ + public String getFieldName() { + return fieldName; } /** The field type used for collapsing **/ diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java index a1562e118fb86..3ef3064697a72 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java @@ -61,7 +61,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field - String name = context.collapse().getFieldType().name(); + String name = context.collapse().getFieldName(); if (context.docValueFieldsContext() == null) { context.docValueFieldsContext(new DocValueFieldsContext( Collections.singletonList(new FieldAndFormat(name, DocValueFieldsContext.USE_DEFAULT_FORMAT)))); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java index 1578cb0a289ee..f2bdc2ba5df4f 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardFailure.java @@ -40,15 +40,11 @@ /** * Stores information about failures that occurred during shard snapshotting process */ -public class SnapshotShardFailure implements ShardOperationFailedException { - private ShardId shardId; - - private String reason; +public class SnapshotShardFailure extends ShardOperationFailedException { @Nullable private String nodeId; - - private RestStatus status; + private ShardId shardId; private SnapshotShardFailure() { @@ -74,56 +70,9 @@ public SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String rea * @param status rest status */ private SnapshotShardFailure(@Nullable String nodeId, ShardId shardId, String reason, RestStatus status) { - assert reason != null; + super(shardId.getIndexName(), shardId.id(), reason, status, new IndexShardSnapshotFailedException(shardId, reason)); this.nodeId = nodeId; this.shardId = shardId; - this.reason = reason; - this.status = status; - } - - /** - * Returns index where failure occurred - * - * @return index - */ - @Override - public String index() { - return this.shardId.getIndexName(); - } - - /** - * Returns shard id where failure occurred - * - * @return shard id - */ - @Override - public int shardId() { - return this.shardId.id(); - } - - /** - * Returns reason for the failure - * - * @return reason for the failure - */ - @Override - public String reason() { - return this.reason; - } - - /** - * Returns {@link RestStatus} corresponding to this failure - * - * @return REST status - */ - @Override - public RestStatus status() { - return status; - } - - @Override - public Throwable getCause() { - return new IndexShardSnapshotFailedException(shardId, reason); } /** @@ -142,7 +91,7 @@ public String nodeId() { * @param in stream input * @return shard failure information */ - public static SnapshotShardFailure readSnapshotShardFailure(StreamInput in) throws IOException { + static SnapshotShardFailure readSnapshotShardFailure(StreamInput in) throws IOException { SnapshotShardFailure exp = new SnapshotShardFailure(); exp.readFrom(in); return exp; @@ -152,6 +101,8 @@ public static SnapshotShardFailure 
readSnapshotShardFailure(StreamInput in) thro public void readFrom(StreamInput in) throws IOException { nodeId = in.readOptionalString(); shardId = ShardId.readShardId(in); + super.shardId = shardId.getId(); + super.index = shardId.getIndexName(); reason = in.readString(); status = RestStatus.readFrom(in); } diff --git a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java index 11f0b4d3409e0..1d2a4ca6d5f75 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionsHelperTests.java @@ -174,36 +174,13 @@ private static ShardSearchFailure createShardFailureQueryShardException(String e return new ShardSearchFailure(queryShardException, null); } - public void testGroupByNullCause() { - ShardOperationFailedException[] failures = new ShardOperationFailedException[] { - new ShardSearchFailure("error", createSearchShardTarget("node0", 0, "index", null)), - new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", null)), - new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index2", null)), - new ShardSearchFailure("error", createSearchShardTarget("node2", 2, "index", "cluster1")), - new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", "cluster1")), - new ShardSearchFailure("a different error", createSearchShardTarget("node3", 3, "index", "cluster1")) - }; - - ShardOperationFailedException[] groupBy = ExceptionsHelper.groupBy(failures); - assertThat(groupBy.length, equalTo(4)); - String[] expectedIndices = new String[]{"index", "index2", "cluster1:index", "cluster1:index"}; - String[] expectedErrors = new String[]{"error", "error", "error", "a different error"}; - - int i = 0; - for (ShardOperationFailedException shardOperationFailedException : groupBy) { - assertThat(shardOperationFailedException.reason(), equalTo(expectedErrors[i])); - assertThat(shardOperationFailedException.index(), equalTo(expectedIndices[i++])); - } - } - public void testGroupByNullIndex() { ShardOperationFailedException[] failures = new ShardOperationFailedException[] { - new ShardSearchFailure("error", null), new ShardSearchFailure(new IllegalArgumentException("error")), new ShardSearchFailure(new ParsingException(0, 0, "error", null)), }; ShardOperationFailedException[] groupBy = ExceptionsHelper.groupBy(failures); - assertThat(groupBy.length, equalTo(3)); + assertThat(groupBy.length, equalTo(2)); } } diff --git a/server/src/test/java/org/elasticsearch/action/ShardOperationFailedExceptionTests.java b/server/src/test/java/org/elasticsearch/action/ShardOperationFailedExceptionTests.java new file mode 100644 index 0000000000000..1348445b62752 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ShardOperationFailedExceptionTests.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class ShardOperationFailedExceptionTests extends ESTestCase { + + public void testCauseCannotBeNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure( + randomAlphaOfLengthBetween(3, 10), randomInt(), randomAlphaOfLengthBetween(5, 10), randomFrom(RestStatus.values()), null)); + assertEquals("cause cannot be null", nullPointerException.getMessage()); + } + + public void testStatusCannotBeNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure( + randomAlphaOfLengthBetween(3, 10), randomInt(), randomAlphaOfLengthBetween(5, 10), null, new IllegalArgumentException())); + assertEquals("status cannot be null", nullPointerException.getMessage()); + } + + public void testReasonCannotBeNull() { + NullPointerException nullPointerException = expectThrows(NullPointerException.class, () -> new Failure( + randomAlphaOfLengthBetween(3, 10), randomInt(), null, randomFrom(RestStatus.values()), new IllegalArgumentException())); + assertEquals("reason cannot be null", nullPointerException.getMessage()); + } + + public void testIndexIsNullable() { + new Failure(null, randomInt(), randomAlphaOfLengthBetween(5, 10), randomFrom(RestStatus.values()), new IllegalArgumentException()); + } + + private static class Failure extends ShardOperationFailedException { + + Failure(@Nullable String index, int shardId, String reason, RestStatus status, Throwable cause) { + super(index, shardId, reason, status, cause); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return null; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestStreamableTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestStreamableTests.java index e8dd3943cb762..83ddb45655182 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestStreamableTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequestStreamableTests.java @@ -41,7 +41,7 @@ protected UpdateSettingsRequest mutateInstance(UpdateSettingsRequest request) { List mutators = new ArrayList<>(); mutators.add(() -> mutation .masterNodeTimeout(randomValueOtherThan(request.masterNodeTimeout().getStringRep(), ESTestCase::randomTimeValue))); - mutators.add(() -> mutation.timeout(randomValueOtherThan(request.masterNodeTimeout().getStringRep(), 
ESTestCase::randomTimeValue))); + mutators.add(() -> mutation.timeout(randomValueOtherThan(request.timeout().getStringRep(), ESTestCase::randomTimeValue))); mutators.add(() -> mutation.settings(mutateSettings(request.settings()))); mutators.add(() -> mutation.indices(mutateIndices(request.indices()))); mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(request.indicesOptions(), diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContextTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContextTests.java new file mode 100644 index 0000000000000..de7444fac0903 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkPrimaryExecutionContextTests.java @@ -0,0 +1,158 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.TransportShardBulkActionTests.FakeDeleteResult; +import org.elasticsearch.action.bulk.TransportShardBulkActionTests.FakeIndexResult; +import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class BulkPrimaryExecutionContextTests extends ESTestCase { + + public void testAbortedSkipped() { + BulkShardRequest shardRequest = generateRandomRequest(); + + ArrayList> nonAbortedRequests = new ArrayList<>(); + for (BulkItemRequest request : shardRequest.items()) { + if (randomBoolean()) { + request.abort("index", new ElasticsearchException("bla")); + } else { + nonAbortedRequests.add(request.request()); + } + } + + ArrayList> visitedRequests = new ArrayList<>(); + for (BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(shardRequest, null); + context.hasMoreOperationsToExecute(); + ) { + visitedRequests.add(context.getCurrent()); + context.setRequestToExecute(context.getCurrent()); + // using failures prevents caring about types + context.markOperationAsExecuted(new Engine.IndexResult(new ElasticsearchException("bla"), 1, 1)); + context.markAsCompleted(context.getExecutionResult()); + } + + assertThat(visitedRequests, equalTo(nonAbortedRequests)); + } + + private BulkShardRequest 
generateRandomRequest() { + BulkItemRequest[] items = new BulkItemRequest[randomInt(20)]; + for (int i = 0; i < items.length; i++) { + final DocWriteRequest request; + switch (randomFrom(DocWriteRequest.OpType.values())) { + case INDEX: + request = new IndexRequest("index", "_doc", "id_" + i); + break; + case CREATE: + request = new IndexRequest("index", "_doc", "id_" + i).create(true); + break; + case UPDATE: + request = new UpdateRequest("index", "_doc", "id_" + i); + break; + case DELETE: + request = new DeleteRequest("index", "_doc", "id_" + i); + break; + default: + throw new AssertionError("unknown type"); + } + items[i] = new BulkItemRequest(i, request); + } + return new BulkShardRequest(new ShardId("index", "_na_", 0), + randomFrom(WriteRequest.RefreshPolicy.values()), items); + } + + public void testTranslogLocation() { + + BulkShardRequest shardRequest = generateRandomRequest(); + + Translog.Location expectedLocation = null; + final IndexShard primary = mock(IndexShard.class); + when(primary.shardId()).thenReturn(shardRequest.shardId()); + + long translogGen = 0; + long translogOffset = 0; + + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(shardRequest, primary); + while (context.hasMoreOperationsToExecute()) { + final Engine.Result result; + final DocWriteRequest current = context.getCurrent(); + final boolean failure = rarely(); + if (frequently()) { + translogGen += randomIntBetween(1, 4); + translogOffset = 0; + } else { + translogOffset += randomIntBetween(200, 400); + } + + Translog.Location location = new Translog.Location(translogGen, translogOffset, randomInt(200)); + switch (current.opType()) { + case INDEX: + case CREATE: + context.setRequestToExecute(current); + if (failure) { + result = new Engine.IndexResult(new ElasticsearchException("bla"), 1, 1); + } else { + result = new FakeIndexResult(1, 1, randomLongBetween(0, 200), randomBoolean(), location); + } + break; + case UPDATE: + context.setRequestToExecute(new IndexRequest(current.index(), current.type(), current.id())); + if (failure) { + result = new Engine.IndexResult(new ElasticsearchException("bla"), 1, 1, 1); + } else { + result = new FakeIndexResult(1, 1, randomLongBetween(0, 200), randomBoolean(), location); + } + break; + case DELETE: + context.setRequestToExecute(current); + if (failure) { + result = new Engine.DeleteResult(new ElasticsearchException("bla"), 1, 1); + } else { + result = new FakeDeleteResult(1, 1, randomLongBetween(0, 200), randomBoolean(), location); + } + break; + default: + throw new AssertionError("unknown type:" + current.opType()); + } + if (failure == false) { + expectedLocation = location; + } + context.markOperationAsExecuted(result); + context.markAsCompleted(context.getExecutionResult()); + } + + assertThat(context.getLocationToSync(), equalTo(expectedLocation)); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index e1b17327850f8..34a091ba884a0 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import 
org.elasticsearch.script.Script; @@ -279,7 +280,8 @@ public void testBulkUpdateMalformedScripts() throws Exception { assertThat(bulkResponse.getItems()[1].getResponse().getId(), equalTo("2")); assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); - assertThat(((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult().field("field").getValue(), equalTo(2)); + final GetResult getResult = ((UpdateResponse) bulkResponse.getItems()[1].getResponse()).getGetResult(); + assertThat(getResult.field("field").getValue(), equalTo(2)); assertThat(bulkResponse.getItems()[1].getFailure(), nullValue()); assertThat(bulkResponse.getItems()[2].getFailure().getId(), equalTo("3")); @@ -407,16 +409,20 @@ public void testBulkUpdateLargerVolume() throws Exception { assertThat("expected no failures but got: " + response.buildFailureMessage(), response.hasFailures(), equalTo(false)); assertThat(response.getItems().length, equalTo(numDocs)); for (int i = 0; i < numDocs; i++) { - assertThat(response.getItems()[i].getItemId(), equalTo(i)); - assertThat(response.getItems()[i].getId(), equalTo(Integer.toString(i))); - assertThat(response.getItems()[i].getIndex(), equalTo("test")); - assertThat(response.getItems()[i].getType(), equalTo("type1")); - assertThat(response.getItems()[i].getOpType(), equalTo(OpType.UPDATE)); + final BulkItemResponse itemResponse = response.getItems()[i]; + assertThat(itemResponse.getFailure(), nullValue()); + assertThat(itemResponse.isFailed(), equalTo(false)); + assertThat(itemResponse.getItemId(), equalTo(i)); + assertThat(itemResponse.getId(), equalTo(Integer.toString(i))); + assertThat(itemResponse.getIndex(), equalTo("test")); + assertThat(itemResponse.getType(), equalTo("type1")); + assertThat(itemResponse.getOpType(), equalTo(OpType.UPDATE)); for (int j = 0; j < 5; j++) { GetResponse getResponse = client().prepareGet("test", "type1", Integer.toString(i)).get(); assertThat(getResponse.isExists(), equalTo(false)); } } + assertThat(response.hasFailures(), equalTo(false)); } public void testBulkIndexingWhileInitializing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index a4d80a7d98d84..8ec822f63a0fe 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.action.support.replication.ReplicationOperation; import org.elasticsearch.action.support.replication.TransportWriteAction.WritePrimaryResult; import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; @@ -39,12 +38,13 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; +import 
org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; @@ -52,10 +52,9 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; +import java.util.Collections; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.LongSupplier; import static org.elasticsearch.action.bulk.TransportShardBulkAction.replicaItemExecutionMode; import static org.hamcrest.CoreMatchers.equalTo; @@ -63,102 +62,103 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyBoolean; -import static org.mockito.Mockito.anyLong; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyBoolean; +import static org.mockito.Matchers.anyLong; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class TransportShardBulkActionTests extends IndexShardTestCase { private final ShardId shardId = new ShardId("index", "_na_", 0); private final Settings idxSettings = Settings.builder() - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) - .put("index.version.created", Version.CURRENT.id) - .build(); + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put("index.version.created", Version.CURRENT.id) + .build(); private IndexMetaData indexMetaData() throws IOException { return IndexMetaData.builder("index") - .putMapping("_doc", - "{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + - "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}") - .settings(idxSettings) - .primaryTerm(0, 1).build(); + .putMapping("_doc", + "{\"properties\":{\"foo\":{\"type\":\"text\",\"fields\":" + + "{\"keyword\":{\"type\":\"keyword\",\"ignore_above\":256}}}}}") + .settings(idxSettings) + .primaryTerm(0, 1).build(); } public void testShouldExecuteReplicaItem() throws Exception { // Successful index request should be replicated DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); DocWriteResponse response = new IndexResponse(shardId, "type", "id", 1, 17, 1, randomBoolean()); BulkItemRequest request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, response)); assertThat(replicaItemExecutionMode(request, 0), - equalTo(ReplicaItemExecutionMode.NORMAL)); + equalTo(ReplicaItemExecutionMode.NORMAL)); // Failed index requests without sequence no should not be replicated writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse( - new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, - new 
BulkItemResponse.Failure("index", "type", "id", - new IllegalArgumentException("i died")))); + new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, + new BulkItemResponse.Failure("index", "type", "id", + new IllegalArgumentException("i died")))); assertThat(replicaItemExecutionMode(request, 0), - equalTo(ReplicaItemExecutionMode.NOOP)); + equalTo(ReplicaItemExecutionMode.NOOP)); // Failed index requests with sequence no should be replicated request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse( - new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, - new BulkItemResponse.Failure("index", "type", "id", - new IllegalArgumentException( - "i died after sequence no was generated"), - 1))); + new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, + new BulkItemResponse.Failure("index", "type", "id", + new IllegalArgumentException( + "i died after sequence no was generated"), + 1))); assertThat(replicaItemExecutionMode(request, 0), - equalTo(ReplicaItemExecutionMode.FAILURE)); + equalTo(ReplicaItemExecutionMode.FAILURE)); // NOOP requests should not be replicated writeRequest = new UpdateRequest("index", "type", "id"); response = new UpdateResponse(shardId, "type", "id", 1, DocWriteResponse.Result.NOOP); request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE, - response)); + response)); assertThat(replicaItemExecutionMode(request, 0), - equalTo(ReplicaItemExecutionMode.NOOP)); + equalTo(ReplicaItemExecutionMode.NOOP)); } - public void testExecuteBulkIndexRequest() throws Exception { - IndexMetaData metaData = indexMetaData(); IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE) + .create(create); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); items[0] = primaryRequest; BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; - - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, - shard, bulkShardRequest, location, 0, updateHelper, - threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); // Translog should change, since there were no problems - assertThat(newLocation, not(location)); + assertNotNull(context.getLocationToSync()); BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); assertThat(primaryResponse.getOpType(), - equalTo(create ? DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.INDEX)); + equalTo(create ? 
DocWriteRequest.OpType.CREATE : DocWriteRequest.OpType.INDEX));
         assertFalse(primaryResponse.isFailed());

         // Assert that the document actually made it there
@@ -169,13 +169,12 @@ public void testExecuteBulkIndexRequest() throws Exception {
         items[0] = primaryRequest;
         bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

-        Translog.Location secondLocation =
-                TransportShardBulkAction.executeBulkItemRequest( metaData,
-                        shard, bulkShardRequest, newLocation, 0, updateHelper,
-                        threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(new RuntimeException("fail")));
+        BulkPrimaryExecutionContext secondContext = new BulkPrimaryExecutionContext(bulkShardRequest, shard);
+        TransportShardBulkAction.executeBulkItemRequest(secondContext, updateHelper,
+                threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(new RuntimeException("fail")), () -> {});
+        assertFalse(secondContext.hasMoreOperationsToExecute());

-        // Translog should not change, since the document was not indexed due to a version conflict
-        assertThat(secondLocation, equalTo(newLocation));
+        assertNull(secondContext.getLocationToSync());

         BulkItemRequest replicaRequest = bulkShardRequest.items()[0];

@@ -193,7 +192,7 @@ public void testExecuteBulkIndexRequest() throws Exception {
         assertThat(failure.getId(), equalTo("id"));
         assertThat(failure.getCause().getClass(), equalTo(VersionConflictEngineException.class));
         assertThat(failure.getCause().getMessage(),
-            containsString("version conflict, document already exists (current version [1])"));
+                containsString("version conflict, document already exists (current version [1])"));
         assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT));

         assertThat(replicaRequest, equalTo(primaryRequest));
@@ -223,7 +222,8 @@ public void testSkipBulkIndexRequestIfAborted() throws Exception {
         UpdateHelper updateHelper = null;

         WritePrimaryResult<BulkShardRequest, BulkShardResponse> result = TransportShardBulkAction.performOnPrimary(
-            bulkShardRequest, shard, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer());
+            bulkShardRequest, shard, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(),
+            () -> {});

         // since at least 1 item passed, the translog location should exist
         assertThat(result.location, notNullValue());
@@ -253,52 +253,85 @@
         closeShards(shard);
     }

-    public void testExecuteBulkIndexRequestWithRejection() throws Exception {
-        IndexMetaData metaData = indexMetaData();
-        IndexShard shard = newStartedShard(true);
+    public void testExecuteBulkIndexRequestWithMappingUpdates() throws Exception {
         BulkItemRequest[] items = new BulkItemRequest[1];
-        DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
-            .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
+        DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
+                .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
         items[0] = new BulkItemRequest(0, writeRequest);
         BulkShardRequest bulkShardRequest =
-            new BulkShardRequest(shardId, RefreshPolicy.NONE, items);
+                new BulkShardRequest(shardId, RefreshPolicy.NONE, items);
+
+        Engine.IndexResult mappingUpdate =
+                new Engine.IndexResult(new Mapping(null, null, new MetadataFieldMapper[0], Collections.emptyMap()));
+        Translog.Location resultLocation = new Translog.Location(42, 42, 42);
+        Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation);
+
+        IndexShard shard = mock(IndexShard.class);
+        when(shard.applyIndexOperationOnPrimary(anyLong(),
any(), any(), anyLong(), anyBoolean())).thenReturn(mappingUpdate); + + // Pretend the mappings haven't made it to the node yet + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + AtomicInteger updateCalled = new AtomicInteger(); + TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, + (update, shardId, type) -> { + // There should indeed be a mapping update + assertNotNull(update); + updateCalled.incrementAndGet(); + }, () -> {}); + assertTrue(context.isInitial()); + assertTrue(context.hasMoreOperationsToExecute()); + + assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1)); + + // Verify that the shard "executed" the operation twice + verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); + + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(success); + + TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, + (update, shardId, type) -> fail("should not have had to update the mappings"), () -> {}); + + + // Verify that the shard "executed" the operation only once (2 for previous invocations plus + // 1 for this execution) + verify(shard, times(3)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); - Translog.Location location = new Translog.Location(0, 0, 0); - UpdateHelper updateHelper = null; - // Pretend the mappings haven't made it to the node yet, and throw a rejection - expectThrows(ReplicationOperation.RetryOnPrimaryException.class, - () -> TransportShardBulkAction.executeBulkItemRequest(metaData, shard, bulkShardRequest, - location, 0, updateHelper, threadPool::absoluteTimeInMillis, - new NoopMappingUpdatePerformer())); + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); + + assertThat(primaryResponse.getItemId(), equalTo(0)); + assertThat(primaryResponse.getId(), equalTo("id")); + assertThat(primaryResponse.getOpType(), equalTo(writeRequest.opType())); + assertFalse(primaryResponse.isFailed()); closeShards(shard); } public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Exception { - IndexMetaData metaData = indexMetaData(); IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); items[0] = new BulkItemRequest(0, writeRequest); - BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; - // Return an exception when trying to update the mapping + // Return an exception when trying to update the mapping, or when waiting for it to come RuntimeException err = new RuntimeException("some kind of exception"); - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, - shard, bulkShardRequest, location, 0, updateHelper, - threadPool::absoluteTimeInMillis, new ThrowingMappingUpdatePerformer(err)); + boolean errorOnWait = randomBoolean(); - // Translog shouldn't change, as there were conflicting mappings - 
assertThat(newLocation, equalTo(location)); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + errorOnWait == false ? new ThrowingMappingUpdatePerformer(err) : new NoopMappingUpdatePerformer(), + errorOnWait ? () -> { throw err; } : () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); + + // Translog shouldn't be synced, as there were conflicting mappings + assertThat(context.getLocationToSync(), nullValue()); BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); @@ -319,24 +352,24 @@ public void testExecuteBulkIndexRequestWithErrorWhileUpdatingMapping() throws Ex } public void testExecuteBulkDeleteRequest() throws Exception { - IndexMetaData metaData = indexMetaData(); IndexShard shard = newStartedShard(true); BulkItemRequest[] items = new BulkItemRequest[1]; DocWriteRequest writeRequest = new DeleteRequest("index", "_doc", "id"); items[0] = new BulkItemRequest(0, writeRequest); BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); Translog.Location location = new Translog.Location(0, 0, 0); UpdateHelper updateHelper = null; - Translog.Location newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, - shard, bulkShardRequest, location, 0, updateHelper, - threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer()); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); // Translog changes, even though the document didn't exist - assertThat(newLocation, not(location)); + assertThat(context.getLocationToSync(), not(location)); BulkItemRequest replicaRequest = bulkShardRequest.items()[0]; DocWriteRequest replicaDeleteRequest = replicaRequest.request(); @@ -368,14 +401,15 @@ public void testExecuteBulkDeleteRequest() throws Exception { items[0] = new BulkItemRequest(0, writeRequest); bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - location = newLocation; + location = context.getLocationToSync(); - newLocation = TransportShardBulkAction.executeBulkItemRequest(metaData, shard, - bulkShardRequest, location, 0, updateHelper, threadPool::absoluteTimeInMillis, - new NoopMappingUpdatePerformer()); + context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); // Translog changes, because the document was deleted - assertThat(newLocation, not(location)); + assertThat(context.getLocationToSync(), not(location)); replicaRequest = bulkShardRequest.items()[0]; replicaDeleteRequest = replicaRequest.request(); @@ -404,63 +438,81 @@ public void testExecuteBulkDeleteRequest() throws Exception { closeShards(shard); } - public void testNoopUpdateReplicaRequest() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "field", "value"); - BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); + public void 
testNoopUpdateRequest() throws Exception { + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); DocWriteResponse noopUpdateResponse = new UpdateResponse(shardId, "_doc", "id", 0, - DocWriteResponse.Result.NOOP); - BulkItemResultHolder noopResults = new BulkItemResultHolder(noopUpdateResponse, null, - replicaRequest); + DocWriteResponse.Result.NOOP); - Translog.Location location = new Translog.Location(0, 0, 0); - BulkItemRequest[] items = new BulkItemRequest[0]; + IndexShard shard = mock(IndexShard.class); + + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new UpdateHelper.Result(noopUpdateResponse, DocWriteResponse.Result.NOOP, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); + + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - BulkItemResponse primaryResponse = TransportShardBulkAction.createPrimaryResponse( - noopResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); - Translog.Location newLocation = - TransportShardBulkAction.calculateTranslogLocation(location, noopResults); + assertFalse(context.hasMoreOperationsToExecute()); // Basically nothing changes in the request since it's a noop - assertThat(newLocation, equalTo(location)); + assertThat(context.getLocationToSync(), nullValue()); + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertThat(primaryResponse.getResponse(), equalTo(noopUpdateResponse)); assertThat(primaryResponse.getResponse().getResult(), - equalTo(DocWriteResponse.Result.NOOP)); + equalTo(DocWriteResponse.Result.NOOP)); + assertThat(bulkShardRequest.items().length, equalTo(1)); + assertEquals(primaryRequest, bulkShardRequest.items()[0]); // check that bulk item was not mutated + assertThat(primaryResponse.getResponse().getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); } - public void testUpdateReplicaRequestWithFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); - BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); + public void testUpdateRequestWithFailure() throws Exception { + IndexSettings indexSettings = new IndexSettings(indexMetaData(), Settings.EMPTY); + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); + + IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); Exception err = new ElasticsearchException("I'm dead <(x.x)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); - BulkItemResultHolder failedResults = new 
BulkItemResultHolder(null, indexResult, - replicaRequest); + IndexShard shard = mock(IndexShard.class); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.indexSettings()).thenReturn(indexSettings); - Translog.Location location = new Translog.Location(0, 0, 0); - BulkItemRequest[] items = new BulkItemRequest[0]; + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new UpdateHelper.Result(updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); + + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - BulkItemResponse primaryResponse = - TransportShardBulkAction.createPrimaryResponse( - failedResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + - Translog.Location newLocation = - TransportShardBulkAction.calculateTranslogLocation(location, failedResults); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); // Since this was not a conflict failure, the primary response // should be filled out with the failure information - assertThat(newLocation, equalTo(location)); + assertNull(context.getLocationToSync()); + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); - assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertTrue(primaryResponse.isFailed()); assertThat(primaryResponse.getFailureMessage(), containsString("I'm dead <(x.x)>")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); @@ -471,33 +523,41 @@ public void testUpdateReplicaRequestWithFailure() throws Exception { assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); } - public void testUpdateReplicaRequestWithConflictFailure() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE); - BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); + public void testUpdateRequestWithConflictFailure() throws Exception { + IndexSettings indexSettings = new IndexSettings(indexMetaData(), Settings.EMPTY); + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); + + IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); Exception err = new VersionConflictEngineException(shardId, "_doc", "id", - "I'm conflicted <(;_;)>"); + "I'm conflicted <(;_;)>"); Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); - BulkItemResultHolder failedResults = new BulkItemResultHolder(null, indexResult, - replicaRequest); + IndexShard shard = mock(IndexShard.class); + 
when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.indexSettings()).thenReturn(indexSettings); - Translog.Location location = new Translog.Location(0, 0, 0); - BulkItemRequest[] items = new BulkItemRequest[0]; + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new UpdateHelper.Result(updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); + + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - BulkItemResponse primaryResponse = - TransportShardBulkAction.createPrimaryResponse( - failedResults, DocWriteRequest.OpType.UPDATE, bulkShardRequest); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location newLocation = - TransportShardBulkAction.calculateTranslogLocation(location, failedResults); - // Since this was not a conflict failure, the primary response - // should be filled out with the failure information - assertThat(newLocation, equalTo(location)); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); + + assertNull(context.getLocationToSync()); + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); - assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); assertTrue(primaryResponse.isFailed()); assertThat(primaryResponse.getFailureMessage(), containsString("I'm conflicted <(;_;)>")); BulkItemResponse.Failure failure = primaryResponse.getFailure(); @@ -508,336 +568,268 @@ public void testUpdateReplicaRequestWithConflictFailure() throws Exception { assertThat(failure.getStatus(), equalTo(RestStatus.CONFLICT)); } - public void testUpdateReplicaRequestWithSuccess() throws Exception { - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE); - BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); + public void testUpdateRequestWithSuccess() throws Exception { + IndexSettings indexSettings = new IndexSettings(indexMetaData(), Settings.EMPTY); + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); + + IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); boolean created = randomBoolean(); Translog.Location resultLocation = new Translog.Location(42, 42, 42); - Engine.IndexResult indexResult = new FakeResult(1, 1, 1, created, resultLocation); - DocWriteResponse indexResponse = new IndexResponse(shardId, "_doc", "id", 1, 17, 1, created); - BulkItemResultHolder goodResults = - new BulkItemResultHolder(indexResponse, indexResult, replicaRequest); + Engine.IndexResult indexResult = new FakeIndexResult(1, 1, 13, created, 
resultLocation); + IndexShard shard = mock(IndexShard.class); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenReturn(indexResult); + when(shard.indexSettings()).thenReturn(indexSettings); + + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new UpdateHelper.Result(updateResponse, created ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); - Translog.Location originalLocation = new Translog.Location(21, 21, 21); - BulkItemRequest[] items = new BulkItemRequest[0]; + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - BulkItemResponse primaryResponse = - TransportShardBulkAction.createPrimaryResponse( - goodResults, DocWriteRequest.OpType.INDEX, bulkShardRequest); + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + - Translog.Location newLocation = - TransportShardBulkAction.calculateTranslogLocation(originalLocation, goodResults); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); // Check that the translog is successfully advanced - assertThat(newLocation, equalTo(resultLocation)); + assertThat(context.getLocationToSync(), equalTo(resultLocation)); + assertThat(bulkShardRequest.items()[0].request(), equalTo(updateResponse)); // Since this was not a conflict failure, the primary response // should be filled out with the failure information + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); assertThat(primaryResponse.getItemId(), equalTo(0)); assertThat(primaryResponse.getId(), equalTo("id")); - assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.INDEX)); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); DocWriteResponse response = primaryResponse.getResponse(); assertThat(response.status(), equalTo(created ? 
RestStatus.CREATED : RestStatus.OK)); + assertThat(response.getSeqNo(), equalTo(13L)); } - public void testCalculateTranslogLocation() throws Exception { - final Translog.Location original = new Translog.Location(0, 0, 0); + public void testUpdateWithDelete() throws Exception { + IndexSettings indexSettings = new IndexSettings(indexMetaData(), Settings.EMPTY); + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE); - BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest); - BulkItemResultHolder results = new BulkItemResultHolder(null, null, replicaRequest); + DeleteRequest updateResponse = new DeleteRequest("index", "_doc", "id"); - assertThat(TransportShardBulkAction.calculateTranslogLocation(original, results), - equalTo(original)); + boolean found = randomBoolean(); + Translog.Location resultLocation = new Translog.Location(42, 42, 42); + final long resultSeqNo = 13; + Engine.DeleteResult deleteResult = new FakeDeleteResult(1, 1, resultSeqNo, found, resultLocation); + IndexShard shard = mock(IndexShard.class); + when(shard.applyDeleteOperationOnPrimary(anyLong(), any(), any(), any())).thenReturn(deleteResult); + when(shard.indexSettings()).thenReturn(indexSettings); + + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new UpdateHelper.Result(updateResponse, DocWriteResponse.Result.DELETED, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); + + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - boolean created = randomBoolean(); - DocWriteResponse indexResponse = new IndexResponse(shardId, "_doc", "id", 1, 17, 1, created); - Translog.Location newLocation = new Translog.Location(1, 1, 1); - final long version = randomNonNegativeLong(); - final long seqNo = randomNonNegativeLong(); - Engine.IndexResult indexResult = new IndexResultWithLocation(version, 0L, seqNo, created, newLocation); - results = new BulkItemResultHolder(indexResponse, indexResult, replicaRequest); - assertThat(TransportShardBulkAction.calculateTranslogLocation(original, results), - equalTo(newLocation)); - } + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); - public void testNoOpReplicationOnPrimaryDocumentFailure() throws Exception { - final IndexShard shard = spy(newStartedShard(false)); - BulkItemRequest itemRequest = new BulkItemRequest(0, new IndexRequest("index", "_doc").source(Requests.INDEX_CONTENT_TYPE)); - final String failureMessage = "simulated primary failure"; - final IOException exception = new IOException(failureMessage); - itemRequest.setPrimaryResponse(new BulkItemResponse(0, - randomFrom( - DocWriteRequest.OpType.CREATE, - DocWriteRequest.OpType.DELETE, - DocWriteRequest.OpType.INDEX - ), - new BulkItemResponse.Failure("index", "_doc", "1", - exception, 1L) - )); - BulkItemRequest[] itemRequests = new BulkItemRequest[1]; - itemRequests[0] = itemRequest; - BulkShardRequest 
bulkShardRequest = new BulkShardRequest( - shard.shardId(), RefreshPolicy.NONE, itemRequests); - TransportShardBulkAction.performOnReplica(bulkShardRequest, shard); - verify(shard, times(1)).markSeqNoAsNoop(1, exception.toString()); - closeShards(shard); + // Check that the translog is successfully advanced + assertThat(context.getLocationToSync(), equalTo(resultLocation)); + assertThat(bulkShardRequest.items()[0].request(), equalTo(updateResponse)); + BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); + assertThat(primaryResponse.getItemId(), equalTo(0)); + assertThat(primaryResponse.getId(), equalTo("id")); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); + DocWriteResponse response = primaryResponse.getResponse(); + assertThat(response.status(), equalTo(RestStatus.OK)); + assertThat(response.getSeqNo(), equalTo(resultSeqNo)); } - public void testMappingUpdateParsesCorrectNumberOfTimes() throws Exception { - IndexMetaData metaData = indexMetaData(); - logger.info("--> metadata.getIndex(): {}", metaData.getIndex()); - final IndexShard shard = spy(newStartedShard(true)); - - IndexRequest request = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); - - final AtomicInteger updateCalled = new AtomicInteger(0); - expectThrows(ReplicationOperation.RetryOnPrimaryException.class, - () -> TransportShardBulkAction.executeIndexRequestOnPrimary(request, shard, - (update, shardId, type) -> { - // There should indeed be a mapping update - assertNotNull(update); - updateCalled.incrementAndGet(); - })); - assertThat("mappings were \"updated\" once", updateCalled.get(), equalTo(1)); + public void testFailureDuringUpdateProcessing() throws Exception { + DocWriteRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); + IndexShard shard = mock(IndexShard.class); - // Verify that the shard "executed" the operation twice - verify(shard, times(2)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); + UpdateHelper updateHelper = mock(UpdateHelper.class); + final ElasticsearchException err = new ElasticsearchException("oops"); + when(updateHelper.prepare(any(), eq(shard), any())).thenThrow(err); + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - // Update the mapping, so the next mapping updater doesn't do anything - final MapperService mapperService = shard.mapperService(); - logger.info("--> mapperService.index(): {}", mapperService.index()); - mapperService.updateMapping(metaData); - TransportShardBulkAction.executeIndexRequestOnPrimary(request, shard, - (update, shardId, type) -> fail("should not have had to update the mappings")); + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + TransportShardBulkAction.executeBulkItemRequest(context, updateHelper, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); + assertFalse(context.hasMoreOperationsToExecute()); - // Verify that the shard "executed" the operation only once (2 for previous invocations plus - // 1 for this execution) - verify(shard, times(3)).applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean()); - - closeShards(shard); + assertNull(context.getLocationToSync()); + 
BulkItemResponse primaryResponse = bulkShardRequest.items()[0].getPrimaryResponse(); + assertThat(primaryResponse.getItemId(), equalTo(0)); + assertThat(primaryResponse.getId(), equalTo("id")); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); + assertTrue(primaryResponse.isFailed()); + assertThat(primaryResponse.getFailureMessage(), containsString("oops")); + BulkItemResponse.Failure failure = primaryResponse.getFailure(); + assertThat(failure.getIndex(), equalTo("index")); + assertThat(failure.getType(), equalTo("_doc")); + assertThat(failure.getId(), equalTo("id")); + assertThat(failure.getCause(), equalTo(err)); + assertThat(failure.getStatus(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); } - public class IndexResultWithLocation extends Engine.IndexResult { - private final Translog.Location location; - public IndexResultWithLocation(long version, long term, long seqNo, boolean created, Translog.Location newLocation) { - super(version, term, seqNo, created); - this.location = newLocation; + public void testTranslogPositionToSync() throws Exception { + IndexShard shard = newStartedShard(true); + + BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)]; + for (int i = 0; i < items.length; i++) { + DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id_" + i) + .source(Requests.INDEX_CONTENT_TYPE) + .opType(DocWriteRequest.OpType.INDEX); + items[i] = new BulkItemRequest(i, writeRequest); } + BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - @Override - public Translog.Location getTranslogLocation() { - return this.location; + BulkPrimaryExecutionContext context = new BulkPrimaryExecutionContext(bulkShardRequest, shard); + while (context.hasMoreOperationsToExecute()) { + TransportShardBulkAction.executeBulkItemRequest(context, null, threadPool::absoluteTimeInMillis, + new NoopMappingUpdatePerformer(), () -> {}); } - } - public void testProcessUpdateResponse() throws Exception { - IndexShard shard = newStartedShard(false); + assertTrue(shard.isSyncNeeded()); - UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id"); - BulkItemRequest request = new BulkItemRequest(0, updateRequest); - Exception err = new VersionConflictEngineException(shardId, "_doc", "id", - "I'm conflicted <(;_;)>"); - Engine.IndexResult indexResult = new Engine.IndexResult(err, 0, 0, 0); - Engine.DeleteResult deleteResult = new Engine.DeleteResult(1, 1, 1, true); - DocWriteResponse.Result docWriteResult = DocWriteResponse.Result.CREATED; - DocWriteResponse.Result deleteWriteResult = DocWriteResponse.Result.DELETED; - IndexRequest indexRequest = new IndexRequest("index", "_doc", "id"); - DeleteRequest deleteRequest = new DeleteRequest("index", "_doc", "id"); - UpdateHelper.Result translate = new UpdateHelper.Result(indexRequest, docWriteResult, - new HashMap(), XContentType.JSON); - UpdateHelper.Result translateDelete = new UpdateHelper.Result(deleteRequest, deleteWriteResult, - new HashMap(), XContentType.JSON); + // if we sync the location, nothing else is unsynced + CountDownLatch latch = new CountDownLatch(1); + shard.sync(context.getLocationToSync(), e -> { + if (e != null) { + throw new AssertionError(e); + } + latch.countDown(); + }); - BulkItemRequest[] itemRequests = new BulkItemRequest[1]; - itemRequests[0] = request; - BulkShardRequest bulkShardRequest = new BulkShardRequest(shard.shardId(), RefreshPolicy.NONE, itemRequests); - - BulkItemResultHolder holder = 
TransportShardBulkAction.processUpdateResponse(updateRequest, - "index", indexResult, translate, shard, 7); - - assertTrue(holder.isVersionConflict()); - assertThat(holder.response, instanceOf(UpdateResponse.class)); - UpdateResponse updateResp = (UpdateResponse) holder.response; - assertThat(updateResp.getGetResult(), equalTo(null)); - assertThat(holder.operationResult, equalTo(indexResult)); - BulkItemRequest replicaBulkRequest = holder.replicaRequest; - assertThat(replicaBulkRequest.id(), equalTo(7)); - DocWriteRequest replicaRequest = replicaBulkRequest.request(); - assertThat(replicaRequest, instanceOf(IndexRequest.class)); - assertThat(replicaRequest, equalTo(indexRequest)); - - BulkItemResultHolder deleteHolder = TransportShardBulkAction.processUpdateResponse(updateRequest, - "index", deleteResult, translateDelete, shard, 8); - - assertFalse(deleteHolder.isVersionConflict()); - assertThat(deleteHolder.response, instanceOf(UpdateResponse.class)); - UpdateResponse delUpdateResp = (UpdateResponse) deleteHolder.response; - assertThat(delUpdateResp.getGetResult(), equalTo(null)); - assertThat(deleteHolder.operationResult, equalTo(deleteResult)); - BulkItemRequest delReplicaBulkRequest = deleteHolder.replicaRequest; - assertThat(delReplicaBulkRequest.id(), equalTo(8)); - DocWriteRequest delReplicaRequest = delReplicaBulkRequest.request(); - assertThat(delReplicaRequest, instanceOf(DeleteRequest.class)); - assertThat(delReplicaRequest, equalTo(deleteRequest)); + latch.await(); + assertFalse(shard.isSyncNeeded()); closeShards(shard); } - public void testExecuteUpdateRequestOnce() throws Exception { - IndexMetaData metaData = indexMetaData(); - IndexShard shard = newStartedShard(true); + public void testNoOpReplicationOnPrimaryDocumentFailure() throws Exception { + final IndexShard shard = spy(newStartedShard(false)); + BulkItemRequest itemRequest = new BulkItemRequest(0, new IndexRequest("index", "_doc").source(Requests.INDEX_CONTENT_TYPE)); + final String failureMessage = "simulated primary failure"; + final IOException exception = new IOException(failureMessage); + itemRequest.setPrimaryResponse(new BulkItemResponse(0, + randomFrom( + DocWriteRequest.OpType.CREATE, + DocWriteRequest.OpType.DELETE, + DocWriteRequest.OpType.INDEX + ), + new BulkItemResponse.Failure("index", "_doc", "1", + exception, 1L) + )); + BulkItemRequest[] itemRequests = new BulkItemRequest[1]; + itemRequests[0] = itemRequest; + BulkShardRequest bulkShardRequest = new BulkShardRequest( + shard.shardId(), RefreshPolicy.NONE, itemRequests); + TransportShardBulkAction.performOnReplica(bulkShardRequest, shard); + verify(shard, times(1)).markSeqNoAsNoop(1, exception.toString()); + closeShards(shard); + } - Map source = new HashMap<>(); - BulkItemRequest[] items = new BulkItemRequest[1]; - boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create); + public void testRetries() throws Exception { + IndexSettings indexSettings = new IndexSettings(indexMetaData(), Settings.EMPTY); + UpdateRequest writeRequest = new UpdateRequest("index", "_doc", "id") + .doc(Requests.INDEX_CONTENT_TYPE, "field", "value"); + // the beating will continue until success has come. 
+ writeRequest.retryOnConflict(Integer.MAX_VALUE); BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - items[0] = primaryRequest; - BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - Translog.Location location = new Translog.Location(0, 0, 0); - IndexRequest indexRequest = new IndexRequest("index", "_doc", "id"); - indexRequest.source(source); - - DocWriteResponse.Result docWriteResult = DocWriteResponse.Result.CREATED; - UpdateHelper.Result translate = new UpdateHelper.Result(indexRequest, docWriteResult, - new HashMap(), XContentType.JSON); - UpdateHelper updateHelper = new MockUpdateHelper(translate); - UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id"); - updateRequest.upsert(source); - - BulkItemResultHolder holder = TransportShardBulkAction.executeUpdateRequestOnce(updateRequest, shard, metaData, - "index", updateHelper, threadPool::absoluteTimeInMillis, primaryRequest, 0, - new ThrowingMappingUpdatePerformer(new RuntimeException())); - - assertFalse(holder.isVersionConflict()); - assertNotNull(holder.response); - assertNotNull(holder.operationResult); - assertNotNull(holder.replicaRequest); - - assertThat(holder.response, instanceOf(UpdateResponse.class)); - UpdateResponse updateResp = (UpdateResponse) holder.response; - assertThat(updateResp.getGetResult(), equalTo(null)); - BulkItemRequest replicaBulkRequest = holder.replicaRequest; - assertThat(replicaBulkRequest.id(), equalTo(0)); - DocWriteRequest replicaRequest = replicaBulkRequest.request(); - assertThat(replicaRequest, instanceOf(IndexRequest.class)); - assertThat(replicaRequest, equalTo(indexRequest)); + IndexRequest updateResponse = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"); - // Assert that the document actually made it there - assertDocCount(shard, 1); - closeShards(shard); - } + Exception err = new VersionConflictEngineException(shardId, "_doc", "id", + "I'm conflicted <(;_;)>"); + Engine.IndexResult conflictedResult = new Engine.IndexResult(err, 0, 0); + Engine.IndexResult mappingUpdate = + new Engine.IndexResult(new Mapping(null, null, new MetadataFieldMapper[0], Collections.emptyMap())); + Translog.Location resultLocation = new Translog.Location(42, 42, 42); + Engine.IndexResult success = new FakeIndexResult(1, 1, 13, true, resultLocation); - public void testExecuteUpdateRequestOnceWithFailure() throws Exception { - IndexMetaData metaData = indexMetaData(); - IndexShard shard = newStartedShard(true); + IndexShard shard = mock(IndexShard.class); + when(shard.applyIndexOperationOnPrimary(anyLong(), any(), any(), anyLong(), anyBoolean())).thenAnswer(ir -> { + if (randomBoolean()) { + return conflictedResult; + } + if (randomBoolean()) { + return mappingUpdate; + } else { + return success; + } + }); + when(shard.indexSettings()).thenReturn(indexSettings); - Map source = new HashMap<>(); - source.put("foo", "bar"); - BulkItemRequest[] items = new BulkItemRequest[1]; - boolean create = randomBoolean(); - DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id") - .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar") - .create(create); - BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest); - items[0] = primaryRequest; - BulkShardRequest bulkShardRequest = - new BulkShardRequest(shardId, RefreshPolicy.NONE, items); + UpdateHelper updateHelper = mock(UpdateHelper.class); + when(updateHelper.prepare(any(), eq(shard), any())).thenReturn( + new 
UpdateHelper.Result(updateResponse, randomBoolean() ? DocWriteResponse.Result.CREATED : DocWriteResponse.Result.UPDATED, + Collections.singletonMap("field", "value"), Requests.INDEX_CONTENT_TYPE)); - Translog.Location location = new Translog.Location(0, 0, 0); - IndexRequest indexRequest = new IndexRequest("index", "_doc", "id"); - indexRequest.source(source); - - DocWriteResponse.Result docWriteResult = DocWriteResponse.Result.CREATED; - Exception prepareFailure = new IllegalArgumentException("I failed to do something!"); - UpdateHelper updateHelper = new FailingUpdateHelper(prepareFailure); - UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id"); - updateRequest.upsert(source); - - BulkItemResultHolder holder = TransportShardBulkAction.executeUpdateRequestOnce(updateRequest, shard, metaData, - "index", updateHelper, threadPool::absoluteTimeInMillis, primaryRequest, 0, new NoopMappingUpdatePerformer()); - - assertFalse(holder.isVersionConflict()); - assertNull(holder.response); - assertNotNull(holder.operationResult); - assertNotNull(holder.replicaRequest); - - Engine.IndexResult opResult = (Engine.IndexResult) holder.operationResult; - assertThat(opResult.getResultType(), equalTo(Engine.Result.Type.FAILURE)); - assertFalse(opResult.isCreated()); - Exception e = opResult.getFailure(); - assertThat(e.getMessage(), containsString("I failed to do something!")); - - BulkItemRequest replicaBulkRequest = holder.replicaRequest; - assertThat(replicaBulkRequest.id(), equalTo(0)); - assertThat(replicaBulkRequest.request(), instanceOf(IndexRequest.class)); - IndexRequest replicaRequest = (IndexRequest) replicaBulkRequest.request(); - assertThat(replicaRequest.index(), equalTo("index")); - assertThat(replicaRequest.type(), equalTo("_doc")); - assertThat(replicaRequest.id(), equalTo("id")); - assertThat(replicaRequest.sourceAsMap(), equalTo(source)); - - // Assert that the document did not make it there, since it should have failed - assertDocCount(shard, 0); - closeShards(shard); - } - - /** - * Fake UpdateHelper that always returns whatever result you give it - */ - private static class MockUpdateHelper extends UpdateHelper { - private final UpdateHelper.Result result; + BulkItemRequest[] items = new BulkItemRequest[]{primaryRequest}; + BulkShardRequest bulkShardRequest = + new BulkShardRequest(shardId, RefreshPolicy.NONE, items); - MockUpdateHelper(UpdateHelper.Result result) { - super(Settings.EMPTY, null); - this.result = result; - } + WritePrimaryResult result = TransportShardBulkAction.performOnPrimary( + bulkShardRequest, shard, updateHelper, threadPool::absoluteTimeInMillis, new NoopMappingUpdatePerformer(), + () -> {}); - @Override - public UpdateHelper.Result prepare(UpdateRequest u, IndexShard s, LongSupplier n) { - logger.info("--> preparing update for {} - {}", s, u); - return result; - } + assertThat(result.location, equalTo(resultLocation)); + BulkItemResponse primaryResponse = result.replicaRequest().items()[0].getPrimaryResponse(); + assertThat(primaryResponse.getItemId(), equalTo(0)); + assertThat(primaryResponse.getId(), equalTo("id")); + assertThat(primaryResponse.getOpType(), equalTo(DocWriteRequest.OpType.UPDATE)); + DocWriteResponse response = primaryResponse.getResponse(); + assertThat(response.status(), equalTo(RestStatus.CREATED)); + assertThat(response.getSeqNo(), equalTo(13L)); } /** - * An update helper that always fails to prepare the update + * Fake IndexResult that has a settable translog location */ - private static class FailingUpdateHelper extends 
UpdateHelper { - private final Exception e; + static class FakeIndexResult extends Engine.IndexResult { + + private final Translog.Location location; - FailingUpdateHelper(Exception failure) { - super(Settings.EMPTY, null); - this.e = failure; + protected FakeIndexResult(long version, long term, long seqNo, boolean created, Translog.Location location) { + super(version, term, seqNo, created); + this.location = location; } @Override - public UpdateHelper.Result prepare(UpdateRequest u, IndexShard s, LongSupplier n) { - logger.info("--> preparing failing update for {} - {}", s, u); - throw new ElasticsearchException(e); + public Translog.Location getTranslogLocation() { + return this.location; } } /** - * Fake IndexResult that has a settable translog location + * Fake DeleteResult that has a settable translog location */ - private static class FakeResult extends Engine.IndexResult { + static class FakeDeleteResult extends Engine.DeleteResult { private final Translog.Location location; - protected FakeResult(long version, long term, long seqNo, boolean created, Translog.Location location) { - super(version, term, seqNo, created); + protected FakeDeleteResult(long version, long term, long seqNo, boolean found, Translog.Location location) { + super(version, term, seqNo, found); this.location = location; } @@ -856,6 +848,7 @@ public void updateMappings(Mapping update, ShardId shardId, String type) { /** Always throw the given exception */ private class ThrowingMappingUpdatePerformer implements MappingUpdatePerformer { private final RuntimeException e; + ThrowingMappingUpdatePerformer(RuntimeException e) { this.e = e; } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java deleted file mode 100644 index f68545c3f7921..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractStreamableXContentTestCase; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class WritePipelineResponseTests extends AbstractStreamableXContentTestCase<WritePipelineResponse> { - - public void testSerializationWithoutError() throws IOException { - boolean isAcknowledged = randomBoolean(); - WritePipelineResponse response; - response = new WritePipelineResponse(isAcknowledged); - BytesStreamOutput out = new BytesStreamOutput(); - response.writeTo(out); - StreamInput streamInput = out.bytes().streamInput(); - WritePipelineResponse otherResponse = new WritePipelineResponse(); - otherResponse.readFrom(streamInput); - - assertThat(otherResponse.isAcknowledged(), equalTo(response.isAcknowledged())); - } - - public void testSerializationWithError() throws IOException { - WritePipelineResponse response = new WritePipelineResponse(); - BytesStreamOutput out = new BytesStreamOutput(); - response.writeTo(out); - StreamInput streamInput = out.bytes().streamInput(); - WritePipelineResponse otherResponse = new WritePipelineResponse(); - otherResponse.readFrom(streamInput); - - assertThat(otherResponse.isAcknowledged(), equalTo(response.isAcknowledged())); - } - - public void testToXContent() { - WritePipelineResponse response = new WritePipelineResponse(true); - String output = Strings.toString(response); - assertEquals("{\"acknowledged\":true}", output); - } - - @Override - protected WritePipelineResponse doParseInstance(XContentParser parser) { - return WritePipelineResponse.fromXContent(parser); - } - - @Override - protected WritePipelineResponse createTestInstance() { - return new WritePipelineResponse(randomBoolean()); - } - - @Override - protected WritePipelineResponse createBlankInstance() { - return new WritePipelineResponse(); - } - - @Override - protected WritePipelineResponse mutateInstance(WritePipelineResponse response) { - return new WritePipelineResponse(response.isAcknowledged() == false); - } -} diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java index d6f733d7c1cd4..dd2aa0e325e03 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -453,10 +453,12 @@ public void testSeveralTimeFormats() { } private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) { - assertThat(jodaDate.getMillis(), is(javaDate.toEpochSecond() * 1000)); - String javaTimeOut = DateFormatters.forPattern("dateOptionalTime").format(javaDate); - String jodaTimeOut = Joda.forPattern("dateOptionalTime").printer().print(jodaDate); - assertThat(javaTimeOut, is(jodaTimeOut)); + assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli())); + String javaTimeOut = DateFormatters.forPattern(format).format(javaDate); + String jodaTimeOut = Joda.forPattern(format).printer().print(jodaDate); + String message = String.format(Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]", + format, jodaTimeOut, javaTimeOut); + assertThat(message, javaTimeOut, is(jodaTimeOut)); 
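+ // note: the pattern under test is now threaded through both printers, and toInstant().toEpochMilli() keeps the millisecond component that toEpochSecond() * 1000 dropped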
} private void assertSameDate(String input, String format) { diff --git a/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java b/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java index 6c18bd0afab1b..9b69a876c1d2a 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/LoggersTests.java @@ -46,7 +46,7 @@ static class MockAppender extends AbstractAppender { @Override public void append(LogEvent event) { - lastEvent = event; + lastEvent = event.toImmutable(); } ParameterizedMessage lastParameterizedMessage() { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index a893fb63ec8cc..1a0d0dead6e6d 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -28,6 +28,7 @@ import java.io.ByteArrayInputStream; import java.io.IOException; +import java.util.Arrays; import static org.hamcrest.Matchers.equalTo; @@ -54,8 +55,21 @@ private void testGuessType(XContentType type) throws IOException { builder.field("field1", "value1"); builder.endObject(); - assertThat(XContentHelper.xContentType(BytesReference.bytes(builder)), equalTo(type)); - assertThat(XContentFactory.xContentType(BytesReference.bytes(builder).streamInput()), equalTo(type)); + final BytesReference bytes; + if (type == XContentType.JSON && randomBoolean()) { + final int length = randomIntBetween(0, 8 * XContentFactory.GUESS_HEADER_LENGTH); + final String content = Strings.toString(builder); + final StringBuilder sb = new StringBuilder(length + content.length()); + final char[] chars = new char[length]; + Arrays.fill(chars, ' '); + sb.append(new String(chars)).append(content); + bytes = new BytesArray(sb.toString()); + } else { + bytes = BytesReference.bytes(builder); + } + + assertThat(XContentHelper.xContentType(bytes), equalTo(type)); + assertThat(XContentFactory.xContentType(bytes.streamInput()), equalTo(type)); // CBOR is binary, cannot use String if (type != XContentType.CBOR && type != XContentType.SMILE) { diff --git a/server/src/test/java/org/elasticsearch/document/ShardInfoIT.java b/server/src/test/java/org/elasticsearch/document/ShardInfoIT.java index 5a5f279985f4c..682b1deb14662 100644 --- a/server/src/test/java/org/elasticsearch/document/ShardInfoIT.java +++ b/server/src/test/java/org/elasticsearch/document/ShardInfoIT.java @@ -91,6 +91,7 @@ public void testBulkWithUpdateItems() throws Exception { BulkResponse bulkResponse = bulkRequestBuilder.get(); for (BulkItemResponse item : bulkResponse) { + assertThat(item.getFailure(), nullValue()); assertThat(item.isFailed(), equalTo(false)); assertShardInfo(item.getResponse()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index aaab2beb19860..9e248fcf47a0b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -196,7 +196,7 @@ public void testParsesEs6BooleansStrict() throws IOException { .endObject()); MapperParsingException ex = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(SourceToParse.source("test", "type", "1", source, 
XContentType.JSON))); - assertEquals("failed to parse [field]", ex.getMessage()); + assertEquals("failed to parse field [field] of type [boolean]", ex.getMessage()); } public void testMultiFields() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 852f89ef32206..9e7b41673ad9d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -126,6 +126,35 @@ public void testDotsWithExistingNestedMapper() throws Exception { e.getMessage()); } + public void testUnexpectedFieldMappingType() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").field("type", "long").endObject() + .startObject("bar").field("type", "boolean").endObject() + .startObject("geo").field("type", "geo_shape").endObject() + .endObject().endObject().endObject()); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + { + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("foo", true).endObject()); + MapperException exception = expectThrows(MapperException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "1", bytes, XContentType.JSON))); + assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long]")); + } + { + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("bar", "bar").endObject()); + MapperException exception = expectThrows(MapperException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON))); + assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean]")); + } + { + BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("geo", 123).endObject()); + MapperException exception = expectThrows(MapperException.class, + () -> mapper.parse(SourceToParse.source("test", "type", "2", bytes, XContentType.JSON))); + assertThat(exception.getMessage(), containsString("failed to parse field [geo] of type [geo_shape]")); + } + + } + public void testDotsWithDynamicNestedMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index 39cc3c8219ceb..b27b9e26ab7dc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -22,8 +22,10 @@ import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData; @@ -37,6 +39,7 @@ import org.hamcrest.Matchers; import java.io.IOException; +import java.util.Collections; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -49,24 +52,7 @@ public class QueryShardContextTests extends ESTestCase { public void testFailIfFieldMappingNotFound() { - IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index"); - indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT) - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 1) - ); - IndexMetaData indexMetaData = indexMetadataBuilder.build(); - IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY); - MapperService mapperService = mock(MapperService.class); - when(mapperService.getIndexSettings()).thenReturn(indexSettings); - when(mapperService.index()).thenReturn(indexMetaData.getIndex()); - final long nowInMillis = randomNonNegativeLong(); - - QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, (mappedFieldType, idxName) -> - mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null) - , mapperService, null, null, xContentRegistry(), writableRegistry(), null, null, - () -> nowInMillis, null); - + QueryShardContext context = createQueryShardContext(IndexMetaData.INDEX_UUID_NA_VALUE, null); context.setAllowUnmappedFields(false); MappedFieldType fieldType = new TextFieldMapper.TextFieldType(); MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType); @@ -91,30 +77,16 @@ public void testFailIfFieldMappingNotFound() { } public void testClusterAlias() throws IOException { - IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index"); - indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT) - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 1) - ); - IndexMetaData indexMetaData = indexMetadataBuilder.build(); - IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY); - MapperService mapperService = mock(MapperService.class); - when(mapperService.getIndexSettings()).thenReturn(indexSettings); - when(mapperService.index()).thenReturn(indexMetaData.getIndex()); - final long nowInMillis = randomNonNegativeLong(); + final String clusterAlias = randomBoolean() ? null : "remote_cluster"; + QueryShardContext context = createQueryShardContext(IndexMetaData.INDEX_UUID_NA_VALUE, clusterAlias); - Mapper.BuilderContext ctx = new Mapper.BuilderContext(indexSettings.getSettings(), new ContentPath()); + + Mapper.BuilderContext ctx = new Mapper.BuilderContext(context.getIndexSettings().getSettings(), new ContentPath()); IndexFieldMapper mapper = new IndexFieldMapper.Builder(null).build(ctx); - final String clusterAlias = randomBoolean() ? null : "remote_cluster"; - QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, (mappedFieldType, indexname) -> - mappedFieldType.fielddataBuilder(indexname).build(indexSettings, mappedFieldType, null, null, mapperService) - , mapperService, null, null, xContentRegistry(), writableRegistry(), null, null, - () -> nowInMillis, clusterAlias); IndexFieldData forField = context.getForField(mapper.fieldType()); - String expected = clusterAlias == null ? 
indexMetaData.getIndex().getName() - : clusterAlias + ":" + indexMetaData.getIndex().getName(); + String expected = clusterAlias == null ? context.getIndexSettings().getIndexMetaData().getIndex().getName() + : clusterAlias + ":" + context.getIndexSettings().getIndex().getName(); assertEquals(expected, ((AbstractAtomicOrdinalsFieldData)forField.load(null)).getOrdinalsValues().lookupOrd(0).utf8ToString()); Query query = mapper.fieldType().termQuery("index", context); if (clusterAlias == null) { @@ -133,4 +105,32 @@ public void testClusterAlias() throws IOException { assertThat(query, Matchers.instanceOf(MatchNoDocsQuery.class)); } + public void testGetFullyQualifiedIndex() { + String clusterAlias = randomAlphaOfLengthBetween(5, 10); + String indexUuid = randomAlphaOfLengthBetween(3, 10); + QueryShardContext shardContext = createQueryShardContext(indexUuid, clusterAlias); + assertThat(shardContext.getFullyQualifiedIndex().getName(), equalTo(clusterAlias + ":index")); + assertThat(shardContext.getFullyQualifiedIndex().getUUID(), equalTo(indexUuid)); + } + + public static QueryShardContext createQueryShardContext(String indexUuid, String clusterAlias) { + IndexMetaData.Builder indexMetadataBuilder = new IndexMetaData.Builder("index"); + indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT) + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 1) + .put(IndexMetaData.SETTING_INDEX_UUID, indexUuid) + ); + IndexMetaData indexMetaData = indexMetadataBuilder.build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, Settings.EMPTY); + MapperService mapperService = mock(MapperService.class); + when(mapperService.getIndexSettings()).thenReturn(indexSettings); + when(mapperService.index()).thenReturn(indexMetaData.getIndex()); + final long nowInMillis = randomNonNegativeLong(); + + return new QueryShardContext( + 0, indexSettings, null, (mappedFieldType, idxName) -> + mappedFieldType.fielddataBuilder(idxName).build(indexSettings, mappedFieldType, null, null, null) + , mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()), null, null, + () -> nowInMillis, clusterAlias); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryShardExceptionTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryShardExceptionTests.java new file mode 100644 index 0000000000000..8557f568e078f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/QueryShardExceptionTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class QueryShardExceptionTests extends ESTestCase { + + public void testCreateFromQueryShardContext() { + String indexUuid = randomAlphaOfLengthBetween(5, 10); + String clusterAlias = randomAlphaOfLengthBetween(5, 10); + QueryShardContext queryShardContext = QueryShardContextTests.createQueryShardContext(indexUuid, clusterAlias); + { + QueryShardException queryShardException = new QueryShardException(queryShardContext, "error"); + assertThat(queryShardException.getIndex().getName(), equalTo(clusterAlias + ":index")); + assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid)); + } + { + QueryShardException queryShardException = new QueryShardException(queryShardContext, "error", new IllegalArgumentException()); + assertThat(queryShardException.getIndex().getName(), equalTo(clusterAlias + ":index")); + assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid)); + } + } + + public void testCreateFromIndex() { + String indexUuid = randomAlphaOfLengthBetween(5, 10); + String indexName = randomAlphaOfLengthBetween(5, 10); + Index index = new Index(indexName, indexUuid); + QueryShardException queryShardException = new QueryShardException(index, "error", new IllegalArgumentException()); + assertThat(queryShardException.getIndex().getName(), equalTo(indexName)); + assertThat(queryShardException.getIndex().getUUID(), equalTo(indexUuid)); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 9b4b0b77ae6d8..bee5f7c608529 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -234,7 +234,7 @@ public void testFieldsCannotBeSetToNull() { public void testDefaultFieldParsing() throws IOException { assumeTrue("5.x behaves differently, so skip on non-6.x indices", - indexVersionCreated.onOrAfter(Version.V_6_0_0_alpha1)); + indexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_0_0_alpha1)); String query = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); String contentString = "{\n" + diff --git a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index 48f43eefeb3ca..88e8310bf01bb 100644 --- a/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -144,7 +144,7 @@ public void testIndexWildcard() throws IOException { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); QueryShardContext context = createShardContext(); - String index = context.getFullyQualifiedIndexName(); + String index = context.getFullyQualifiedIndex().getName(); Query query = new WildcardQueryBuilder("_index", index).doToQuery(context); assertThat(query instanceof MatchAllDocsQuery, equalTo(true)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index b37e51adfe878..5ea717873143c 100644 --- 
a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -69,6 +69,7 @@ import static org.elasticsearch.index.translog.SnapshotMatchers.containsOperationsInAnyOrder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.instanceOf; @@ -265,7 +266,7 @@ public void testReplicaTermIncrementWithConcurrentPrimaryPromotion() throws Exce barrier.await(); indexOnReplica(replicationRequest, shards, replica2, newReplica1Term); } catch (IllegalStateException ise) { - assertThat(ise.getMessage(), containsString("is too old")); + assertThat(ise.getMessage(), either(containsString("is too old")).or(containsString("cannot be a replication target"))); } catch (Exception e) { throw new RuntimeException(e); } @@ -307,7 +308,7 @@ public void testReplicaOperationWithConcurrentPrimaryPromotion() throws Exceptio indexOnReplica(replicationRequest, shards, replica, primaryPrimaryTerm); successFullyIndexed.set(true); } catch (IllegalStateException ise) { - assertThat(ise.getMessage(), containsString("is too old")); + assertThat(ise.getMessage(), either(containsString("is too old")).or(containsString("cannot be a replication target"))); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java index e001f82809b07..3948da9c1119c 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/ReplicationTrackerTests.java @@ -47,7 +47,9 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; +import java.util.function.LongConsumer; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -60,7 +62,7 @@ import static org.hamcrest.Matchers.not; public class ReplicationTrackerTests extends ESTestCase { - + public void testEmptyShards() { final ReplicationTracker tracker = newTracker(AllocationId.newInitializing()); assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); @@ -99,6 +101,11 @@ private static Set<String> ids(Set<AllocationId> allocationIds) { return allocationIds.stream().map(AllocationId::getId).collect(Collectors.toSet()); } + private void updateLocalCheckpoint(final ReplicationTracker tracker, final String allocationId, final long localCheckpoint) { + tracker.updateLocalCheckpoint(allocationId, localCheckpoint); + assertThat(updatedGlobalCheckpoint.get(), equalTo(tracker.getGlobalCheckpoint())); + } + public void testGlobalCheckpointUpdate() { final long initialClusterStateVersion = randomNonNegativeLong(); Map<AllocationId, Long> allocations = new HashMap<>(); @@ -137,14 +144,14 @@ assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1)); initializing.forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED)); assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1 + initializing.size())); - 
allocations.keySet().forEach(aId -> tracker.updateLocalCheckpoint(aId.getId(), allocations.get(aId))); + allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId))); assertThat(tracker.getGlobalCheckpoint(), equalTo(minLocalCheckpoint)); // increment checkpoints active.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4))); initializing.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4))); - allocations.keySet().forEach(aId -> tracker.updateLocalCheckpoint(aId.getId(), allocations.get(aId))); + allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId))); final long minLocalCheckpointAfterUpdates = allocations.entrySet().stream().map(Map.Entry::getValue).min(Long::compareTo).orElse(UNASSIGNED_SEQ_NO); @@ -153,7 +160,7 @@ final AllocationId extraId = AllocationId.newInitializing(); // first check that adding it without the master blessing doesn't change anything. - tracker.updateLocalCheckpoint(extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4)); + updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4)); assertNull(tracker.checkpoints.get(extraId)); expectThrows(IllegalStateException.class, () -> tracker.initiateTracking(extraId.getId())); @@ -165,7 +172,7 @@ // now notify for the new id if (randomBoolean()) { - tracker.updateLocalCheckpoint(extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4)); + updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4)); markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), randomInt((int) minLocalCheckpointAfterUpdates)); } else { markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4)); @@ -175,6 +182,64 @@ assertThat(tracker.getGlobalCheckpoint(), greaterThan(minLocalCheckpoint)); } + public void testUpdateGlobalCheckpointOnReplica() { + final AllocationId active = AllocationId.newInitializing(); + final ReplicationTracker tracker = newTracker(active); + final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE - 1); + tracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "test"); + assertThat(updatedGlobalCheckpoint.get(), equalTo(globalCheckpoint)); + final long nonUpdate = randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint); + updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO); + tracker.updateGlobalCheckpointOnReplica(nonUpdate, "test"); + assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO)); + final long update = randomLongBetween(globalCheckpoint, Long.MAX_VALUE); + tracker.updateGlobalCheckpointOnReplica(update, "test"); + assertThat(updatedGlobalCheckpoint.get(), equalTo(update)); + } + + public void testMarkAllocationIdAsInSync() throws BrokenBarrierException, InterruptedException { + final long initialClusterStateVersion = randomNonNegativeLong(); + Map<AllocationId, Long> activeWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1); + Set<AllocationId> active = new HashSet<>(activeWithCheckpoints.keySet()); + Map<AllocationId, Long> initializingWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1); + Set<AllocationId> initializing = new HashSet<>(initializingWithCheckpoints.keySet()); + final AllocationId primaryId = active.iterator().next(); + final AllocationId replicaId = initializing.iterator().next(); + final 
ReplicationTracker tracker = newTracker(primaryId); + tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId), emptySet()); + final long localCheckpoint = randomLongBetween(0, Long.MAX_VALUE - 1); + tracker.activatePrimaryMode(localCheckpoint); + tracker.initiateTracking(replicaId.getId()); + final CyclicBarrier barrier = new CyclicBarrier(2); + final Thread thread = new Thread(() -> { + try { + barrier.await(); + tracker.markAllocationIdAsInSync( + replicaId.getId(), + randomLongBetween(NO_OPS_PERFORMED, localCheckpoint - 1)); + barrier.await(); + } catch (BrokenBarrierException | InterruptedException e) { + throw new AssertionError(e); + } + }); + thread.start(); + barrier.await(); + awaitBusy(tracker::pendingInSync); + final long updatedLocalCheckpoint = randomLongBetween(1 + localCheckpoint, Long.MAX_VALUE); + // there is a shard copy pending in sync, the global checkpoint cannot advance + updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO); + tracker.updateLocalCheckpoint(primaryId.getId(), updatedLocalCheckpoint); + assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO)); + // we are implicitly marking the pending in sync copy as in sync with the current global checkpoint, no advancement should occur + tracker.updateLocalCheckpoint(replicaId.getId(), localCheckpoint); + assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO)); + barrier.await(); + thread.join(); + // now we expect that the global checkpoint would advance + tracker.markAllocationIdAsInSync(replicaId.getId(), updatedLocalCheckpoint); + assertThat(updatedGlobalCheckpoint.get(), equalTo(updatedLocalCheckpoint)); + } + public void testMissingActiveIdsPreventAdvance() { final Map<AllocationId, Long> active = randomAllocationsWithLocalCheckpoints(2, 5); final Map<AllocationId, Long> initializing = randomAllocationsWithLocalCheckpoints(0, 5); @@ -191,14 +256,16 @@ .entrySet() .stream() .filter(e -> !e.getKey().equals(missingActiveID)) - .forEach(e -> tracker.updateLocalCheckpoint(e.getKey().getId(), e.getValue())); + .forEach(e -> updateLocalCheckpoint(tracker, e.getKey().getId(), e.getValue())); if (missingActiveID.equals(primaryId) == false) { assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); + assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO)); } // now update all knowledge of all shards - assigned.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP)); + assigned.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP)); assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO))); + assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO))); } public void testMissingInSyncIdsPreventAdvance() { @@ -213,13 +280,15 @@ randomSubsetOf(randomIntBetween(1, initializing.size() - 1), initializing.keySet()).forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED)); - active.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP)); + active.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP)); assertThat(tracker.getGlobalCheckpoint(), equalTo(NO_OPS_PERFORMED)); + assertThat(updatedGlobalCheckpoint.get(), equalTo(NO_OPS_PERFORMED)); // update again - initializing.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP)); + initializing.forEach((aid, localCP) -> 
updateLocalCheckpoint(tracker, aid.getId(), localCP)); assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO))); + assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO))); } public void testInSyncIdsAreIgnoredIfNotValidatedByMaster() { @@ -236,7 +305,7 @@ List<Map<AllocationId, Long>> allocations = Arrays.asList(active, initializing, nonApproved); Collections.shuffle(allocations, random()); - allocations.forEach(a -> a.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP))); + allocations.forEach(a -> a.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP))); assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO))); } @@ -271,7 +340,7 @@ public void testInSyncIdsAreRemovedIfNotValidatedByMaster() { initializing.forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED)); } if (randomBoolean()) { - allocations.forEach((aid, localCP) -> tracker.updateLocalCheckpoint(aid.getId(), localCP)); + allocations.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP)); } // now remove shards @@ -281,9 +350,9 @@ ids(activeToStay.keySet()), routingTable(initializingToStay.keySet(), primaryId), emptySet()); - allocations.forEach((aid, ckp) -> tracker.updateLocalCheckpoint(aid.getId(), ckp + 10L)); + allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L)); } else { - allocations.forEach((aid, ckp) -> tracker.updateLocalCheckpoint(aid.getId(), ckp + 10L)); + allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L)); tracker.updateFromMaster( initialClusterStateVersion + 2, ids(activeToStay.keySet()), @@ -331,7 +400,7 @@ public void testWaitForAllocationIdToBeInSync() throws Exception { final List<Integer> elements = IntStream.rangeClosed(0, globalCheckpoint - 1).boxed().collect(Collectors.toList()); Randomness.shuffle(elements); for (int i = 0; i < elements.size(); i++) { - tracker.updateLocalCheckpoint(trackingAllocationId.getId(), elements.get(i)); + updateLocalCheckpoint(tracker, trackingAllocationId.getId(), elements.get(i)); assertFalse(complete.get()); assertFalse(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId()).inSync); assertBusy(() -> assertTrue(tracker.pendingInSync.contains(trackingAllocationId.getId()))); @@ -339,7 +408,7 @@ if (randomBoolean()) { // normal path, shard catches up - tracker.updateLocalCheckpoint(trackingAllocationId.getId(), randomIntBetween(globalCheckpoint, 64)); + updateLocalCheckpoint(tracker, trackingAllocationId.getId(), randomIntBetween(globalCheckpoint, 64)); // synchronize with the waiting thread to mark that it is complete barrier.await(); assertTrue(complete.get()); @@ -355,13 +424,16 @@ assertFalse(tracker.pendingInSync.contains(trackingAllocationId.getId())); thread.join(); } + + private AtomicLong updatedGlobalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO); private ReplicationTracker newTracker(final AllocationId allocationId) { return new ReplicationTracker( new ShardId("test", "_na_", 0), allocationId.getId(), IndexSettingsModule.newIndexSettings("test", Settings.EMPTY), - UNASSIGNED_SEQ_NO); + UNASSIGNED_SEQ_NO, + updatedGlobalCheckpoint::set); } public void 
testWaitForAllocationIdToBeInSyncCanBeInterrupted() throws BrokenBarrierException, InterruptedException { @@ -488,10 +560,10 @@ public void testUpdateAllocationIdsFromMaster() throws Exception { // the tracking allocation IDs should play no role in determining the global checkpoint final Map<AllocationId, Integer> activeLocalCheckpoints = newActiveAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024))); - activeLocalCheckpoints.forEach((a, l) -> tracker.updateLocalCheckpoint(a.getId(), l)); + activeLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l)); final Map<AllocationId, Integer> initializingLocalCheckpoints = newInitializingAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024))); - initializingLocalCheckpoints.forEach((a, l) -> tracker.updateLocalCheckpoint(a.getId(), l)); + initializingLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l)); assertTrue( activeLocalCheckpoints .entrySet() .stream() @@ -504,6 +576,7 @@ .allMatch(e -> tracker.getTrackedLocalCheckpointForShard(e.getKey().getId()).getLocalCheckpoint() == e.getValue())); final long minimumActiveLocalCheckpoint = (long) activeLocalCheckpoints.values().stream().min(Integer::compareTo).get(); assertThat(tracker.getGlobalCheckpoint(), equalTo(minimumActiveLocalCheckpoint)); + assertThat(updatedGlobalCheckpoint.get(), equalTo(minimumActiveLocalCheckpoint)); final long minimumInitailizingLocalCheckpoint = (long) initializingLocalCheckpoints.values().stream().min(Integer::compareTo).get(); // now we are going to add a new allocation ID and bring it in sync which should move it to the in-sync allocation IDs @@ -635,10 +708,11 @@ public void testPrimaryContextHandoff() throws IOException { FakeClusterState clusterState = initialState(); final AllocationId primaryAllocationId = clusterState.routingTable.primaryShard().allocationId(); + final LongConsumer onUpdate = updatedGlobalCheckpoint -> {}; ReplicationTracker oldPrimary = - new ReplicationTracker(shardId, primaryAllocationId.getId(), indexSettings, UNASSIGNED_SEQ_NO); + new ReplicationTracker(shardId, primaryAllocationId.getId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate); ReplicationTracker newPrimary = - new ReplicationTracker(shardId, primaryAllocationId.getRelocationId(), indexSettings, UNASSIGNED_SEQ_NO); + new ReplicationTracker(shardId, primaryAllocationId.getRelocationId(), indexSettings, UNASSIGNED_SEQ_NO, onUpdate); Set<String> allocationIds = new HashSet<>(Arrays.asList(oldPrimary.shardAllocationId, newPrimary.shardAllocationId)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java new file mode 100644 index 0000000000000..d9240602d8519 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/shard/GlobalCheckpointListenersTests.java @@ -0,0 +1,423 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.shard; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; +import org.mockito.ArgumentCaptor; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.List; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class GlobalCheckpointListenersTests extends ESTestCase { + + final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); + + public void testGlobalCheckpointUpdated() throws IOException { + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, logger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + final int numberOfListeners = randomIntBetween(0, 16); + final long[] globalCheckpoints = new long[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final AtomicBoolean invoked = new AtomicBoolean(); + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (g, e) -> { + if (invoked.compareAndSet(false, true) == false) { + throw new IllegalStateException("listener invoked twice"); + } + assert g != UNASSIGNED_SEQ_NO; + assert e == null; + globalCheckpoints[index] = g; + }; + globalCheckpointListeners.add(NO_OPS_PERFORMED, listener); + } + final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint); + for (int i = 0; i < numberOfListeners; i++) { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + + // test the listeners are not invoked twice + final long nextGlobalCheckpoint = randomLongBetween(globalCheckpoint + 1, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(nextGlobalCheckpoint); + for (int i = 0; i < numberOfListeners; i++) { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + + // closing should also not notify the listeners + globalCheckpointListeners.close(); + for (int i = 0; i < numberOfListeners; i++) { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + } + + public void testListenersReadyToBeNotified() throws 
IOException { + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, logger); + final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED + 1, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint); + final int numberOfListeners = randomIntBetween(0, 16); + final long[] globalCheckpoints = new long[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final AtomicBoolean invoked = new AtomicBoolean(); + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (g, e) -> { + if (invoked.compareAndSet(false, true) == false) { + throw new IllegalStateException("listener invoked twice"); + } + assert g != UNASSIGNED_SEQ_NO; + assert e == null; + globalCheckpoints[index] = g; + }; + globalCheckpointListeners.add(randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint - 1), listener); + // the listener should be notified immediately + assertThat(globalCheckpoints[index], equalTo(globalCheckpoint)); + } + + // test the listeners are not invoked twice + final long nextGlobalCheckpoint = randomLongBetween(globalCheckpoint + 1, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(nextGlobalCheckpoint); + for (int i = 0; i < numberOfListeners; i++) { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + + // closing should also not notify the listeners + globalCheckpointListeners.close(); + for (int i = 0; i < numberOfListeners; i++) { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + } + + public void testFailingListenerReadyToBeNotified() { + final Logger mockLogger = mock(Logger.class); + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, mockLogger); + final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED + 1, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint); + final int numberOfListeners = randomIntBetween(0, 16); + final long[] globalCheckpoints = new long[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final boolean failure = randomBoolean(); + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (g, e) -> { + assert globalCheckpoint != UNASSIGNED_SEQ_NO; + assert e == null; + if (failure) { + globalCheckpoints[index] = Long.MIN_VALUE; + throw new RuntimeException("failure"); + } else { + globalCheckpoints[index] = globalCheckpoint; + } + }; + globalCheckpointListeners.add(randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint - 1), listener); + // the listener should be notified immediately + if (failure) { + assertThat(globalCheckpoints[i], equalTo(Long.MIN_VALUE)); + final ArgumentCaptor<ParameterizedMessage> message = ArgumentCaptor.forClass(ParameterizedMessage.class); + final ArgumentCaptor<RuntimeException> t = ArgumentCaptor.forClass(RuntimeException.class); + verify(mockLogger).warn(message.capture(), t.capture()); + reset(mockLogger); + assertThat( + message.getValue().getFormat(), + equalTo("error notifying global checkpoint listener of updated global checkpoint [{}]")); + assertNotNull(message.getValue().getParameters()); + assertThat(message.getValue().getParameters().length, equalTo(1)); + assertThat(message.getValue().getParameters()[0], equalTo(globalCheckpoint)); + assertNotNull(t.getValue()); + assertThat(t.getValue().getMessage(), equalTo("failure")); + } else { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + } + } + + public void 
testClose() throws IOException { + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, logger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + final int numberOfListeners = randomIntBetween(0, 16); + final IndexShardClosedException[] exceptions = new IndexShardClosedException[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final AtomicBoolean invoked = new AtomicBoolean(); + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (globalCheckpoint, e) -> { + if (invoked.compareAndSet(false, true) == false) { + throw new IllegalStateException("listener invoked twice"); + } + assert globalCheckpoint == UNASSIGNED_SEQ_NO; + assert e != null; + exceptions[index] = e; + }; + globalCheckpointListeners.add(NO_OPS_PERFORMED, listener); + } + globalCheckpointListeners.close(); + for (int i = 0; i < numberOfListeners; i++) { + assertNotNull(exceptions[i]); + assertThat(exceptions[i].getShardId(), equalTo(shardId)); + } + + // test the listeners are not invoked twice + for (int i = 0; i < numberOfListeners; i++) { + exceptions[i] = null; + } + globalCheckpointListeners.close(); + for (int i = 0; i < numberOfListeners; i++) { + assertNull(exceptions[i]); + } + } + + public void testAddAfterClose() throws InterruptedException, IOException { + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, logger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + globalCheckpointListeners.close(); + final AtomicBoolean invoked = new AtomicBoolean(); + final CountDownLatch latch = new CountDownLatch(1); + final GlobalCheckpointListeners.GlobalCheckpointListener listener = (g, e) -> { + assert g == UNASSIGNED_SEQ_NO; + assert e != null; + if (invoked.compareAndSet(false, true) == false) { + latch.countDown(); + throw new IllegalStateException("listener invoked twice"); + } + latch.countDown(); + }; + globalCheckpointListeners.add(randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE), listener); + latch.await(); + assertTrue(invoked.get()); + } + + public void testFailingListenerOnUpdate() { + final Logger mockLogger = mock(Logger.class); + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, mockLogger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + final int numberOfListeners = randomIntBetween(0, 16); + final boolean[] failures = new boolean[numberOfListeners]; + final long[] globalCheckpoints = new long[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final boolean failure = randomBoolean(); + failures[index] = failure; + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (g, e) -> { + assert g != UNASSIGNED_SEQ_NO; + assert e == null; + if (failure) { + globalCheckpoints[index] = Long.MIN_VALUE; + throw new RuntimeException("failure"); + } else { + globalCheckpoints[index] = g; + } + }; + globalCheckpointListeners.add(NO_OPS_PERFORMED, listener); + } + final long globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE); + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint); + for (int i = 0; i < numberOfListeners; i++) { + if (failures[i]) { + assertThat(globalCheckpoints[i], equalTo(Long.MIN_VALUE)); + } else { + assertThat(globalCheckpoints[i], equalTo(globalCheckpoint)); + } + } + int failureCount = 0; + 
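+ // tally how many listeners deliberately threw; the mock logger should have been warned exactly once per failure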
for (int i = 0; i < numberOfListeners; i++) { + if (failures[i]) { + failureCount++; + } + } + if (failureCount > 0) { + final ArgumentCaptor<ParameterizedMessage> message = ArgumentCaptor.forClass(ParameterizedMessage.class); + final ArgumentCaptor<RuntimeException> t = ArgumentCaptor.forClass(RuntimeException.class); + verify(mockLogger, times(failureCount)).warn(message.capture(), t.capture()); + assertThat( + message.getValue().getFormat(), + equalTo("error notifying global checkpoint listener of updated global checkpoint [{}]")); + assertNotNull(message.getValue().getParameters()); + assertThat(message.getValue().getParameters().length, equalTo(1)); + assertThat(message.getValue().getParameters()[0], equalTo(globalCheckpoint)); + assertNotNull(t.getValue()); + assertThat(t.getValue().getMessage(), equalTo("failure")); + } + } + + public void testFailingListenerOnClose() throws IOException { + final Logger mockLogger = mock(Logger.class); + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, Runnable::run, mockLogger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + final int numberOfListeners = randomIntBetween(0, 16); + final boolean[] failures = new boolean[numberOfListeners]; + final IndexShardClosedException[] exceptions = new IndexShardClosedException[numberOfListeners]; + for (int i = 0; i < numberOfListeners; i++) { + final int index = i; + final boolean failure = randomBoolean(); + failures[index] = failure; + final GlobalCheckpointListeners.GlobalCheckpointListener listener = + (g, e) -> { + assert g == UNASSIGNED_SEQ_NO; + assert e != null; + if (failure) { + throw new RuntimeException("failure"); + } else { + exceptions[index] = e; + } + }; + globalCheckpointListeners.add(NO_OPS_PERFORMED, listener); + } + globalCheckpointListeners.close(); + for (int i = 0; i < numberOfListeners; i++) { + if (failures[i]) { + assertNull(exceptions[i]); + } else { + assertNotNull(exceptions[i]); + assertThat(exceptions[i].getShardId(), equalTo(shardId)); + } + } + int failureCount = 0; + for (int i = 0; i < numberOfListeners; i++) { + if (failures[i]) { + failureCount++; + } + } + if (failureCount > 0) { + final ArgumentCaptor<String> message = ArgumentCaptor.forClass(String.class); + final ArgumentCaptor<RuntimeException> t = ArgumentCaptor.forClass(RuntimeException.class); + verify(mockLogger, times(failureCount)).warn(message.capture(), t.capture()); + assertThat(message.getValue(), equalTo("error notifying global checkpoint listener of closed shard")); + assertNotNull(t.getValue()); + assertThat(t.getValue().getMessage(), equalTo("failure")); + } + } + + public void testNotificationUsesExecutor() { + final AtomicInteger count = new AtomicInteger(); + final Executor executor = command -> { + count.incrementAndGet(); + command.run(); + }; + final GlobalCheckpointListeners globalCheckpointListeners = new GlobalCheckpointListeners(shardId, executor, logger); + globalCheckpointListeners.globalCheckpointUpdated(NO_OPS_PERFORMED); + final int numberOfListeners = randomIntBetween(0, 16); + for (int i = 0; i < numberOfListeners; i++) { + globalCheckpointListeners.add(NO_OPS_PERFORMED, (g, e) -> {}); + } + globalCheckpointListeners.globalCheckpointUpdated(randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE)); + assertThat(count.get(), equalTo(1)); + } + + public void testConcurrency() throws BrokenBarrierException, InterruptedException { + final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, 8)); + final GlobalCheckpointListeners globalCheckpointListeners = new 
GlobalCheckpointListeners(shardId, executor, logger); + final AtomicLong globalCheckpoint = new AtomicLong(NO_OPS_PERFORMED); + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint.get()); + // we are going to synchronize the actions of three threads: the updating thread, the listener thread, and the main test thread + final CyclicBarrier barrier = new CyclicBarrier(3); + final int numberOfIterations = randomIntBetween(1, 1024); + final AtomicBoolean closed = new AtomicBoolean(); + final Thread updatingThread = new Thread(() -> { + // synchronize starting with the listener thread and the main test thread + awaitQuietly(barrier); + for (int i = 0; i < numberOfIterations; i++) { + if (rarely() && closed.get() == false) { + closed.set(true); + try { + globalCheckpointListeners.close(); + } catch (final IOException e) { + throw new UncheckedIOException(e); + } + } + if (closed.get() == false) { + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint.incrementAndGet()); + } + } + // synchronize ending with the listener thread and the main test thread + awaitQuietly(barrier); + }); + + final List<AtomicBoolean> invocations = new CopyOnWriteArrayList<>(); + final Thread listenersThread = new Thread(() -> { + // synchronize starting with the updating thread and the main test thread + awaitQuietly(barrier); + for (int i = 0; i < numberOfIterations; i++) { + final AtomicBoolean invocation = new AtomicBoolean(); + invocations.add(invocation); + // sometimes this will notify the listener immediately + globalCheckpointListeners.add( + globalCheckpoint.get(), + (g, e) -> { + if (invocation.compareAndSet(false, true) == false) { + throw new IllegalStateException("listener invoked twice"); + } + }); + } + // synchronize ending with the updating thread and the main test thread + awaitQuietly(barrier); + }); + updatingThread.start(); + listenersThread.start(); + // synchronize starting with the updating thread and the listener thread + barrier.await(); + // synchronize ending with the updating thread and the listener thread + barrier.await(); + // one last update to ensure all listeners are notified + if (closed.get() == false) { + globalCheckpointListeners.globalCheckpointUpdated(globalCheckpoint.incrementAndGet()); + } + assertThat(globalCheckpointListeners.pendingListeners(), equalTo(0)); + executor.shutdown(); + executor.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS); + for (final AtomicBoolean invocation : invocations) { + assertTrue(invocation.get()); + } + updatingThread.join(); + listenersThread.join(); + } + + private void awaitQuietly(final CyclicBarrier barrier) { + try { + barrier.await(); + } catch (final BrokenBarrierException | InterruptedException e) { + throw new AssertionError(e); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 57c430e66ff7d..95c0f07927d89 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -86,6 +86,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Predicate; @@ -96,6 +97,8 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static 
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
index 57c430e66ff7d..95c0f07927d89 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
@@ -86,6 +86,7 @@
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Predicate;
@@ -96,6 +97,8 @@
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
 import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
+import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
 import static org.elasticsearch.index.shard.IndexShardTestCase.getTranslog;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
@@ -631,4 +634,48 @@ private static ShardRouting getInitializingShardRouting(ShardRouting existingShardRouting) {
             RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE);
         return shardRouting;
     }
+
+    public void testGlobalCheckpointListeners() throws Exception {
+        createIndex("test", Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0).build());
+        ensureGreen();
+        final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        final IndexService test = indicesService.indexService(resolveIndex("test"));
+        final IndexShard shard = test.getShardOrNull(0);
+        final int numberOfUpdates = randomIntBetween(1, 128);
+        for (int i = 0; i < numberOfUpdates; i++) {
+            final int index = i;
+            final AtomicLong globalCheckpoint = new AtomicLong();
+            shard.addGlobalCheckpointListener(
+                    i - 1,
+                    (g, e) -> {
+                        assert g >= NO_OPS_PERFORMED;
+                        assert e == null;
+                        globalCheckpoint.set(g);
+                    });
+            client().prepareIndex("test", "_doc", Integer.toString(i)).setSource("{}", XContentType.JSON).get();
+            assertBusy(() -> assertThat(globalCheckpoint.get(), equalTo((long) index)));
+            // adding a listener expecting a lower global checkpoint should fire immediately
+            final AtomicLong immediateGlobalCheckpoint = new AtomicLong();
+            shard.addGlobalCheckpointListener(
+                    randomLongBetween(NO_OPS_PERFORMED, i - 1),
+                    (g, e) -> {
+                        assert g >= NO_OPS_PERFORMED;
+                        assert e == null;
+                        immediateGlobalCheckpoint.set(g);
+                    });
+            assertBusy(() -> assertThat(immediateGlobalCheckpoint.get(), equalTo((long) index)));
+        }
+        final AtomicBoolean invoked = new AtomicBoolean();
+        shard.addGlobalCheckpointListener(
+                numberOfUpdates - 1,
+                (g, e) -> {
+                    invoked.set(true);
+                    assert g == UNASSIGNED_SEQ_NO;
+                    assert e != null;
+                    assertThat(e.getShardId(), equalTo(shard.shardId()));
+                });
+        shard.close("closed", randomBoolean());
+        assertBusy(() -> assertTrue(invoked.get()));
+    }
 }
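The integration test above exercises both delivery paths of the listener API: a listener registered above the current global checkpoint fires on a later update, while one registered at or below it fires immediately. A deliberately simplified, single-threaded model of that contract (GlobalCheckpointListeners itself handles executors, failures, and shard close; this only mirrors the fire-now-or-fire-later decision, and the class and method names here are illustrative, not the production API):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.LongConsumer;

    public class CheckpointRegistrySketch {
        private long globalCheckpoint = -1; // analogous to NO_OPS_PERFORMED
        private final Map<LongConsumer, Long> listeners = new HashMap<>();

        // fire immediately if the checkpoint has already advanced past the waited-for value
        public void add(final long waitingFor, final LongConsumer listener) {
            if (globalCheckpoint > waitingFor) {
                listener.accept(globalCheckpoint);
            } else {
                listeners.put(listener, waitingFor);
            }
        }

        // otherwise fire on the update that advances past the waited-for value
        public void updated(final long checkpoint) {
            globalCheckpoint = checkpoint;
            final List<LongConsumer> ready = new ArrayList<>();
            listeners.forEach((listener, waitingFor) -> {
                if (checkpoint > waitingFor) {
                    ready.add(listener);
                }
            });
            ready.forEach(listeners::remove);
            ready.forEach(l -> l.accept(checkpoint));
        }
    }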
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index 1b166a08ffca6..2b2f034337b00 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -33,6 +33,7 @@
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
+import org.elasticsearch.Assertions;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.flush.FlushRequest;
@@ -568,28 +569,20 @@ public void testOperationPermitsOnPrimaryShards() throws InterruptedException, ExecutionException {
             ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true,
                 ShardRoutingState.STARTED, replicaRouting.allocationId());
             final long newPrimaryTerm = indexShard.getPendingPrimaryTerm() + between(1, 1000);
+            CountDownLatch latch = new CountDownLatch(1);
             indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {
                     assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm));
+                    latch.countDown();
                 }, 0L,
                 Collections.singleton(indexShard.routingEntry().allocationId().getId()),
                 new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build(),
                 Collections.emptySet());
+            latch.await();
         } else {
             indexShard = newStartedShard(true);
         }
         final long primaryTerm = indexShard.getPendingPrimaryTerm();
         assertEquals(0, indexShard.getActiveOperationsCount());
-        if (indexShard.routingEntry().isRelocationTarget() == false) {
-            try {
-                final PlainActionFuture<Releasable> permitAcquiredFuture = new PlainActionFuture<>();
-                indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), permitAcquiredFuture,
-                    ThreadPool.Names.WRITE, "");
-                permitAcquiredFuture.actionGet();
-                fail("shard shouldn't accept operations as replica");
-            } catch (IllegalStateException ignored) {
-
-            }
-        }
         Releasable operation1 = acquirePrimaryOperationPermitBlockingly(indexShard);
         assertEquals(1, indexShard.getActiveOperationsCount());
         Releasable operation2 = acquirePrimaryOperationPermitBlockingly(indexShard);
@@ -598,6 +591,22 @@
         Releasables.close(operation1, operation2);
         assertEquals(0, indexShard.getActiveOperationsCount());
 
+        if (Assertions.ENABLED && indexShard.routingEntry().isRelocationTarget() == false) {
+            assertThat(expectThrows(AssertionError.class, () -> indexShard.acquireReplicaOperationPermit(primaryTerm,
+                indexShard.getGlobalCheckpoint(), new ActionListener<Releasable>() {
+                    @Override
+                    public void onResponse(Releasable releasable) {
+                        fail();
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        fail();
+                    }
+                },
+                ThreadPool.Names.WRITE, "")).getMessage(), containsString("in primary mode cannot be a replication target"));
+        }
+
         closeShards(indexShard);
     }
@@ -655,11 +664,11 @@ public void testOperationPermitOnReplicaShards() throws Exception {
         logger.info("shard routing to {}", shardRouting);
         assertEquals(0, indexShard.getActiveOperationsCount());
-        if (shardRouting.primary() == false) {
-            final IllegalStateException e =
-                expectThrows(IllegalStateException.class,
-                    () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.INDEX, ""));
-            assertThat(e, hasToString(containsString("shard " + shardRouting + " is not a primary")));
+        if (shardRouting.primary() == false && Assertions.ENABLED) {
+            final AssertionError e =
+                expectThrows(AssertionError.class,
+                    () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, ""));
+            assertThat(e, hasToString(containsString("acquirePrimaryOperationPermit should only be called on primary shard")));
         }
 
         final long primaryTerm = indexShard.getPendingPrimaryTerm();
@@ -2389,6 +2398,7 @@ public void testRecoverFromLocalShard() throws IOException {
         closeShards(sourceShard, targetShard);
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32766")
     public void testDocStats() throws IOException {
         IndexShard indexShard = null;
         try {
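These rewritten permit tests only mean something when the JVM runs with -ea, hence the Assertions.ENABLED guard before expecting an AssertionError. A self-contained sketch of that guard-plus-expectThrows shape (checkState is a hypothetical stand-in for the production invariant; the ENABLED detection mirrors the idea behind org.elasticsearch.Assertions):

    public class AssertGuardSketch {
        // capture once whether -ea is enabled for this class
        static final boolean ASSERTIONS_ENABLED;
        static {
            boolean enabled = false;
            assert enabled = true; // intentional side effect; only runs with -ea
            ASSERTIONS_ENABLED = enabled;
        }

        static void checkState(final boolean primaryMode) {
            assert primaryMode == false : "shard in primary mode cannot be a replication target";
        }

        public static void main(String[] args) {
            if (ASSERTIONS_ENABLED) { // without -ea the invariant never trips, so skip
                try {
                    checkState(true);
                    throw new IllegalStateException("expected an AssertionError");
                } catch (final AssertionError expected) {
                    // the invariant tripped, as the test requires
                }
            }
        }
    }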
diff --git a/server/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/server/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
index 9cc6d86bc2fba..7d548fc42d695 100644
--- a/server/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
+++ b/server/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.test.transport.MockTransportService;
 
 import java.io.IOException;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.Collection;
@@ -86,7 +87,7 @@ public void testCorruptTranslogFiles() throws Exception {
         indexRandom(false, false, false, Arrays.asList(builders)); // this one
 
         // Corrupt the translog file(s)
-        corruptRandomTranslogFiles();
+        corruptRandomTranslogFile();
 
         // Restart the single node
         internalCluster().fullRestart();
@@ -102,7 +103,7 @@
     }
 
-    private void corruptRandomTranslogFiles() throws IOException {
+    private void corruptRandomTranslogFile() throws IOException {
         ClusterState state = client().admin().cluster().prepareState().get().getState();
         GroupShardsIterator shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
         final Index test = state.metaData().index("test").getIndex();
@@ -119,9 +120,12 @@ private void corruptRandomTranslogFiles() throws IOException {
             String path = fsPath.getPath();
             String relativeDataLocationPath = "indices/" + test.getUUID() + "/" + Integer.toString(shardRouting.getId()) + "/translog";
             Path translogDir = PathUtils.get(path).resolve(relativeDataLocationPath);
-            translogDirs.add(translogDir);
+            if (Files.isDirectory(translogDir)) {
+                translogDirs.add(translogDir);
+            }
         }
-        TestTranslog.corruptTranslogFiles(logger, random(), translogDirs);
+        Path translogDir = RandomPicks.randomFrom(random(), translogDirs);
+        TestTranslog.corruptRandomTranslogFile(logger, random(), translogDir, TestTranslog.minTranslogGenUsedInRecovery(translogDir));
     }
 
     /** Disables translog flushing for the specified index */
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TestTranslog.java b/server/src/test/java/org/elasticsearch/index/translog/TestTranslog.java
index 7ab9fa6733011..f37ec5a8e55d5 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TestTranslog.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TestTranslog.java
@@ -34,8 +34,6 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
-import java.util.Collection;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
@@ -45,7 +43,8 @@
 
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.not;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNot.not;
 
 /**
  * Helpers for testing translog.
@@ -54,44 +53,33 @@ public class TestTranslog {
     static final Pattern TRANSLOG_FILE_PATTERN = Pattern.compile("translog-(\\d+)\\.tlog");
 
     /**
-     * Corrupts some translog files (translog-N.tlog) from the given translog directories.
+     * Corrupts a random translog file (translog-N.tlog) from the given translog directory.
      *
-     * @return a collection of tlog files that have been corrupted.
+     * @return the translog file that has been corrupted.
     */
-    public static Set<Path> corruptTranslogFiles(Logger logger, Random random, Collection<Path> translogDirs) throws IOException {
+    public static Path corruptRandomTranslogFile(Logger logger, Random random, Path translogDir, long minGeneration) throws
+        IOException {
         Set<Path> candidates = new TreeSet<>(); // TreeSet makes sure iteration order is deterministic
-        for (Path translogDir : translogDirs) {
-            if (Files.isDirectory(translogDir)) {
-                final long minUsedTranslogGen = minTranslogGenUsedInRecovery(translogDir);
-                logger.info("--> Translog dir [{}], minUsedTranslogGen [{}]", translogDir, minUsedTranslogGen);
-                try (DirectoryStream<Path> stream = Files.newDirectoryStream(translogDir)) {
-                    for (Path item : stream) {
-                        if (Files.isRegularFile(item)) {
-                            // Makes sure that we will corrupt tlog files that are referenced by the Checkpoint.
-                            final Matcher matcher = TRANSLOG_FILE_PATTERN.matcher(item.getFileName().toString());
-                            if (matcher.matches() && Long.parseLong(matcher.group(1)) >= minUsedTranslogGen) {
-                                candidates.add(item);
-                            }
-                        }
+        logger.info("--> Translog dir [{}], minUsedTranslogGen [{}]", translogDir, minGeneration);
+        try (DirectoryStream<Path> stream = Files.newDirectoryStream(translogDir)) {
+            for (Path item : stream) {
+                if (Files.isRegularFile(item)) {
+                    final Matcher matcher = TRANSLOG_FILE_PATTERN.matcher(item.getFileName().toString());
+                    if (matcher.matches() && Long.parseLong(matcher.group(1)) >= minGeneration) {
+                        candidates.add(item);
                    }
                 }
             }
         }
+        assertThat(candidates, is(not(empty())));
 
-        Set<Path> corruptedFiles = new HashSet<>();
-        if (!candidates.isEmpty()) {
-            int corruptions = RandomNumbers.randomIntBetween(random, 5, 20);
-            for (int i = 0; i < corruptions; i++) {
-                Path fileToCorrupt = RandomPicks.randomFrom(random, candidates);
-                corruptFile(logger, random, fileToCorrupt);
-                corruptedFiles.add(fileToCorrupt);
-            }
-        }
-        assertThat("no translog file corrupted", corruptedFiles, not(empty()));
-        return corruptedFiles;
+        Path corruptedFile = RandomPicks.randomFrom(random, candidates);
+        corruptFile(logger, random, corruptedFile);
+        return corruptedFile;
     }
 
-    static void corruptFile(Logger logger, Random random, Path fileToCorrupt) throws IOException {
+
+    static void corruptFile(Logger logger, Random random, Path fileToCorrupt) throws IOException {
         try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
             // read
             raf.position(RandomNumbers.randomLongBetween(random, 0, raf.size() - 1));
@@ -117,7 +105,7 @@ static void corruptFile(Logger logger, Random random, Path fileToCorrupt) throws IOException {
     /**
     * Lists all existing commits in a given index path, then reads the minimum translog generation that will be used in recoverFromTranslog.
     */
-    private static long minTranslogGenUsedInRecovery(Path translogPath) throws IOException {
+    public static long minTranslogGenUsedInRecovery(Path translogPath) throws IOException {
         try (NIOFSDirectory directory = new NIOFSDirectory(translogPath.getParent().resolve("index"))) {
             List<IndexCommit> commits = DirectoryReader.listCommits(directory);
             final String translogUUID = commits.get(commits.size() - 1).getUserData().get(Translog.TRANSLOG_UUID_KEY);
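corruptFile (retained above) works by seeking to a random position in the file and rewriting one byte. A minimal, standalone sketch of that approach against a throwaway temp file; all the translog-specific guards are omitted:

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;
    import java.util.Random;

    public class CorruptOneByteSketch {
        public static void main(String[] args) throws IOException {
            final Path file = Files.createTempFile("corrupt", ".bin");
            Files.write(file, new byte[]{1, 2, 3, 4, 5, 6, 7, 8});
            final Random random = new Random();
            try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
                final long position = random.nextInt((int) channel.size());
                // read the existing byte at a random position
                final ByteBuffer one = ByteBuffer.allocate(1);
                channel.read(one, position);
                // flip its bits so the new value is guaranteed to differ, then write it back
                final byte corrupted = (byte) ~one.get(0);
                channel.write(ByteBuffer.wrap(new byte[]{corrupted}), position);
            }
        }
    }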
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
index 74f1491cdfb83..8f832fe82d0fb 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java
@@ -750,7 +750,9 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable {
     }
 
-    public void testTranslogChecksums() throws Exception {
+    public void testTranslogCorruption() throws Exception {
+        TranslogConfig config = translog.getConfig();
+        String uuid = translog.getTranslogUUID();
         List<Translog.Location> locations = new ArrayList<>();
 
         int translogOperations = randomIntBetween(10, 100);
@@ -758,23 +760,23 @@
             String ascii = randomAlphaOfLengthBetween(1, 50);
             locations.add(translog.add(new Translog.Index("test", "" + op, op, primaryTerm.get(), ascii.getBytes("UTF-8"))));
         }
-        translog.sync();
+        translog.close();
 
-        corruptTranslogs(translogDir);
+        TestTranslog.corruptRandomTranslogFile(logger, random(), translogDir, 0);
+        int corruptionsCaught = 0;
 
-        AtomicInteger corruptionsCaught = new AtomicInteger(0);
-        try (Translog.Snapshot snapshot = translog.newSnapshot()) {
-            for (Translog.Location location : locations) {
-                try {
-                    Translog.Operation next = snapshot.next();
-                    assertNotNull(next);
-                } catch (TranslogCorruptedException e) {
-                    corruptionsCaught.incrementAndGet();
+        try (Translog translog = openTranslog(config, uuid)) {
+            try (Translog.Snapshot snapshot = translog.newSnapshot()) {
+                for (Location loc : locations) {
+                    snapshot.next();
                 }
             }
-            expectThrows(TranslogCorruptedException.class, snapshot::next);
-            assertThat("at least one corruption was caused and caught", corruptionsCaught.get(), greaterThanOrEqualTo(1));
+        } catch (TranslogCorruptedException e) {
+            assertThat(e.getMessage(), containsString(translogDir.toString()));
+            corruptionsCaught++;
         }
+
+        assertThat("corruption is caught", corruptionsCaught, greaterThanOrEqualTo(1));
     }
 
     public void testTruncatedTranslogs() throws Exception {
@@ -818,25 +820,6 @@ private void truncateTranslogs(Path directory) throws Exception {
     }
 
-    /**
-     * Randomly overwrite some bytes in the translog files
-     */
-    private void corruptTranslogs(Path directory) throws Exception {
-        Path[] files = FileSystemUtils.files(directory, "translog-*");
-        for (Path file : files) {
-            logger.info("--> corrupting {}...", file);
-            FileChannel f = FileChannel.open(file, StandardOpenOption.READ, StandardOpenOption.WRITE);
-            int corruptions = scaledRandomIntBetween(10, 50);
-            for (int i = 0; i < corruptions; i++) {
-                // note: with the current logic, this will sometimes be a no-op
-                long pos = randomIntBetween(0, (int) f.size());
-                ByteBuffer junk = ByteBuffer.wrap(new byte[]{randomByte()});
-                f.write(junk, pos);
-            }
-            f.close();
-        }
-    }
-
     private Term newUid(ParsedDocument doc) {
         return new Term("_uid", Uid.createUidAsBytes(doc.type(), doc.id()));
     }
@@ -1507,7 +1490,8 @@ public void testSnapshotFromStreamInput() throws IOException {
             ops.add(test);
         }
         Translog.writeOperations(out, ops);
-        final List<Translog.Operation> readOperations = Translog.readOperations(out.bytes().streamInput());
+        final List<Translog.Operation> readOperations = Translog.readOperations(
+                out.bytes().streamInput(), "testSnapshotFromStreamInput");
         assertEquals(ops.size(), readOperations.size());
         assertEquals(ops, readOperations);
     }
diff --git a/server/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/server/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
index 297314fc2dedf..8b5cb54f3f251 100644
--- a/server/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
+++ b/server/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java
@@ -154,9 +154,9 @@ public void testCorruptTranslogTruncation() throws Exception {
         // shut down the replica node to be tested later
         internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode));
 
-        // Corrupt the translog file(s)
+        // Corrupt the translog file
         logger.info("--> corrupting translog");
-        corruptRandomTranslogFiles("test");
+        corruptRandomTranslogFile("test");
 
         // Restart the single node
         logger.info("--> restarting node");
@@ -265,15 +265,16 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception {
         // sample the replica node translog dirs
         final ShardId shardId = new ShardId(resolveIndex("test"), 0);
         Set<Path> translogDirs = getTranslogDirs(replicaNode, shardId);
+        Path tdir = randomFrom(translogDirs);
 
         // stop the cluster nodes. we don't use full restart so the node start up order will be the same
         // and shard roles will be maintained
         internalCluster().stopRandomDataNode();
         internalCluster().stopRandomDataNode();
 
-        // Corrupt the translog file(s)
+        // Corrupt the translog file
         logger.info("--> corrupting translog");
-        TestTranslog.corruptTranslogFiles(logger, random(), translogDirs);
+        TestTranslog.corruptRandomTranslogFile(logger, random(), tdir, TestTranslog.minTranslogGenUsedInRecovery(tdir));
 
         // Restart the single node
         logger.info("--> starting node");
@@ -354,9 +355,10 @@ private Set<Path> getTranslogDirs(String nodeId, ShardId shardId) {
         return translogDirs;
     }
 
-    private void corruptRandomTranslogFiles(String indexName) throws IOException {
+    private void corruptRandomTranslogFile(String indexName) throws IOException {
         Set<Path> translogDirs = getTranslogDirs(indexName);
-        TestTranslog.corruptTranslogFiles(logger, random(), translogDirs);
+        Path translogDir = randomFrom(translogDirs);
+        TestTranslog.corruptRandomTranslogFile(logger, random(), translogDir, TestTranslog.minTranslogGenUsedInRecovery(translogDir));
     }
 
     /** Disables translog flushing for the specified index */
diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
index 36e57058edf00..b70951202975f 100644
--- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.translog.SnapshotMatchers;
 import org.elasticsearch.index.translog.Translog;
+import org.elasticsearch.test.junit.annotations.TestLogging;
 
 import java.util.HashMap;
 import java.util.List;
@@ -74,6 +75,7 @@ public void testTranslogHistoryTransferred() throws Exception {
         }
     }
 
+    @TestLogging("_root:TRACE")
     public void testRetentionPolicyChangeDuringRecovery() throws Exception {
         try (ReplicationGroup shards = createGroup(0)) {
             shards.startPrimary();
@@ -100,7 +102,10 @@ public void testRetentionPolicyChangeDuringRecovery() throws Exception {
             releaseRecovery.countDown();
             future.get();
             // rolling/flushing is async
-            assertBusy(() -> assertThat(getTranslog(replica).totalOperations(), equalTo(0)));
+            assertBusy(() -> {
+                assertThat(replica.getLastSyncedGlobalCheckpoint(), equalTo(19L));
+                assertThat(getTranslog(replica).totalOperations(), equalTo(0));
+            });
         }
     }
 
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
index 9e97e9bbfd449..5d42a31632f47 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java
@@ -34,7 +34,7 @@
 import org.elasticsearch.action.ingest.SimulateDocumentBaseResult;
 import org.elasticsearch.action.ingest.SimulatePipelineRequest;
 import org.elasticsearch.action.ingest.SimulatePipelineResponse;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -126,7 +126,7 @@ public void testSimulate() throws Exception {
         assertThat(simulateDocumentBaseResult.getFailure(), nullValue());
 
         // cleanup
-        WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
+        AcknowledgedResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
         assertTrue(deletePipelineResponse.isAcknowledged());
     }
 
@@ -172,7 +172,7 @@ public void testBulkWithIngestFailures() throws Exception {
         }
 
         // cleanup
-        WritePipelineResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
+        AcknowledgedResponse deletePipelineResponse = client().admin().cluster().prepareDeletePipeline("_id").get();
         assertTrue(deletePipelineResponse.isAcknowledged());
     }
 
@@ -246,7 +246,7 @@ public void test() throws Exception {
         assertThat(doc.get("processed"), equalTo(true));
 
         DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest("_id");
-        WritePipelineResponse response = client().admin().cluster().deletePipeline(deletePipelineRequest).get();
+        AcknowledgedResponse response = client().admin().cluster().deletePipeline(deletePipelineRequest).get();
         assertThat(response.isAcknowledged(), is(true));
 
         getResponse = client().admin().cluster().prepareGetPipeline("_id").get();
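Because the roll/flush being asserted on is asynchronous, the test polls with assertBusy until the condition holds or a timeout elapses. ESTestCase.assertBusy is the real implementation; the following self-contained helper only sketches its retry-with-backoff shape:

    import java.util.concurrent.TimeUnit;

    public class AssertBusySketch {
        static void assertBusy(final Runnable assertion, final long timeout, final TimeUnit unit) throws InterruptedException {
            final long deadline = System.nanoTime() + unit.toNanos(timeout);
            long sleepMillis = 1;
            while (true) {
                try {
                    assertion.run();
                    return; // the assertion finally held
                } catch (final AssertionError e) {
                    if (System.nanoTime() >= deadline) {
                        throw e; // timed out: surface the last failure
                    }
                }
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(sleepMillis * 2, 500); // simple exponential backoff
            }
        }

        public static void main(String[] args) throws InterruptedException {
            final long start = System.currentTimeMillis();
            assertBusy(() -> {
                if (System.currentTimeMillis() - start < 50) {
                    throw new AssertionError("not ready yet");
                }
            }, 10, TimeUnit.SECONDS);
        }
    }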
diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
index 84d9327a0910a..338e5b662c5da 100644
--- a/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
+++ b/server/src/test/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.ingest;
 
 import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.action.ingest.WritePipelineResponse;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.node.NodeService;
@@ -95,7 +95,7 @@ public void testFailStartNode() throws Exception {
         installPlugin = true;
         String node1 = internalCluster().startNode();
 
-        WritePipelineResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
+        AcknowledgedResponse response = client().admin().cluster().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get();
         assertThat(response.isAcknowledged(), is(true));
         Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipelineStore().get("_id");
         assertThat(pipeline, notNullValue());
diff --git a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java
index d0fdc7c4c1cd4..d44b2b70a05ef 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/RestActionsTests.java
@@ -202,49 +202,6 @@ private static SearchShardTarget createSearchShardTarget(String nodeId, int shardId, String index, String clusterAlias) {
             new ShardId(new Index(index, IndexMetaData.INDEX_UUID_NA_VALUE), shardId), clusterAlias, OriginalIndices.NONE);
     }
 
-    public void testBuildBroadcastShardsHeaderNullCause() throws Exception {
-        ShardOperationFailedException[] failures = new ShardOperationFailedException[] {
-            new ShardSearchFailure("error", createSearchShardTarget("node0", 0, "index", null)),
-            new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", null)),
-            new ShardSearchFailure("error", createSearchShardTarget("node2", 2, "index", "cluster1")),
-            new ShardSearchFailure("error", createSearchShardTarget("node1", 1, "index", "cluster1")),
-            new ShardSearchFailure("a different error", createSearchShardTarget("node3", 3, "index", "cluster1"))
-        };
-
-        XContentBuilder builder = JsonXContent.contentBuilder();
-        builder.prettyPrint();
-        builder.startObject();
-        RestActions.buildBroadcastShardsHeader(builder, ToXContent.EMPTY_PARAMS, 12, 3, 0, 9, failures);
-        builder.endObject();
-        //TODO the reason is not printed out, as a follow-up we should probably either print it out when the cause is null,
-        //or even better enforce that the cause can't be null
-        assertThat(Strings.toString(builder), equalTo("{\n" +
-            "  \"_shards\" : {\n" +
-            "    \"total\" : 12,\n" +
-            "    \"successful\" : 3,\n" +
-            "    \"skipped\" : 0,\n" +
-            "    \"failed\" : 9,\n" +
-            "    \"failures\" : [\n" +
-            "      {\n" +
-            "        \"shard\" : 0,\n" +
-            "        \"index\" : \"index\",\n" +
-            "        \"node\" : \"node0\"\n" +
-            "      },\n" +
-            "      {\n" +
-            "        \"shard\" : 2,\n" +
-            "        \"index\" : \"cluster1:index\",\n" +
-            "        \"node\" : \"node2\"\n" +
-            "      },\n" +
-            "      {\n" +
-            "        \"shard\" : 3,\n" +
-            "        \"index\" : \"cluster1:index\",\n" +
-            "        \"node\" : \"node3\"\n" +
-            "      }\n" +
-            "    ]\n" +
-            "  }\n" +
-            "}"));
-    }
-
     @Override
     protected NamedXContentRegistry xContentRegistry() {
         return xContentRegistry;
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index 585f860165160..ea8b6a9223412 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -167,7 +167,7 @@ public void testAllowAllScriptContextSettings() throws IOException {
 
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
-        assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
+        assertCompileAccepted("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT);
     }
 
@@ -187,7 +187,7 @@ public void testAllowSomeScriptContextSettings() throws IOException {
 
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
-        assertCompileRejected("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
+        assertCompileRejected("painless", "script", ScriptType.INLINE, UpdateScript.CONTEXT);
     }
 
     public void testAllowNoScriptTypeSettings() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
index c58a158fc677d..2562683466a8c 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
@@ -345,6 +345,23 @@ searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true, null, null))) {
         }
     }
 
+    public void testIgnoreScriptfieldIfSizeZero() throws IOException {
+        createIndex("index");
+        final SearchService service = getInstanceFromNode(SearchService.class);
+        final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
+        final IndexShard indexShard = indexService.getShard(0);
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.scriptField("field" + 0,
+                new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
+        searchSourceBuilder.size(0);
+        try (SearchContext context = service.createContext(new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
+                searchSourceBuilder, new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, true, null, null))) {
+            assertEquals(0, context.scriptFields().fields().size());
+        }
+    }
+
     public static class FailOnRewriteQueryPlugin extends Plugin implements SearchPlugin {
         @Override
         public List<QuerySpec<?>> getQueries() {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java
index 985d82d4e1bee..755cb6e85292d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsTests.java
@@ -57,7 +57,10 @@ protected InternalSignificantTerms createTestInstance(String name,
         Set<Long> terms = new HashSet<>();
         for (int i = 0; i < numBuckets; ++i) {
             long term = randomValueOtherThanMany(l -> terms.add(l) == false, random()::nextLong);
-            buckets.add(new SignificantLongTerms.Bucket(subsetDfs[i], subsetSize, supersetDfs[i], supersetSize, term, aggs, format));
+            SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket(subsetDfs[i], subsetSize,
+                supersetDfs[i], supersetSize, term, aggs, format);
+            bucket.updateScore(significanceHeuristic);
+            buckets.add(bucket);
         }
         return new SignificantLongTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, subsetSize,
             supersetSize, significanceHeuristic, buckets);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java
index 5dafc1e846111..2255373fd346d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsTests.java
@@ -50,7 +50,10 @@ protected InternalSignificantTerms createTestInstance(String name,
         Set<BytesRef> terms = new HashSet<>();
         for (int i = 0; i < numBuckets; ++i) {
             BytesRef term = randomValueOtherThanMany(b -> terms.add(b) == false, () -> new BytesRef(randomAlphaOfLength(10)));
-            buckets.add(new SignificantStringTerms.Bucket(term, subsetDfs[i], subsetSize, supersetDfs[i], supersetSize, aggs, format));
+            SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket(term, subsetDfs[i], subsetSize,
+                supersetDfs[i], supersetSize, aggs, format);
+            bucket.updateScore(significanceHeuristic);
+            buckets.add(bucket);
         }
         return new SignificantStringTerms(name, requiredSize, 1L, pipelineAggregators, metaData, format, subsetSize,
             supersetSize, significanceHeuristic, buckets);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java
index 398044edcaf53..9e85455d96de9 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java
@@ -117,6 +117,8 @@ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
                 return value0 + value1 + value2;
             });
 
+            scripts.put("return null", vars -> null);
+
             return scripts;
         }
     }
@@ -478,6 +480,33 @@ public void testInlineScriptInsertZeros() {
         }
     }
 
+    public void testInlineScriptReturnNull() {
+        SearchResponse response = client()
+                .prepareSearch("idx")
+                .addAggregation(
+                        histogram("histo")
+                                .field(FIELD_1_NAME).interval(interval)
+                                .subAggregation(
+                                        bucketScript(
+                                                "nullField",
+                                                new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap())
+                                        )
+                                )
+                ).execute().actionGet();
+
+        assertSearchResponse(response);
+
+        Histogram histo = response.getAggregations().get("histo");
+        assertThat(histo, notNullValue());
+        assertThat(histo.getName(), equalTo("histo"));
+        List<? extends Histogram.Bucket> buckets = histo.getBuckets();
+
+        for (int i = 0; i < buckets.size(); ++i) {
+            Histogram.Bucket bucket = buckets.get(i);
+            assertNull(bucket.getAggregations().get("nullField"));
+        }
+    }
+
     public void testStoredScript() {
         assertAcked(client().admin().cluster().preparePutStoredScript()
             .setId("my_script")
diff --git a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java
index 1113077e2fd3d..66fbd97b429b1 100644
--- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java
+++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java
@@ -94,6 +94,7 @@ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
             }
 
             Map<String, Object> source = (Map<String, Object>) ctx.get("_source");
+            params.remove("ctx");
             source.putAll(params);
 
             return ctx;
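The one-line UpdateIT fix matters because the mock engine (see the MockScriptEngine change further down) puts the whole ctx map into the script's params; without removing "ctx" before source.putAll(params), the entire update context would be copied into _source. A tiny, map-only sketch of the bug and the fix:

    import java.util.HashMap;
    import java.util.Map;

    public class CtxLeakSketch {
        public static void main(String[] args) {
            final Map<String, Object> source = new HashMap<>();
            final Map<String, Object> ctx = new HashMap<>();
            ctx.put("_source", source);

            // vars as assembled by the mock engine: user params plus the ctx itself
            final Map<String, Object> params = new HashMap<>();
            params.put("field", "value");
            params.put("ctx", ctx);

            params.remove("ctx"); // the fix: drop ctx before merging into _source
            source.putAll(params);

            assert source.containsKey("ctx") == false : "ctx must not leak into _source";
        }
    }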
diff --git a/test/build.gradle b/test/build.gradle
index 2055896bda4e5..d0a3065e7c80a 100644
--- a/test/build.gradle
+++ b/test/build.gradle
@@ -28,19 +28,10 @@ subprojects {
   apply plugin: 'nebula.maven-base-publish'
   apply plugin: 'nebula.maven-scm'
 
-  // the main files are actually test files, so use the appropriate forbidden api sigs
-  forbiddenApisMain {
-    signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
-                      PrecommitTasks.getResource('/forbidden/es-signatures.txt'),
-                      PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')]
-  }
-
   // TODO: should we have licenses for our test deps?
   dependencyLicenses.enabled = false
   dependenciesInfo.enabled = false
 
   // TODO: why is the test framework pulled in...
-  forbiddenApisMain.enabled = false
   jarHell.enabled = false
 }
diff --git a/test/fixtures/build.gradle b/test/fixtures/build.gradle
index e69de29bb2d1d..153124e84b069 100644
--- a/test/fixtures/build.gradle
+++ b/test/fixtures/build.gradle
@@ -0,0 +1,5 @@
+
+subprojects {
+  // fixtures are mostly external and by default we don't want to check forbidden apis
+  forbiddenApisMain.enabled = false
+}
diff --git a/test/framework/build.gradle b/test/framework/build.gradle
index 39f1b75242880..88ff5b87acbb2 100644
--- a/test/framework/build.gradle
+++ b/test/framework/build.gradle
@@ -42,8 +42,9 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes'
 
 // the main files are actually test files, so use the appropriate forbidden api sigs
 forbiddenApisMain {
-  signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'),
-                    PrecommitTasks.getResource('/forbidden/test-signatures.txt')]
+  signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
+                    PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'),
+                    PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')]
 }
 
 // TODO: should we have licenses for our test deps?
diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
index 85ce4bad35edb..e469fc269b5d3 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java
@@ -563,7 +563,7 @@ public void onFailedEngine(String reason, @Nullable Exception e) {
                 TimeValue.timeValueMinutes(5), refreshListenerList, Collections.emptyList(), indexSort, handler,
                 new NoneCircuitBreakerService(),
                 globalCheckpointSupplier == null ?
-                    new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED) :
+                    new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED, update -> {}) :
                     globalCheckpointSupplier, primaryTerm::get, tombstoneDocSupplier());
         return config;
     }
diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
index 33b254c4d65f7..9e8524328d976 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java
@@ -654,7 +654,8 @@ private TransportWriteAction.WritePrimaryResult<BulkShardRequest, BulkShardResponse>
                 result;
         try (Releasable ignored = permitAcquiredFuture.actionGet()) {
             MappingUpdatePerformer noopMappingUpdater = (update, shardId, type) -> { };
-            result = TransportShardBulkAction.performOnPrimary(request, primary, null, System::currentTimeMillis, noopMappingUpdater);
+            result = TransportShardBulkAction.performOnPrimary(request, primary, null, System::currentTimeMillis, noopMappingUpdater,
+                null);
         }
         TransportWriteActionTestHelper.performPostWriteActions(primary, request, result.location, logger);
         return result;
diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
index 3fded43d858ed..5298c3995cec2 100644
--- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java
@@ -67,7 +67,7 @@ private static String toCamelCase(String s) {
         Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s);
         StringBuffer sb = new StringBuffer();
         while (m.find()) {
-            m.appendReplacement(sb, m.group(1).toUpperCase());
+            m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT));
         }
         m.appendTail(sb);
         sb.setCharAt(0, Character.toUpperCase(sb.charAt(0)));
diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
index 14dcac926f7b0..0d340a91d4cea 100644
--- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
@@ -96,11 +96,28 @@ public void execute(Map<String, Object> ctx) {
                 }
             };
             return context.factoryClazz.cast(factory);
+        } else if (context.instanceClazz.equals(UpdateScript.class)) {
+            UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) {
+                @Override
+                public void execute(Map<String, Object> ctx) {
+                    final Map<String, Object> vars = new HashMap<>();
+                    vars.put("ctx", ctx);
+                    vars.put("params", parameters);
+                    vars.putAll(parameters);
+                    script.apply(vars);
+                }
+            };
+            return context.factoryClazz.cast(factory);
         } else if (context.instanceClazz.equals(BucketAggregationScript.class)) {
             BucketAggregationScript.Factory factory = parameters -> new BucketAggregationScript(parameters) {
                 @Override
-                public double execute() {
-                    return ((Number) script.apply(getParams())).doubleValue();
+                public Double execute() {
+                    Object ret = script.apply(getParams());
+                    if (ret == null) {
+                        return null;
+                    } else {
+                        return ((Number) ret).doubleValue();
+                    }
                 }
             };
             return context.factoryClazz.cast(factory);
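The BucketAggregationScript change from primitive double to boxed Double is what lets a bucket script return null (exercised by testInlineScriptReturnNull above); with a primitive return type the null would surface as a NullPointerException during unboxing. A self-contained sketch of the distinction (both interfaces here are illustrative, not the production API):

    public class BoxedReturnSketch {
        interface PrimitiveScript { double execute(); }
        interface BoxedScript { Double execute(); }

        public static void main(String[] args) {
            final Object scriptResult = null; // what the user script returned

            // boxed return type: null propagates and the bucket value can simply be skipped
            final BoxedScript boxed = () -> scriptResult == null ? null : ((Number) scriptResult).doubleValue();
            assert boxed.execute() == null;

            // primitive return type: the same null blows up while producing the double
            final PrimitiveScript primitive = () -> ((Number) scriptResult).doubleValue();
            try {
                primitive.execute();
            } catch (final NullPointerException expected) {
                // this is exactly the failure the boxed signature avoids
            }
        }
    }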
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
index 050a793375005..3d37ec7e1c6ee 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
@@ -19,6 +19,8 @@
 package org.elasticsearch.test;
 
+import com.carrotsearch.randomizedtesting.RandomizedTest;
+import com.carrotsearch.randomizedtesting.SeedUtils;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.util.Accountable;
 import org.elasticsearch.Version;
@@ -86,6 +88,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.function.Function;
 import java.util.stream.Stream;
@@ -126,14 +129,13 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
         ALIAS_TO_CONCRETE_FIELD_NAME.put(GEO_POINT_ALIAS_FIELD_NAME, GEO_POINT_FIELD_NAME);
     }
 
-    protected static Version indexVersionCreated;
-
     private static ServiceHolder serviceHolder;
     private static int queryNameId = 0;
     private static Settings nodeSettings;
     private static Index index;
     private static String[] currentTypes;
     protected static String[] randomTypes;
+    private static long nowInMillis;
 
     protected static Index getIndex() {
         return index;
@@ -162,6 +164,7 @@ public static void beforeClass() {
                 .build();
 
         index = new Index(randomAlphaOfLengthBetween(1, 10), "_na_");
+        nowInMillis = randomNonNegativeLong();
 
         // Set a single type in the index
         switch (random().nextInt(3)) {
@@ -211,15 +214,19 @@ protected static String createUniqueRandomName() {
         return queryName;
     }
 
-    protected Settings indexSettings() {
+    protected Settings createTestIndexSettings() {
         // we have to prefer CURRENT since with the range of versions we support it's rather unlikely to get the current actually.
-        indexVersionCreated = randomBoolean() ? Version.CURRENT
-                : VersionUtils.randomVersionBetween(random(), null, Version.CURRENT);
+        Version indexVersionCreated = randomBoolean() ? Version.CURRENT
+                : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT);
         return Settings.builder()
             .put(IndexMetaData.SETTING_VERSION_CREATED, indexVersionCreated)
             .build();
     }
 
+    protected static IndexSettings indexSettings() {
+        return serviceHolder.idxSettings;
+    }
+
     protected static String expectedFieldName(String builderFieldName) {
         if (currentTypes.length == 0 || !isSingleType()) {
             return builderFieldName;
@@ -234,10 +241,20 @@ public static void afterClass() throws Exception {
     }
 
     @Before
-    public void beforeTest() throws IOException {
+    public void beforeTest() throws Exception {
         if (serviceHolder == null) {
-            serviceHolder = new ServiceHolder(nodeSettings, indexSettings(), getPlugins(), this);
+            // we initialize the serviceHolder and serviceHolderWithNoType just once, but need some
+            // calls to the randomness source during its setup. In order to not mix these calls with
+            // the randomness source that is later used in the test method, we use the master seed during
+            // this setup
+            long masterSeed = SeedUtils.parseSeed(RandomizedTest.getContext().getRunnerSeedAsString());
+            RandomizedTest.getContext().runWithPrivateRandomness(masterSeed, (Callable<Void>) () -> {
+                serviceHolder = new ServiceHolder(nodeSettings, createTestIndexSettings(), getPlugins(), nowInMillis,
+                    AbstractBuilderTestCase.this);
+                return null;
+            });
         }
+
         serviceHolder.clientInvocationHandler.delegate = this;
     }
 
@@ -333,11 +350,12 @@ private static class ServiceHolder implements Closeable {
         private final BitsetFilterCache bitsetFilterCache;
         private final ScriptService scriptService;
         private final Client client;
-        private final long nowInMillis = randomNonNegativeLong();
+        private final long nowInMillis;
 
-        ServiceHolder(Settings nodeSettings, Settings indexSettings,
-                      Collection<Class<? extends Plugin>> plugins, AbstractBuilderTestCase testCase) throws IOException {
+        ServiceHolder(Settings nodeSettings, Settings indexSettings, Collection<Class<? extends Plugin>> plugins, long nowInMillis,
+                      AbstractBuilderTestCase testCase) throws IOException {
             Environment env = InternalSettingsPreparer.prepareEnvironment(nodeSettings, null);
+            this.nowInMillis = nowInMillis;
             PluginsService pluginsService;
             pluginsService = new PluginsService(nodeSettings, null, env.modulesFile(), env.pluginsFile(), plugins);
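The runWithPrivateRandomness call above isolates one-time setup from per-test randomness: however many random draws the lazy initialization makes, the test method's random sequence (and thus the reproduce line) stays stable. The same idea, sketched with plain java.util.Random rather than the randomizedtesting runner (SETUP_SEED is a hypothetical fixed seed standing in for the master seed):

    import java.util.Random;

    public class PrivateRandomnessSketch {
        private static final long SETUP_SEED = 0xE5L; // hypothetical fixed "master" seed
        private static Object serviceHolder;

        static void beforeTest(final Random testRandom) {
            if (serviceHolder == null) {
                // draw setup randomness from a private, fixed-seed source so the
                // number of draws never shifts the per-test random sequence
                final Random setupRandom = new Random(SETUP_SEED);
                serviceHolder = "service-" + setupRandom.nextInt(1000);
            }
            // ... the test then uses testRandom exclusively ...
        }

        public static void main(String[] args) {
            beforeTest(new Random(42));
            System.out.println(serviceHolder);
        }
    }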
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index 59b65f9f34a67..440f9ab087d16 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -53,6 +53,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.CheckedBiFunction;
 import org.elasticsearch.common.CheckedRunnable;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.io.PathUtilsForTesting;
@@ -197,13 +198,9 @@ public static void resetPortCounter() {
     }
 
     static {
-        System.setProperty("log4j.shutdownHookEnabled", "false");
-        System.setProperty("log4j2.disable.jmx", "true");
-
+        setTestSysProps();
         LogConfigurator.loadLog4jPlugins();
 
-        // Enable Netty leak detection and monitor logger for logged leak errors
-        System.setProperty("io.netty.leakDetection.level", "paranoid");
         String leakLoggerName = "io.netty.util.ResourceLeakDetector";
         Logger leakLogger = LogManager.getLogger(leakLoggerName);
         Appender leakAppender = new AbstractAppender(leakLoggerName, null,
@@ -242,6 +239,14 @@ public void append(LogEvent event) {
         Collections.sort(javaZoneIds);
         JAVA_ZONE_IDS = Collections.unmodifiableList(javaZoneIds);
     }
+
+    @SuppressForbidden(reason = "force log4j and netty sysprops")
+    private static void setTestSysProps() {
+        System.setProperty("log4j.shutdownHookEnabled", "false");
+        System.setProperty("log4j2.disable.jmx", "true");
+
+        // Enable Netty leak detection and monitor logger for logged leak errors
+        System.setProperty("io.netty.leakDetection.level", "paranoid");
+    }
 
     protected final Logger logger = Loggers.getLogger(getClass());
     protected final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
index 5157792e1338d..30f559db8516b 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
@@ -2025,8 +2025,10 @@ public void ensureEstimatedStats() {
                 final CircuitBreakerService breakerService = getInstanceFromNode(CircuitBreakerService.class, nodeAndClient.node);
                 CircuitBreaker fdBreaker = breakerService.getBreaker(CircuitBreaker.FIELDDATA);
                 assertThat("Fielddata breaker not reset to 0 on node: " + name, fdBreaker.getUsed(), equalTo(0L));
-                CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
-                assertThat("Accounting breaker not reset to 0 on node: " + name, acctBreaker.getUsed(), equalTo(0L));
+                // TODO: This is commented out while Lee looks into the failures
+                // See: https://github.com/elastic/elasticsearch/issues/30290
+                // CircuitBreaker acctBreaker = breakerService.getBreaker(CircuitBreaker.ACCOUNTING);
+                // assertThat("Accounting breaker not reset to 0 on node: " + name, acctBreaker.getUsed(), equalTo(0L));
                 // Anything that uses transport or HTTP can increase the
                 // request breaker (because they use bigarrays), because of
                 // that the breaker can sometimes be incremented from ping
diff --git a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java
index daa70298224d0..7fb4e7c55ff67 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java
@@ -20,6 +20,8 @@
 package org.elasticsearch.test.fixture;
 
 import com.sun.net.httpserver.HttpServer;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.io.PathUtils;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -32,7 +34,6 @@
 import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.HashMap;
 import java.util.List;
@@ -48,6 +49,7 @@
 /**
  * Base class for test fixtures that requires a {@link HttpServer} to work.
 */
+@SuppressForbidden(reason = "uses httpserver by design")
 public abstract class AbstractHttpFixture {
 
     protected static final Map<String, String> TEXT_PLAIN_CONTENT_TYPE = contentType("text/plain; charset=utf-8");
@@ -62,7 +64,7 @@ public abstract class AbstractHttpFixture {
     private final Path workingDirectory;
 
     protected AbstractHttpFixture(final String workingDir) {
-        this.workingDirectory = Paths.get(Objects.requireNonNull(workingDir));
+        this.workingDirectory = PathUtils.get(Objects.requireNonNull(workingDir));
     }
 
     /**
diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
index 877a6f2e98a67..cddcca59e6cf5 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java
@@ -22,6 +22,7 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESTestCase;
@@ -86,7 +87,12 @@ public void testFailure(Failure failure) throws Exception {
             gradleMessageBuilder.appendClientYamlSuiteProperties();
         }
 
-        System.err.println(b.toString());
+        printToErr(b.toString());
+    }
+
+    @SuppressForbidden(reason = "printing repro info")
+    private static void printToErr(String s) {
+        System.err.println(s);
     }
 
     protected static class GradleMessageBuilder extends ReproduceErrorMessageBuilder {
@@ -150,6 +156,10 @@ public ReproduceErrorMessageBuilder appendESProperties() {
         appendOpt("tests.locale", Locale.getDefault().toLanguageTag());
         appendOpt("tests.timezone", TimeZone.getDefault().getID());
         appendOpt("tests.distribution", System.getProperty("tests.distribution"));
+        appendOpt("compiler.java", System.getProperty("compiler.java"));
+        appendOpt("runtime.java", System.getProperty("runtime.java"));
+        appendOpt("javax.net.ssl.keyStorePassword", System.getProperty("javax.net.ssl.keyStorePassword"));
+        appendOpt("javax.net.ssl.trustStorePassword", System.getProperty("javax.net.ssl.trustStorePassword"));
         return this;
     }
 
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java
index 403c738b84410..82c177f0904ab 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java
@@ -28,7 +28,7 @@
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpHost;
 import org.elasticsearch.client.RestClientBuilder;
-import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
 
 import java.io.IOException;
@@ -49,8 +49,8 @@ public ClientYamlDocsTestClient(
             final RestClient restClient,
             final List<HttpHost> hosts,
             final Version esVersion,
-            final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer) {
-        super(restSpec, restClient, hosts, esVersion, clientBuilderConsumer);
+            final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes) {
+        super(restSpec, restClient, hosts, esVersion, clientBuilderWithSniffedNodes);
     }
 
     @Override
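The CheckedConsumer-to-CheckedSupplier switch in these yaml test clients inverts ownership of builder creation: instead of the client mutating a builder it constructed itself (from possibly stale nodes), the callback now produces a fully configured builder, with freshly sniffed nodes, on demand. A self-contained sketch contrasting the two shapes (Builder and its nodes field are hypothetical):

    public class CheckedSupplierSketch {
        @FunctionalInterface
        interface CheckedConsumer<T, E extends Exception> { void accept(T t) throws E; }

        @FunctionalInterface
        interface CheckedSupplier<T, E extends Exception> { T get() throws E; }

        static class Builder {
            String nodes = "stale-nodes";
            Builder nodes(String nodes) { this.nodes = nodes; return this; }
        }

        public static void main(String[] args) throws Exception {
            // before: caller constructs, callback can only tweak what it is given
            final CheckedConsumer<Builder, Exception> consumer = b -> b.nodes("configured");
            final Builder fromConsumer = new Builder(); // node list chosen by the caller
            consumer.accept(fromConsumer);

            // after: callback owns construction, so it can start from freshly sniffed nodes
            final CheckedSupplier<Builder, Exception> supplier = () -> new Builder().nodes("sniffed-nodes");
            final Builder fromSupplier = supplier.get();
            System.out.println(fromConsumer.nodes + " vs " + fromSupplier.nodes);
        }
    }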
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
index 8a4f77dbd66f7..2bacd20b35fb9 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java
@@ -26,7 +26,6 @@
 import org.apache.http.util.EntityUtils;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.Version;
-import org.elasticsearch.client.Node;
 import org.elasticsearch.client.NodeSelector;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
@@ -34,7 +33,7 @@
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestClientBuilder;
-import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
 import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath;
@@ -65,19 +64,19 @@ public class ClientYamlTestClient implements Closeable {
     private final ClientYamlSuiteRestSpec restSpec;
     private final Map<NodeSelector, RestClient> restClients = new HashMap<>();
     private final Version esVersion;
-    private final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer;
+    private final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes;
 
     ClientYamlTestClient(
             final ClientYamlSuiteRestSpec restSpec,
             final RestClient restClient,
             final List<HttpHost> hosts,
             final Version esVersion,
-            final CheckedConsumer<RestClientBuilder, IOException> clientBuilderConsumer) {
+            final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes) {
         assert hosts.size() > 0;
         this.restSpec = restSpec;
         this.restClients.put(NodeSelector.ANY, restClient);
         this.esVersion = esVersion;
-        this.clientBuilderConsumer = clientBuilderConsumer;
+        this.clientBuilderWithSniffedNodes = clientBuilderWithSniffedNodes;
     }
 
     public Version getEsVersion() {
@@ -192,10 +191,9 @@ public ClientYamlTestResponse callApi(String apiName, Map<String, String> params
     protected RestClient getRestClient(NodeSelector nodeSelector) {
         //lazily build a new client in case we need to point to some specific node
         return restClients.computeIfAbsent(nodeSelector, selector -> {
-            RestClient anyClient = restClients.get(NodeSelector.ANY);
-            RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0]));
+            RestClientBuilder builder;
             try {
-                clientBuilderConsumer.accept(builder);
+                builder = clientBuilderWithSniffedNodes.get();
             } catch (IOException e) {
                 throw new UncheckedIOException(e);
             }
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
index 8218ce5dddaca..a86ff5d805ecb 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
@@ -27,6 +27,7 @@
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestClientBuilder;
 import org.elasticsearch.client.sniff.ElasticsearchNodesSniffer;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
@@ -58,13 +59,6 @@
 /**
  * Runs a suite of yaml tests shared with all the official Elasticsearch
  * clients against against an elasticsearch cluster.
- * <p>
- * IMPORTANT: These tests sniff the cluster for metadata
- * and hosts on startup and replace the list of hosts that they are
- * configured to use with the list sniffed from the cluster. So you can't
- * control which nodes receive the request by providing the right list of
- * nodes in the <code>tests.rest.cluster</code> system property. Instead
- * the tests must explictly use `node_selector`s.
 */
 public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
 
@@ -116,11 +110,6 @@ protected ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) {
     @Before
     public void initAndResetContext() throws Exception {
         if (restTestExecutionContext == null) {
-            // Sniff host metadata in case we need it in the yaml tests
-            List<Node> nodesWithMetadata = sniffHostMetadata();
-            client().setNodes(nodesWithMetadata);
-            adminClient().setNodes(nodesWithMetadata);
-
             assert adminExecutionContext == null;
             assert blacklistPathMatchers == null;
             ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH);
@@ -172,8 +161,7 @@ protected ClientYamlTestClient initClientYamlTestClient(
             final RestClient restClient,
             final List<HttpHost> hosts,
             final Version esVersion) {
-        return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion,
-            restClientBuilder -> configureClient(restClientBuilder, restClientSettings()));
+        return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, this::getClientBuilderWithSniffedHosts);
     }
 
     @AfterClass
@@ -407,13 +395,16 @@ protected boolean randomizeContentType() {
     }
 
     /**
-     * Sniff the cluster for host metadata.
+     * Sniff the cluster for host metadata and return a
+     * {@link RestClientBuilder} for a client with that metadata.
     */
-    private List<Node> sniffHostMetadata() throws IOException {
+    protected final RestClientBuilder getClientBuilderWithSniffedHosts() throws IOException {
         ElasticsearchNodesSniffer.Scheme scheme = ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT));
         ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer(
             adminClient(), ElasticsearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme);
-        return sniffer.sniff();
+        RestClientBuilder builder = RestClient.builder(sniffer.sniff().toArray(new Node[0]));
+        configureClient(builder, restClientSettings());
+        return builder;
     }
 }

        - * IMPORTANT: These tests sniff the cluster for metadata - * and hosts on startup and replace the list of hosts that they are - * configured to use with the list sniffed from the cluster. So you can't - * control which nodes receive the request by providing the right list of - * nodes in the tests.rest.cluster system property. Instead - * the tests must explictly use `node_selector`s. */ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { @@ -116,11 +110,6 @@ protected ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) { @Before public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { - // Sniff host metadata in case we need it in the yaml tests - List nodesWithMetadata = sniffHostMetadata(); - client().setNodes(nodesWithMetadata); - adminClient().setNodes(nodesWithMetadata); - assert adminExecutionContext == null; assert blacklistPathMatchers == null; ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH); @@ -172,8 +161,7 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion) { - return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, - restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); + return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, this::getClientBuilderWithSniffedHosts); } @AfterClass @@ -407,13 +395,16 @@ protected boolean randomizeContentType() { } /** - * Sniff the cluster for host metadata. + * Sniff the cluster for host metadata and return a + * {@link RestClientBuilder} for a client with that metadata. */ - private List sniffHostMetadata() throws IOException { + protected final RestClientBuilder getClientBuilderWithSniffedHosts() throws IOException { ElasticsearchNodesSniffer.Scheme scheme = ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT)); ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer( adminClient(), ElasticsearchNodesSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT, scheme); - return sniffer.sniff(); + RestClientBuilder builder = RestClient.builder(sniffer.sniff().toArray(new Node[0])); + configureClient(builder, restClientSettings()); + return builder; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 33f776bc1f185..f941cc55b2743 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -389,7 +389,32 @@ private static NodeSelector parseAttributeValuesSelector(XContentParser parser) if (token == XContentParser.Token.FIELD_NAME) { key = parser.currentName(); } else if (token.isValue()) { - NodeSelector newSelector = new HasAttributeNodeSelector(key, parser.text()); + /* + * HasAttributeNodeSelector selects nodes that do not have + * attribute metadata set so it can be used against nodes that + * have not yet been sniffed. In these tests we expect the node + * metadata to be explicitly sniffed if we need it and we'd + * like to hard fail if it is not so we wrap the selector so we + * can assert that the data is sniffed. 
+ */ + NodeSelector delegate = new HasAttributeNodeSelector(key, parser.text()); + NodeSelector newSelector = new NodeSelector() { + @Override + public void select(Iterable nodes) { + for (Node node : nodes) { + if (node.getAttributes() == null) { + throw new IllegalStateException("expected [attributes] metadata to be set but got " + + node); + } + } + delegate.select(nodes); + } + + @Override + public String toString() { + return delegate.toString(); + } + }; result = result == NodeSelector.ANY ? newSelector : new ComposeNodeSelector(result, newSelector); } else { diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index fb8c54540ae97..85c4d478e1887 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -62,6 +63,7 @@ import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; +import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1895,7 +1897,7 @@ public void testTimeoutPerConnection() throws IOException { // means that once we received an ACK from the client we just drop the packet on the floor (which is what we want) and we run // into a connection timeout quickly. Yet other implementations can, for instance, terminate the connection within the 3-way // handshake, which I haven't tested yet.
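The bind-with-backlog trick that the comment above relies on can be shown in isolation. Below is a minimal, self-contained sketch, not part of this change; the class name and the one-second timeouts are arbitrary. A ServerSocket bound with a backlog of 1 that never calls accept() fills the kernel's accept queue, so a later connect attempt may stall until its own timeout fires. As the comment itself hedges, whether the second connect times out or is accepted anyway depends on the platform's TCP stack.

```java
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;

public class BacklogTimeoutSketch {
    public static void main(String[] args) throws IOException {
        try (ServerSocket server = new ServerSocket()) {
            // Backlog of 1 and no accept() call: the first completed connection
            // occupies the only slot in the accept queue.
            server.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 1);
            InetSocketAddress address = (InetSocketAddress) server.getLocalSocketAddress();
            try (Socket first = new Socket()) {
                first.connect(address, 1000);
                try (Socket second = new Socket()) {
                    // On stacks that drop SYNs once the queue is full this stalls
                    // and the 1s timeout fires; other stacks accept it regardless.
                    second.connect(address, 1000);
                    System.out.println("second connect was accepted by this TCP stack");
                } catch (SocketTimeoutException e) {
                    System.out.println("second connect timed out: " + e);
                }
            }
        }
    }
}
```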
- socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); + socket.bind(getLocalEphemeral(), 1); socket.setReuseAddress(true); DiscoveryNode first = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()), emptyMap(), @@ -2018,7 +2020,7 @@ protected String handleRequest(TcpChannel mockChannel, String profileName, Strea public void testTcpHandshakeTimeout() throws IOException { try (ServerSocket socket = new MockServerSocket()) { - socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); + socket.bind(getLocalEphemeral(), 1); socket.setReuseAddress(true); DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()), emptyMap(), @@ -2039,7 +2041,7 @@ public void testTcpHandshakeTimeout() throws IOException { public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException { try (ServerSocket socket = new MockServerSocket()) { - socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); + socket.bind(getLocalEphemeral(), 1); socket.setReuseAddress(true); DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), socket.getLocalPort()), emptyMap(), @@ -2669,4 +2671,8 @@ public void onConnectionOpened(final Transport.Connection connection) { protected abstract void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException; + @SuppressForbidden(reason = "need local ephemeral port") + private InetSocketAddress getLocalEphemeral() throws UnknownHostException { + return new InetSocketAddress(InetAddress.getLocalHost(), 0); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 0655a6d871197..71028a9883cb7 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import org.elasticsearch.cli.SuppressForbidden; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; @@ -171,6 +172,7 @@ private void readMessage(MockChannel mockChannel, StreamInput input) throws IOEx } @Override + @SuppressForbidden(reason = "real socket for mocking remote connections") protected MockChannel initiateChannel(DiscoveryNode node, TimeValue connectTimeout, ActionListener connectListener) throws IOException { InetSocketAddress address = node.getAddress().address(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioGroup.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioGroup.java index b0e1862c706ca..d4b3e16944db4 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioGroup.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioGroup.java @@ -20,7 +20,7 @@ package org.elasticsearch.transport.nio; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.IOUtils; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.transport.nio.channel.ChannelFactory; import org.elasticsearch.transport.nio.channel.NioServerSocketChannel; import org.elasticsearch.transport.nio.channel.NioSocketChannel; diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java 
b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index e36ddc5f1c2df..30f7918b14788 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -539,6 +539,15 @@ public void testNodeSelectorByVersion() throws IOException { doSection.execute(context); verify(context).callApi("indices.get_field_mapping", singletonMap("index", "test_index"), emptyList(), emptyMap(), doSection.getApiCallSection().getNodeSelector()); + + { + List badNodes = new ArrayList<>(); + badNodes.add(new Node(new HttpHost("dummy"))); + Exception e = expectThrows(IllegalStateException.class, () -> + doSection.getApiCallSection().getNodeSelector().select(badNodes)); + assertEquals("expected [version] metadata to be set but got [host=http://dummy]", + e.getMessage()); + } } private static Node nodeWithVersion(String version) { @@ -567,6 +576,14 @@ public void testNodeSelectorByAttribute() throws IOException { doSection.getApiCallSection().getNodeSelector().select(nodes); assertEquals(Arrays.asList(hasAttr), nodes); } + { + List badNodes = new ArrayList<>(); + badNodes.add(new Node(new HttpHost("dummy"))); + Exception e = expectThrows(IllegalStateException.class, () -> + doSection.getApiCallSection().getNodeSelector().select(badNodes)); + assertEquals("expected [attributes] metadata to be set but got [host=http://dummy]", + e.getMessage()); + } parser = createParser(YamlXContent.yamlXContent, "node_selector:\n" + diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 98fe76bfcdc31..c16dab6a625c8 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -45,3 +45,13 @@ thirdPartyAudit.excludes = [ 'org.osgi.framework.wiring.BundleWire', 'org.osgi.framework.wiring.BundleWiring' ] + +if (JavaVersion.current() <= JavaVersion.VERSION_1_8) { + // Used by Log4J 2.11.1 + thirdPartyAudit.excludes += [ + 'java.io.ObjectInputFilter', + 'java.io.ObjectInputFilter$Config', + 'java.io.ObjectInputFilter$FilterInfo', + 'java.io.ObjectInputFilter$Status' + ] +} \ No newline at end of file diff --git a/x-pack/docs/en/ml/detector-custom-rules.asciidoc b/x-pack/docs/en/ml/detector-custom-rules.asciidoc index e0ca36052840f..8513c7e4d2566 100644 --- a/x-pack/docs/en/ml/detector-custom-rules.asciidoc +++ b/x-pack/docs/en/ml/detector-custom-rules.asciidoc @@ -1,11 +1,11 @@ [role="xpack"] [[ml-configuring-detector-custom-rules]] -=== Customizing detectors with rules and filters +=== Customizing detectors with custom rules -<> enable you to change the behavior of anomaly +<> enable you to change the behavior of anomaly detectors based on domain-specific knowledge. -Rules describe _when_ a detector should take a certain _action_ instead +Custom rules describe _when_ a detector should take a certain _action_ instead of following its default behavior. To specify the _when_, a rule uses a `scope` and `conditions`. You can think of `scope` as the categorical specification of a rule, while `conditions` are the numerical part. @@ -14,7 +14,7 @@ scope and conditions. Let us see how these can be configured through examples. -==== Specifying rule scope +==== Specifying custom rule scope Let us assume we are configuring a job in order to detect DNS data exfiltration. Our data contain fields "subdomain" and "highest_registered_domain".
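The console snippets that the original asciidoc page shows at this point are elided in this diff. Purely as an illustration of what the section describes, the same kind of request can be issued with the low-level Java REST client that this PR touches elsewhere. The filter id `safe_domains`, the job id, and the filter items are hypothetical stand-ins; the JSON sketches a detector whose `scope` references a filter, not necessarily the docs' exact payload.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class CustomRuleScopeSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // A filter of known-good domains that the rule's scope will reference.
            Request filter = new Request("PUT", "/_xpack/ml/filters/safe_domains");
            filter.setJsonEntity("{ \"items\": [ \"internal.example.com\" ] }");
            client.performRequest(filter);

            // A detector that skips results when highest_registered_domain
            // is included in the referenced filter.
            Request job = new Request("PUT", "/_xpack/ml/anomaly_detectors/dns_exfiltration");
            job.setJsonEntity(
                "{\n" +
                "  \"analysis_config\": {\n" +
                "    \"bucket_span\": \"5m\",\n" +
                "    \"detectors\": [ {\n" +
                "      \"function\": \"high_info_content\",\n" +
                "      \"field_name\": \"subdomain\",\n" +
                "      \"over_field_name\": \"highest_registered_domain\",\n" +
                "      \"custom_rules\": [ {\n" +
                "        \"actions\": [ \"skip_result\" ],\n" +
                "        \"scope\": {\n" +
                "          \"highest_registered_domain\": {\n" +
                "            \"filter_id\": \"safe_domains\",\n" +
                "            \"filter_type\": \"include\"\n" +
                "          }\n" +
                "        }\n" +
                "      } ]\n" +
                "    } ]\n" +
                "  },\n" +
                "  \"data_description\": { \"time_field\": \"timestamp\" }\n" +
                "}");
            Response response = client.performRequest(job);
            System.out.println(response.getStatusLine());
        }
    }
}
```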
@@ -127,7 +127,7 @@ PUT _xpack/ml/anomaly_detectors/scoping_multiple_fields Such a detector will skip results when the values of all 3 scoped fields are included in the referenced filters. -==== Specifying rule conditions +==== Specifying custom rule conditions Imagine a detector that looks for anomalies in CPU utilization. Given a machine that is idle for long enough, small movement in CPU could @@ -206,9 +206,9 @@ PUT _xpack/ml/anomaly_detectors/rule_with_range ---------------------------------- // CONSOLE -==== Rules in the life-cycle of a job +==== Custom rules in the life-cycle of a job -Rules only affect results created after the rules were applied. +Custom rules only affect results created after the rules were applied. Let us imagine that we have configured a job and it has been running for some time. After observing its results we decide that we can employ rules in order to get rid of some uninteresting results. We can use @@ -216,7 +216,7 @@ the {ref}/ml-update-job.html[update job API] to do so. However, the rule we added will only be in effect for any results created from the moment we added the rule onwards. Past results will remain unaffected. -==== Using rules VS filtering data +==== Using custom rules vs filtering data It might appear that using rules is just another way of filtering the data that feeds into a job. For example, a rule that skips results when the diff --git a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc b/x-pack/docs/en/rest-api/ml/filterresource.asciidoc index 64768da4911c4..e942447c1ee60 100644 --- a/x-pack/docs/en/rest-api/ml/filterresource.asciidoc +++ b/x-pack/docs/en/rest-api/ml/filterresource.asciidoc @@ -13,4 +13,4 @@ A filter resource has the following properties: `items`:: (array of strings) An array of strings which is the filter item list. -For more information, see {stack-ov}/ml-rules.html[Machine learning rules and filters]. +For more information, see {stack-ov}/ml-rules.html[Machine learning custom rules]. diff --git a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc b/x-pack/docs/en/rest-api/ml/get-filter.asciidoc index 89f40cf331251..b4699e9d622cf 100644 --- a/x-pack/docs/en/rest-api/ml/get-filter.asciidoc +++ b/x-pack/docs/en/rest-api/ml/get-filter.asciidoc @@ -18,7 +18,7 @@ Retrieves filters. ===== Description You can get a single filter or all filters. For more information, see -{stack-ov}/ml-rules.html[Machine learning rules and filters]. +{stack-ov}/ml-rules.html[Machine learning custom rules]. ==== Path Parameters diff --git a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc index 7cccbb83ac029..5b109b1c21d78 100644 --- a/x-pack/docs/en/rest-api/ml/jobresource.asciidoc +++ b/x-pack/docs/en/rest-api/ml/jobresource.asciidoc @@ -265,7 +265,7 @@ NOTE: The `field_name` cannot contain double quotes or backslashes. when there is no value for the by or partition fields. The default value is `false`. `custom_rules`:: - (array) An array of rule objects, which enable customizing how the detector works. + (array) An array of custom rule objects, which enable customizing how the detector works. For example, a rule may dictate the conditions under which the detector should skip results. For more information see <>. + + @@ -420,7 +420,7 @@ For more information, see {stack-ov}/ml-rules.html[Custom rules] enable you to customize the way detectors operate.
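In the same illustrative spirit (the `rule_with_range` snippet above is truncated by the diff), a `conditions`-based rule can be sketched as follows. The job id `cpu_with_rule`, the `high_mean` function, and the 0.20 threshold are assumptions chosen to match the idle-CPU scenario the section describes, not a quotation of the docs.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

public class CustomRuleConditionsSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // Skip results while the actual CPU utilization stays below 0.20:
            // anomalies on an idle machine are not interesting to the user.
            Request job = new Request("PUT", "/_xpack/ml/anomaly_detectors/cpu_with_rule");
            job.setJsonEntity(
                "{\n" +
                "  \"analysis_config\": {\n" +
                "    \"bucket_span\": \"5m\",\n" +
                "    \"detectors\": [ {\n" +
                "      \"function\": \"high_mean\",\n" +
                "      \"field_name\": \"cpu_utilization\",\n" +
                "      \"custom_rules\": [ {\n" +
                "        \"actions\": [ \"skip_result\" ],\n" +
                "        \"conditions\": [\n" +
                "          { \"applies_to\": \"actual\", \"operator\": \"lt\", \"value\": 0.20 }\n" +
                "        ]\n" +
                "      } ]\n" +
                "    } ]\n" +
                "  },\n" +
                "  \"data_description\": { \"time_field\": \"timestamp\" }\n" +
                "}");
            System.out.println(client.performRequest(job).getStatusLine());
        }
    }
}
```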
-A rule has the following properties: +A custom rule has the following properties: `actions`:: (array) The set of actions to be triggered when the rule applies. diff --git a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java index 3d01594e6d730..f15944181922c 100644 --- a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java @@ -58,8 +58,7 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion) { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, - restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, this::getClientBuilderWithSniffedHosts); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java index de356870fbbe8..9a91f933df0f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java @@ -7,6 +7,8 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; public class DeleteLicenseAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java deleted file mode 100644 index 29558cf9e42bb..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequest.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.license; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; - -import java.io.IOException; - - -public class DeleteLicenseRequest extends AcknowledgedRequest { - - public DeleteLicenseRequest() { - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java index b554b0055376b..ad58cd3e1a5b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseRequestBuilder.java @@ -7,6 +7,8 @@ import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; public class DeleteLicenseRequestBuilder extends AcknowledgedRequestBuilder { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java deleted file mode 100644 index 0dd092d6fe6ae..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseResponse.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.license; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; - -public class DeleteLicenseResponse extends AcknowledgedResponse { - - DeleteLicenseResponse() { - } - - DeleteLicenseResponse(boolean acknowledged) { - super(acknowledged); - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java index 472634107969a..2b46f8b407320 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; public class GetLicenseAction extends Action { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequest.java deleted file mode 100644 index 965308de0e4ff..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequest.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.license; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; - - -public class GetLicenseRequest extends MasterNodeReadRequest { - - public GetLicenseRequest() { - } - - @Override - public ActionRequestValidationException validate() { - return null; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java index 7e92a54bce2d6..ed72e67468731 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseRequestBuilder.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; public class GetLicenseRequestBuilder extends MasterNodeReadOperationRequestBuilder { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index d299406aad06c..e087438924394 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -28,6 +28,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.watcher.ResourceWatcherService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java index 14a059e9e014a..dfccd1d419054 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensingClient.java @@ -7,6 +7,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; public class LicensingClient { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java index da7f11ec4c006..68bc0c31968e9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestDeleteLicenseAction.java @@ -6,6 +6,7 @@ package org.elasticsearch.license; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import 
org.elasticsearch.rest.action.RestToXContentListener; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java index fabfe7a1f889b..086c24a4917e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/RestGetLicenseAction.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java index a1d57684a1737..4a6b5e0116f6f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportDeleteLicenseAction.java @@ -17,6 +17,8 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java index 58a5ae1fe70a5..2fd7680eb0e1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportGetLicenseAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index abfee4b9eb3d1..266eef9f7f943 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.ssl.VerificationMode; import javax.crypto.Cipher; +import javax.crypto.SecretKeyFactory; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; @@ -131,8 +132,16 @@ private XPackSettings() { public static final Setting PASSWORD_HASHING_ALGORITHM = new Setting<>( "xpack.security.authc.password_hashing.algorithm", "bcrypt", Function.identity(), (v, s) -> { if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) { - throw new IllegalArgumentException("Invalid algorithm: " + v + ". 
Only pbkdf2 or bcrypt family algorithms can be used for " + - "password hashing."); + throw new IllegalArgumentException("Invalid algorithm: " + v + ". Valid values for password hashing are " + + Hasher.getAvailableAlgoStoredHash().toString()); + } else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) { + try { + SecretKeyFactory.getInstance("PBKDF2withHMACSHA512"); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException( + "Support for PBKDF2WithHMACSHA512 must be available in order to use any of the " + + "PBKDF2 algorithms for the [xpack.security.authc.password_hashing.algorithm] setting.", e); + } } }, Setting.Property.NodeScope); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java index 2e8ed2852e27f..4fa5d84c7d476 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java @@ -218,7 +218,7 @@ public static class JobWrapper implements Writeable, ToXContentObject { (RollupJobStats) a[1], (RollupJobStatus)a[2])); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.PARSER.apply(p,c).build(), CONFIG); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG); PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStats.PARSER::apply, STATS); PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index c6e279f11db97..7a420ca60ff60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -58,9 +58,8 @@ public Request() { } - public static Request parseRequest(String id, XContentParser parser) { - RollupJobConfig.Builder config = RollupJobConfig.Builder.fromXContent(id, parser); - return new Request(config.build()); + public static Request fromXContent(final XContentParser parser, final String id) throws IOException { + return new Request(RollupJobConfig.fromXContent(parser, id)); } public RollupJobConfig getConfig() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java index c8874ae459d2d..1b8eb736084a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupJobCaps.java @@ -42,8 +42,8 @@ public RollupJobCaps(RollupJobConfig job) { jobID = job.getId(); rollupIndex = job.getRollupIndex(); indexPattern = job.getIndexPattern(); - Map dateHistoAggCap = job.getGroupConfig().getDateHisto().toAggCap(); - String dateField = job.getGroupConfig().getDateHisto().getField(); + Map dateHistoAggCap = job.getGroupConfig().getDateHistogram().toAggCap(); + String dateField = 
job.getGroupConfig().getDateHistogram().getField(); RollupFieldCaps fieldCaps = fieldCapLookup.get(dateField); if (fieldCaps == null) { fieldCaps = new RollupFieldCaps(); @@ -51,9 +51,9 @@ public RollupJobCaps(RollupJobConfig job) { fieldCaps.addAgg(dateHistoAggCap); fieldCapLookup.put(dateField, fieldCaps); - if (job.getGroupConfig().getHisto() != null) { - Map histoAggCap = job.getGroupConfig().getHisto().toAggCap(); - Arrays.stream(job.getGroupConfig().getHisto().getFields()).forEach(field -> { + if (job.getGroupConfig().getHistogram() != null) { + Map histoAggCap = job.getGroupConfig().getHistogram().toAggCap(); + Arrays.stream(job.getGroupConfig().getHistogram().getFields()).forEach(field -> { RollupFieldCaps caps = fieldCapLookup.get(field); if (caps == null) { caps = new RollupFieldCaps(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java index add60403a9882..77dfa1cbbb1c3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java @@ -54,7 +54,7 @@ */ public class DateHistogramGroupConfig implements Writeable, ToXContentObject { - private static final String NAME = "date_histogram"; + static final String NAME = "date_histogram"; private static final String INTERVAL = "interval"; private static final String FIELD = "field"; public static final String TIME_ZONE = "time_zone"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java index 59e7d1127e17f..f7685f4e6143b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/GroupConfig.java @@ -13,17 +13,21 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Set; import static java.util.Arrays.asList; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * The configuration object for the groups section in the rollup config. 
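The GroupConfig rewrite that follows replaces a mutable Builder plus ObjectParser with a ConstructingObjectParser whose arguments arrive positionally and whose validation moves into the constructor. A minimal sketch of that pattern, using a made-up SampleConfig instead of the real rollup classes (only the parser idiom is taken from this PR):

```java
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

public class ParserMigrationSketch {
    // An immutable config object: required "field", optional "interval".
    static final class SampleConfig {
        final String field;
        final Long interval;
        SampleConfig(String field, Long interval) {
            if (field == null || field.isEmpty()) {
                throw new IllegalArgumentException("Field must be a non-null, non-empty string");
            }
            this.field = field;
            this.interval = interval;
        }
    }

    // Arguments are handed to the factory function in declaration order, so
    // validation lives in the constructor instead of a Builder#build() method.
    static final ConstructingObjectParser<SampleConfig, Void> PARSER =
        new ConstructingObjectParser<>("sample_config",
            args -> new SampleConfig((String) args[0], (Long) args[1]));
    static {
        PARSER.declareString(constructorArg(), new ParseField("field"));
        PARSER.declareLong(optionalConstructorArg(), new ParseField("interval"));
    }

    public static void main(String[] args) throws IOException {
        String json = "{\"field\":\"timestamp\",\"interval\":60}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            SampleConfig config = PARSER.parse(parser, null);
            System.out.println(config.field + " every " + config.interval + "s");
        }
    }
}
```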
@@ -38,64 +42,85 @@ * } */ public class GroupConfig implements Writeable, ToXContentObject { - private static final String NAME = "grouping_config"; - private static final ParseField DATE_HISTO = new ParseField("date_histogram"); - private static final ParseField HISTO = new ParseField("histogram"); - private static final ParseField TERMS = new ParseField("terms"); - private final DateHistogramGroupConfig dateHisto; - private final HistogramGroupConfig histo; - private final TermsGroupConfig terms; + public static final String NAME = "groups"; + private static final ConstructingObjectParser PARSER; + static { + PARSER = new ConstructingObjectParser<>(NAME, args -> + new GroupConfig((DateHistogramGroupConfig) args[0], (HistogramGroupConfig) args[1], (TermsGroupConfig) args[2])); + PARSER.declareObject(constructorArg(), + (p, c) -> DateHistogramGroupConfig.fromXContent(p), new ParseField(DateHistogramGroupConfig.NAME)); + PARSER.declareObject(optionalConstructorArg(), + (p, c) -> HistogramGroupConfig.fromXContent(p), new ParseField(HistogramGroupConfig.NAME)); + PARSER.declareObject(optionalConstructorArg(), + (p, c) -> TermsGroupConfig.fromXContent(p), new ParseField(TermsGroupConfig.NAME)); + } - public static final ObjectParser PARSER = new ObjectParser<>(NAME, GroupConfig.Builder::new); + private final DateHistogramGroupConfig dateHistogram; + private final @Nullable HistogramGroupConfig histogram; + private final @Nullable TermsGroupConfig terms; - static { - PARSER.declareObject(GroupConfig.Builder::setDateHisto, (p,c) -> DateHistogramGroupConfig.fromXContent(p), DATE_HISTO); - PARSER.declareObject(GroupConfig.Builder::setHisto, (p,c) -> HistogramGroupConfig.fromXContent(p), HISTO); - PARSER.declareObject(GroupConfig.Builder::setTerms, (p,c) -> TermsGroupConfig.fromXContent(p), TERMS); + public GroupConfig(final DateHistogramGroupConfig dateHistogram) { + this(dateHistogram, null, null); } - private GroupConfig(DateHistogramGroupConfig dateHisto, @Nullable HistogramGroupConfig histo, @Nullable TermsGroupConfig terms) { - this.dateHisto = Objects.requireNonNull(dateHisto, "A date_histogram group is mandatory"); - this.histo = histo; + public GroupConfig(final DateHistogramGroupConfig dateHistogram, + final @Nullable HistogramGroupConfig histogram, + final @Nullable TermsGroupConfig terms) { + if (dateHistogram == null) { + throw new IllegalArgumentException("Date histogram must not be null"); + } + this.dateHistogram = dateHistogram; + this.histogram = histogram; this.terms = terms; } - GroupConfig(StreamInput in) throws IOException { - dateHisto = new DateHistogramGroupConfig(in); - histo = in.readOptionalWriteable(HistogramGroupConfig::new); + GroupConfig(final StreamInput in) throws IOException { + dateHistogram = new DateHistogramGroupConfig(in); + histogram = in.readOptionalWriteable(HistogramGroupConfig::new); terms = in.readOptionalWriteable(TermsGroupConfig::new); } - public DateHistogramGroupConfig getDateHisto() { - return dateHisto; + /** + * @return the configuration of the date histogram + */ + public DateHistogramGroupConfig getDateHistogram() { + return dateHistogram; } - public HistogramGroupConfig getHisto() { - return histo; + /** + * @return the configuration of the histogram + */ + @Nullable + public HistogramGroupConfig getHistogram() { + return histogram; } + /** + * @return the configuration of the terms + */ + @Nullable public TermsGroupConfig getTerms() { return terms; } public Set getAllFields() { Set fields = new HashSet<>(); - 
fields.add(dateHisto.getField()); - if (histo != null) { - fields.addAll(asList(histo.getFields())); + fields.add(dateHistogram.getField()); + if (histogram != null) { + fields.addAll(asList(histogram.getFields())); } if (terms != null) { fields.addAll(asList(terms.getFields())); } - return fields; + return Collections.unmodifiableSet(fields); } - public void validateMappings(Map> fieldCapsResponse, - ActionRequestValidationException validationException) { - dateHisto.validateMappings(fieldCapsResponse, validationException); - if (histo != null) { - histo.validateMappings(fieldCapsResponse, validationException); + public void validateMappings(final Map> fieldCapsResponse, + final ActionRequestValidationException validationException) { + dateHistogram.validateMappings(fieldCapsResponse, validationException); + if (histogram != null) { + histogram.validateMappings(fieldCapsResponse, validationException); } if (terms != null) { terms.validateMappings(fieldCapsResponse, validationException); @@ -105,44 +130,43 @@ public void validateMappings(Map> fieldCa @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(DATE_HISTO.getPreferredName(), dateHisto); - if (histo != null) { - builder.field(HISTO.getPreferredName(), histo); - } - if (terms != null) { - builder.field(TERMS.getPreferredName(), terms); + { + builder.field(DateHistogramGroupConfig.NAME, dateHistogram); + if (histogram != null) { + builder.field(HistogramGroupConfig.NAME, histogram); + } + if (terms != null) { + builder.field(TermsGroupConfig.NAME, terms); + } } - builder.endObject(); - return builder; + return builder.endObject(); } @Override - public void writeTo(StreamOutput out) throws IOException { - dateHisto.writeTo(out); - out.writeOptionalWriteable(histo); + public void writeTo(final StreamOutput out) throws IOException { + dateHistogram.writeTo(out); + out.writeOptionalWriteable(histogram); out.writeOptionalWriteable(terms); } @Override - public boolean equals(Object other) { + public boolean equals(final Object other) { if (this == other) { return true; } - if (other == null || getClass() != other.getClass()) { return false; } - GroupConfig that = (GroupConfig) other; - - return Objects.equals(this.dateHisto, that.dateHisto) - && Objects.equals(this.histo, that.histo) - && Objects.equals(this.terms, that.terms); + final GroupConfig that = (GroupConfig) other; + return Objects.equals(dateHistogram, that.dateHistogram) + && Objects.equals(histogram, that.histogram) + && Objects.equals(terms, that.terms); } @Override public int hashCode() { - return Objects.hash(dateHisto, histo, terms); + return Objects.hash(dateHistogram, histogram, terms); } @Override @@ -150,43 +174,7 @@ public String toString() { return Strings.toString(this, true, true); } - public static class Builder { - private DateHistogramGroupConfig dateHisto; - private HistogramGroupConfig histo; - private TermsGroupConfig terms; - - public DateHistogramGroupConfig getDateHisto() { - return dateHisto; - } - - public GroupConfig.Builder setDateHisto(DateHistogramGroupConfig dateHisto) { - this.dateHisto = dateHisto; - return this; - } - - public HistogramGroupConfig getHisto() { - return histo; - } - - public GroupConfig.Builder setHisto(HistogramGroupConfig histo) { - this.histo = histo; - return this; - } - - public TermsGroupConfig getTerms() { - return terms; - } - - public GroupConfig.Builder setTerms(TermsGroupConfig terms) { - this.terms = terms; - return this; - } - - 
public GroupConfig build() { - if (dateHisto == null) { - throw new IllegalArgumentException("A date_histogram group is mandatory"); - } - return new GroupConfig(dateHisto, histo, terms); - } + public static GroupConfig fromXContent(final XContentParser parser) throws IOException { + return PARSER.parse(parser, null); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java index 4f67978e4bc92..0480050bf52f0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java @@ -46,7 +46,7 @@ */ public class HistogramGroupConfig implements Writeable, ToXContentObject { - public static final String NAME = "histogram"; + static final String NAME = "histogram"; private static final String INTERVAL = "interval"; private static final String FIELDS = "fields"; private static final ConstructingObjectParser PARSER; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java index 1843db5b3c304..cc673c4ed0d35 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java @@ -60,7 +60,7 @@ public class MetricConfig implements Writeable, ToXContentObject { private static final ParseField AVG = new ParseField("avg"); private static final ParseField VALUE_COUNT = new ParseField("value_count"); - private static final String NAME = "metrics"; + static final String NAME = "metrics"; private static final String FIELD = "field"; private static final String METRICS = "metrics"; private static final ConstructingObjectParser PARSER; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java index 7afcdb71b11cc..94306966a34da 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java @@ -41,7 +41,7 @@ public class RollupJob extends AbstractDiffable implements XPackPlugi = new ConstructingObjectParser<>(NAME, a -> new RollupJob((RollupJobConfig) a[0], (Map) a[1])); static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.PARSER.apply(p,c).build(), CONFIG); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG); PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java index 1abec72ef5360..27461c62b6724 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfig.java @@ -7,20 +7,19 @@ import org.elasticsearch.action.ActionRequestValidationException; import 
org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.scheduler.Cron; import java.io.IOException; @@ -30,61 +29,112 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * This class holds the configuration details of a rollup job, such as the groupings, metrics, what * index to rollup and where to roll them to. */ public class RollupJobConfig implements NamedWriteable, ToXContentObject { - private static final String NAME = "xpack/rollup/jobconfig"; - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField CURRENT = new ParseField("current"); - public static final ParseField CRON = new ParseField("cron"); - public static final ParseField PAGE_SIZE = new ParseField("page_size"); - - private static final ParseField INDEX_PATTERN = new ParseField("index_pattern"); - private static final ParseField ROLLUP_INDEX = new ParseField("rollup_index"); - private static final ParseField GROUPS = new ParseField("groups"); - private static final ParseField METRICS = new ParseField("metrics"); - - private String id; - private String indexPattern; - private String rollupIndex; - private GroupConfig groupConfig; - private List metricsConfig = Collections.emptyList(); - private TimeValue timeout = TimeValue.timeValueSeconds(20); - private String cron; - private int pageSize; - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, false, RollupJobConfig.Builder::new); + private static final String NAME = "xpack/rollup/jobconfig"; + private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(20); + private static final String ID = "id"; + private static final String TIMEOUT = "timeout"; + private static final String CRON = "cron"; + private static final String PAGE_SIZE = "page_size"; + private static final String INDEX_PATTERN = "index_pattern"; + private static final String ROLLUP_INDEX = "rollup_index"; + + private final String id; + private final String indexPattern; + private final String rollupIndex; + private final GroupConfig groupConfig; + private final List metricsConfig; + private final TimeValue timeout; + private final String cron; + private final int pageSize; + + private static final ConstructingObjectParser PARSER; static { - PARSER.declareString(RollupJobConfig.Builder::setId, RollupField.ID); - PARSER.declareObject(RollupJobConfig.Builder::setGroupConfig, (p, c) -> 
GroupConfig.PARSER.apply(p,c).build(), GROUPS); - PARSER.declareObjectArray(RollupJobConfig.Builder::setMetricsConfig, (p, c) -> MetricConfig.fromXContent(p), METRICS); - PARSER.declareString((params, val) -> - params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareString(RollupJobConfig.Builder::setIndexPattern, INDEX_PATTERN); - PARSER.declareString(RollupJobConfig.Builder::setRollupIndex, ROLLUP_INDEX); - PARSER.declareString(RollupJobConfig.Builder::setCron, CRON); - PARSER.declareInt(RollupJobConfig.Builder::setPageSize, PAGE_SIZE); + PARSER = new ConstructingObjectParser<>(NAME, false, (args, optionalId) -> { + String id = args[0] != null ? (String) args[0] : optionalId; + String indexPattern = (String) args[1]; + String rollupIndex = (String) args[2]; + GroupConfig groupConfig = (GroupConfig) args[3]; + @SuppressWarnings("unchecked") + List metricsConfig = (List) args[4]; + TimeValue timeout = (TimeValue) args[5]; + String cron = (String) args[6]; + int pageSize = (int) args[7]; + return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groupConfig, metricsConfig, timeout); + }); + PARSER.declareString(optionalConstructorArg(), new ParseField(ID)); + PARSER.declareString(constructorArg(), new ParseField(INDEX_PATTERN)); + PARSER.declareString(constructorArg(), new ParseField(ROLLUP_INDEX)); + PARSER.declareObject(optionalConstructorArg(), (p, c) -> GroupConfig.fromXContent(p), new ParseField(GroupConfig.NAME)); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> MetricConfig.fromXContent(p), new ParseField(MetricConfig.NAME)); + PARSER.declareField(optionalConstructorArg(), (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), TIMEOUT), + new ParseField(TIMEOUT), ObjectParser.ValueType.STRING_OR_NULL); + PARSER.declareString(constructorArg(), new ParseField(CRON)); + PARSER.declareInt(constructorArg(), new ParseField(PAGE_SIZE)); } - RollupJobConfig(String id, String indexPattern, String rollupIndex, String cron, int pageSize, GroupConfig groupConfig, - List metricsConfig, TimeValue timeout) { + public RollupJobConfig(final String id, + final String indexPattern, + final String rollupIndex, + final String cron, + final int pageSize, + final GroupConfig groupConfig, + final List metricsConfig, + final @Nullable TimeValue timeout) { + if (id == null || id.isEmpty()) { + throw new IllegalArgumentException("Id must be a non-null, non-empty string"); + } + if (indexPattern == null || indexPattern.isEmpty()) { + throw new IllegalArgumentException("Index pattern must be a non-null, non-empty string"); + } + if (Regex.isMatchAllPattern(indexPattern)) { + throw new IllegalArgumentException("Index pattern must not match all indices (as it would match its own rollup index)"); + } + if (Regex.isSimpleMatchPattern(indexPattern)) { + if (Regex.simpleMatch(indexPattern, rollupIndex)) { + throw new IllegalArgumentException("Index pattern would match rollup index name which is not allowed"); + } + } + if (indexPattern.equals(rollupIndex)) { + throw new IllegalArgumentException("Rollup index may not be the same as the index pattern"); + } + if (rollupIndex == null || rollupIndex.isEmpty()) { + throw new IllegalArgumentException("Rollup index must be a non-null, non-empty string"); + } + if (cron == null || cron.isEmpty()) { + throw new IllegalArgumentException("Cron schedule must be a non-null, non-empty string"); + } + if (pageSize <= 0) { + throw new IllegalArgumentException("Page size is mandatory and must be a positive
long"); + } + // Cron doesn't have a parse helper method to see if the cron is valid, + // so just construct a temporary cron object and if the cron is bad, it'll + // throw an exception + Cron testCron = new Cron(cron); + if (groupConfig == null && (metricsConfig == null || metricsConfig.isEmpty())) { + throw new IllegalArgumentException("At least one grouping or metric must be configured"); + } + this.id = id; this.indexPattern = indexPattern; this.rollupIndex = rollupIndex; this.groupConfig = groupConfig; - this.metricsConfig = metricsConfig; - this.timeout = timeout; + this.metricsConfig = metricsConfig != null ? metricsConfig : Collections.emptyList(); + this.timeout = timeout != null ? timeout : DEFAULT_TIMEOUT; this.cron = cron; this.pageSize = pageSize; } - public RollupJobConfig(StreamInput in) throws IOException { + public RollupJobConfig(final StreamInput in) throws IOException { id = in.readString(); indexPattern = in.readString(); rollupIndex = in.readString(); @@ -95,8 +145,6 @@ public RollupJobConfig(StreamInput in) throws IOException { pageSize = in.readInt(); } - public RollupJobConfig() {} - public String getId() { return id; } @@ -135,13 +183,20 @@ public String getWriteableName() { } public Set getAllFields() { - Set fields = new HashSet<>(groupConfig.getAllFields()); - fields.addAll(metricsConfig.stream().map(MetricConfig::getField).collect(Collectors.toSet())); - return fields; + final Set fields = new HashSet<>(); + if (groupConfig != null) { + fields.addAll(groupConfig.getAllFields()); + } + if (metricsConfig != null) { + for (MetricConfig metric : metricsConfig) { + fields.add(metric.getField()); + } + } + return Collections.unmodifiableSet(fields); } - public void validateMappings(Map> fieldCapsResponse, - ActionRequestValidationException validationException) { + public void validateMappings(final Map> fieldCapsResponse, + final ActionRequestValidationException validationException) { groupConfig.validateMappings(fieldCapsResponse, validationException); for (MetricConfig m : metricsConfig) { m.validateMappings(fieldCapsResponse, validationException); @@ -149,32 +204,34 @@ public void validateMappings(Map> fieldCa } @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { builder.startObject(); - builder.field(RollupField.ID.getPreferredName(), id); - builder.field(INDEX_PATTERN.getPreferredName(), indexPattern); - builder.field(ROLLUP_INDEX.getPreferredName(), rollupIndex); - builder.field(CRON.getPreferredName(), cron); - if (groupConfig != null) { - builder.field(GROUPS.getPreferredName(), groupConfig); - } - if (metricsConfig != null) { - builder.startArray(METRICS.getPreferredName()); - for (MetricConfig metric : metricsConfig) { - metric.toXContent(builder, params); + { + builder.field(ID, id); + builder.field(INDEX_PATTERN, indexPattern); + builder.field(ROLLUP_INDEX, rollupIndex); + builder.field(CRON, cron); + if (groupConfig != null) { + builder.field(GroupConfig.NAME, groupConfig); } - builder.endArray(); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout); + if (metricsConfig != null) { + builder.startArray(MetricConfig.NAME); + for (MetricConfig metric : metricsConfig) { + metric.toXContent(builder, params); + } + builder.endArray(); + } + if (timeout != null) { + builder.field(TIMEOUT, timeout); + } + builder.field(PAGE_SIZE, pageSize); } - 
builder.field(PAGE_SIZE.getPreferredName(), pageSize); builder.endObject(); return builder; } @Override - public void writeTo(StreamOutput out) throws IOException { + public void writeTo(final StreamOutput out) throws IOException { out.writeString(id); out.writeString(indexPattern); out.writeString(rollupIndex); @@ -190,13 +247,11 @@ public boolean equals(Object other) { if (this == other) { return true; } - if (other == null || getClass() != other.getClass()) { return false; } - RollupJobConfig that = (RollupJobConfig) other; - + final RollupJobConfig that = (RollupJobConfig) other; return Objects.equals(this.id, that.id) && Objects.equals(this.indexPattern, that.indexPattern) && Objects.equals(this.rollupIndex, that.rollupIndex) @@ -209,8 +264,7 @@ public boolean equals(Object other) { @Override public int hashCode() { - return Objects.hash(id, indexPattern, rollupIndex, cron, groupConfig, - metricsConfig, timeout, pageSize); + return Objects.hash(id, indexPattern, rollupIndex, cron, groupConfig, metricsConfig, timeout, pageSize); } @Override @@ -225,193 +279,7 @@ public String toJSONString() { return toString(); } - public static class Builder implements Writeable, ToXContentObject { - private String id; - private String indexPattern; - private String rollupIndex; - private GroupConfig groupConfig; - private List metricsConfig = Collections.emptyList(); - private TimeValue timeout = TimeValue.timeValueSeconds(20); - private String cron; - private int pageSize = 0; - - public Builder(RollupJobConfig job) { - this.id = job.getId(); - this.indexPattern = job.getIndexPattern(); - this.rollupIndex = job.getRollupIndex(); - this.groupConfig = job.getGroupConfig(); - this.metricsConfig = job.getMetricsConfig(); - this.timeout = job.getTimeout(); - this.cron = job.getCron(); - this.pageSize = job.getPageSize(); - } - - public static RollupJobConfig.Builder fromXContent(String id, XContentParser parser) { - RollupJobConfig.Builder config = RollupJobConfig.PARSER.apply(parser, null); - if (id != null) { - config.setId(id); - } - return config; - } - - public Builder() {} - - public String getId() { - return id; - } - - public RollupJobConfig.Builder setId(String id) { - this.id = id; - return this; - } - - public String getIndexPattern() { - return indexPattern; - } - - public RollupJobConfig.Builder setIndexPattern(String indexPattern) { - this.indexPattern = indexPattern; - return this; - } - - public String getRollupIndex() { - return rollupIndex; - } - - public RollupJobConfig.Builder setRollupIndex(String rollupIndex) { - this.rollupIndex = rollupIndex; - return this; - } - - public GroupConfig getGroupConfig() { - return groupConfig; - } - - public RollupJobConfig.Builder setGroupConfig(GroupConfig groupConfig) { - this.groupConfig = groupConfig; - return this; - } - - public List getMetricsConfig() { - return metricsConfig; - } - - public RollupJobConfig.Builder setMetricsConfig(List metricsConfig) { - this.metricsConfig = metricsConfig; - return this; - } - - public TimeValue getTimeout() { - return timeout; - } - - public RollupJobConfig.Builder setTimeout(TimeValue timeout) { - this.timeout = timeout; - return this; - } - - public String getCron() { - return cron; - } - - public RollupJobConfig.Builder setCron(String cron) { - this.cron = cron; - return this; - } - - public int getPageSize() { - return pageSize; - } - - public RollupJobConfig.Builder setPageSize(int pageSize) { - this.pageSize = pageSize; - return this; - } - - public RollupJobConfig build() { - if (id == null || 
id.isEmpty()) { - throw new IllegalArgumentException("An ID is mandatory."); - } - if (indexPattern == null || indexPattern.isEmpty()) { - throw new IllegalArgumentException("An index pattern is mandatory."); - } - if (Regex.isMatchAllPattern(indexPattern)) { - throw new IllegalArgumentException("Index pattern must not match all indices (as it would match it's own rollup index"); - } - if (Regex.isSimpleMatchPattern(indexPattern)) { - if (Regex.simpleMatch(indexPattern, rollupIndex)) { - throw new IllegalArgumentException("Index pattern would match rollup index name which is not allowed."); - } - } - if (indexPattern.equals(rollupIndex)) { - throw new IllegalArgumentException("Rollup index may not be the same as the index pattern."); - } - if (rollupIndex == null || rollupIndex.isEmpty()) { - throw new IllegalArgumentException("A rollup index name is mandatory."); - } - if (cron == null || cron.isEmpty()) { - throw new IllegalArgumentException("A cron schedule is mandatory."); - } - if (pageSize <= 0) { - throw new IllegalArgumentException("Parameter [" + PAGE_SIZE.getPreferredName() - + "] is mandatory and must be a positive long."); - } - // Cron doesn't have a parse helper method to see if the cron is valid, - // so just construct a temporary cron object and if the cron is bad, it'll - // throw an exception - Cron testCron = new Cron(cron); - if (groupConfig == null && (metricsConfig == null || metricsConfig.isEmpty())) { - throw new IllegalArgumentException("At least one grouping or metric must be configured."); - } - return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groupConfig, - metricsConfig, timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (id != null) { - builder.field(RollupField.ID.getPreferredName(), id); - } - if (indexPattern != null) { - builder.field(INDEX_PATTERN.getPreferredName(), indexPattern); - } - if (indexPattern != null) { - builder.field(ROLLUP_INDEX.getPreferredName(), rollupIndex); - } - if (cron != null) { - builder.field(CRON.getPreferredName(), cron); - } - if (groupConfig != null) { - builder.field(GROUPS.getPreferredName(), groupConfig); - } - if (metricsConfig != null) { - builder.startArray(METRICS.getPreferredName()); - for (MetricConfig config : metricsConfig) { - builder.startObject(); - config.toXContent(builder, params); - builder.endObject(); - } - builder.endArray(); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout); - } - builder.field(PAGE_SIZE.getPreferredName(), pageSize); - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(id); - out.writeOptionalString(indexPattern); - out.writeOptionalString(rollupIndex); - out.writeOptionalString(cron); - out.writeOptionalWriteable(groupConfig); - out.writeList(metricsConfig); - out.writeTimeValue(timeout); - out.writeInt(pageSize); - } + public static RollupJobConfig fromXContent(final XContentParser parser, @Nullable final String optionalJobId) throws IOException { + return PARSER.parse(parser, optionalJobId); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java index a1b0b3118ec5d..32507d57f32b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java @@ -45,7 +45,7 @@ */ public class TermsGroupConfig implements Writeable, ToXContentObject { - private static final String NAME = "terms"; + static final String NAME = "terms"; private static final String FIELDS = "fields"; private static final List FLOAT_TYPES = Arrays.asList("half_float", "float", "double", "scaled_float"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequest.java index c018ad5f73eda..f37072b9cf0fc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserRequest.java @@ -13,10 +13,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xpack.core.security.authc.support.CharArrays; -import org.elasticsearch.xpack.core.security.support.MetadataUtils; -import org.elasticsearch.xpack.core.security.support.Validation; import java.io.IOException; import java.util.Map; @@ -34,6 +31,7 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR private String email; private Map metadata; private char[] passwordHash; + private char[] password; private boolean enabled = true; private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; @@ -45,18 +43,15 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (username == null) { validationException = addValidationError("user is missing", validationException); - } else { - Validation.Error error = Validation.Users.validateUsername(username, false, Settings.EMPTY); - if (error != null) { - validationException = addValidationError(error.toString(), validationException); - } } if (roles == null) { validationException = addValidationError("roles are missing", validationException); } - if (metadata != null && MetadataUtils.containsReservedMetadata(metadata)) { - validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + "]", - validationException); + if (metadata != null && metadata.keySet().stream().anyMatch(s -> s.startsWith("_"))) { + validationException = addValidationError("metadata keys may not start with [_]", validationException); + } + if (password != null && passwordHash != null) { + validationException = addValidationError("only one of [password, passwordHash] can be provided", validationException); } // we do not check for a password hash here since it is possible that the user exists and we don't want to update the password return validationException; @@ -86,8 +81,12 @@ public void passwordHash(@Nullable char[] passwordHash) { this.passwordHash = passwordHash; } - public boolean enabled() { - return enabled; + public void enabled(boolean enabled) { + this.enabled = enabled; + } + + public void password(@Nullable char[] password) { + this.password = password; } /** @@ -130,8 +129,8 @@ public char[] passwordHash() { return passwordHash; } - public void enabled(boolean enabled) { - this.enabled = enabled; + public boolean enabled() { + return enabled; } @Override @@ -139,16 +138,16 @@ public 
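
Two notes on the PutUserRequest rework just above: the inlined metadata check keeps the old reserved-prefix behavior without the MetadataUtils dependency, and the new password field is mutually exclusive with passwordHash. A hedged sketch of what a caller of the tightened validate() sees; the username and roles setters are assumed from the existing class, since they do not appear in this hunk:

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.xpack.core.security.action.user.PutUserRequest;

public class PutUserRequestValidationSketch {
    public static void main(String[] args) {
        PutUserRequest request = new PutUserRequest();
        request.username("jane");                               // assumed setter
        request.roles("superuser");                             // assumed setter
        request.password("new-secret".toCharArray());           // HLRC-only field, never serialized
        request.passwordHash("already-hashed".toCharArray());   // transport-layer field
        ActionRequestValidationException e = request.validate();
        // e.validationErrors() now contains
        // "only one of [password, passwordHash] can be provided"
        assert e != null;
    }
}
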
String[] usernames() {
         return new String[] { username };
     }
 
+    @Nullable
+    public char[] password() {
+        return password;
+    }
+
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
         username = in.readString();
-        BytesReference passwordHashRef = in.readBytesReference();
-        if (passwordHashRef == BytesArray.EMPTY) {
-            passwordHash = null;
-        } else {
-            passwordHash = CharArrays.utf8BytesToChars(BytesReference.toBytes(passwordHashRef));
-        }
+        passwordHash = readCharArrayFromStream(in);
         roles = in.readStringArray();
         fullName = in.readOptionalString();
         email = in.readOptionalString();
@@ -161,13 +160,10 @@ public void readFrom(StreamInput in) throws IOException {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         out.writeString(username);
-        BytesReference passwordHashRef;
-        if (passwordHash == null) {
-            passwordHashRef = null;
-        } else {
-            passwordHashRef = new BytesArray(CharArrays.toUtf8Bytes(passwordHash));
+        writeCharArrayToStream(out, passwordHash);
+        if (password != null) {
+            throw new IllegalStateException("password cannot be serialized; it is only used by the high-level REST client");
         }
-        out.writeBytesReference(passwordHashRef);
         out.writeStringArray(roles);
         out.writeOptionalString(fullName);
         out.writeOptionalString(email);
@@ -180,4 +176,23 @@ public void writeTo(StreamOutput out) throws IOException {
         refreshPolicy.writeTo(out);
         out.writeBoolean(enabled);
     }
+
+    private static char[] readCharArrayFromStream(StreamInput in) throws IOException {
+        BytesReference charBytesRef = in.readBytesReference();
+        if (charBytesRef == BytesArray.EMPTY) {
+            return null;
+        } else {
+            return CharArrays.utf8BytesToChars(BytesReference.toBytes(charBytesRef));
+        }
+    }
+
+    private static void writeCharArrayToStream(StreamOutput out, char[] chars) throws IOException {
+        final BytesReference charBytesRef;
+        if (chars == null) {
+            charBytesRef = null;
+        } else {
+            charBytesRef = new BytesArray(CharArrays.toUtf8Bytes(chars));
+        }
+        out.writeBytesReference(charBytesRef);
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java
index 70b552b123e4e..e812f0cfc7332 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java
@@ -50,6 +50,7 @@
 import org.elasticsearch.index.query.Rewriteable;
 import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.index.search.NestedHelper;
 import org.elasticsearch.index.shard.IndexSearcherWrapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardUtils;
@@ -73,6 +74,7 @@
 import java.util.Map;
 import java.util.function.Function;
 
+import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
 import static org.apache.lucene.search.BooleanClause.Occur.SHOULD;
 
 /**
@@ -139,6 +141,13 @@ protected DirectoryReader wrap(DirectoryReader reader) {
                 Query roleQuery = queryShardContext.toFilter(queryBuilder).query();
                 filter.add(roleQuery, SHOULD);
                 if (queryShardContext.getMapperService().hasNested()) {
+                    NestedHelper nestedHelper = new NestedHelper(queryShardContext.getMapperService());
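
A brief aside on why the branch resuming below guards the role query: a role query that can match nested (non-root) documents would admit them directly, bypassing the root-document gate, so it is first conjoined with the non-nested filter. A schematic of that wrapping, mirroring the added lines (newNonNestedFilter's Version argument is the one this hunk obtains from the shard context); nested docs are then re-admitted through the root-document bitset logic that follows:

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.search.Queries;

public class RoleQueryWrappingSketch {
    static Query rootDocsOnly(Query roleQuery, Version indexVersionCreated) {
        // Both clauses are FILTER: scoring is irrelevant for a DLS filter.
        return new BooleanQuery.Builder()
            .add(roleQuery, Occur.FILTER)
            .add(Queries.newNonNestedFilter(indexVersionCreated), Occur.FILTER)
            .build();
    }
}

+                    if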
(nestedHelper.mightMatchNestedDocs(roleQuery)) { + roleQuery = new BooleanQuery.Builder() + .add(roleQuery, FILTER) + .add(Queries.newNonNestedFilter(queryShardContext.indexVersionCreated()), FILTER) + .build(); + } // If access is allowed on root doc then also access is allowed on all nested docs of that root document: BitSetProducer rootDocs = queryShardContext.bitsetFilter( Queries.newNonNestedFilter(queryShardContext.indexVersionCreated())); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java index 3415ad45829e4..cf2288f21ed94 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java @@ -6,28 +6,14 @@ package org.elasticsearch.xpack.core.upgrade.actions; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; -import org.elasticsearch.action.support.master.MasterNodeReadRequest; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; -import java.io.IOException; -import java.util.Arrays; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.action.ValidateActions.addValidationError; - -public class IndexUpgradeInfoAction extends Action { +public class IndexUpgradeInfoAction extends Action { public static final IndexUpgradeInfoAction INSTANCE = new IndexUpgradeInfoAction(); public static final String NAME = "cluster:admin/xpack/upgrade/info"; @@ -38,150 +24,18 @@ private IndexUpgradeInfoAction() { @Override public RequestBuilder newRequestBuilder(ElasticsearchClient client) { - return new RequestBuilder(client, this); + return new RequestBuilder(client); } - @Override - public Response newResponse() { - return new Response(); - } - - public static class Response extends ActionResponse implements ToXContentObject { - private Map actions; - - public Response() { - - } - - public Response(Map actions) { - this.actions = actions; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); - } - - public Map getActions() { - return actions; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - { - builder.startObject("indices"); - for (Map.Entry entry : actions.entrySet()) { - 
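
The deletions above and below move the upgrade-info Request and Response into org.elasticsearch.protocol.xpack.migration, leaving the action class as little more than a RequestBuilder. A hedged usage sketch of the rewired builder; the index name is hypothetical and get() is inherited from the request-builder base class:

import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction;

public class IndexUpgradeInfoSketch {
    static IndexUpgradeInfoResponse fetch(ElasticsearchClient client) {
        // The constructor is now public and no longer takes the action explicitly.
        return new IndexUpgradeInfoAction.RequestBuilder(client)
            .setIndices("twitter")
            .get();
    }
}
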
builder.startObject(entry.getKey()); - { - builder.field("action_required", entry.getValue().toString()); - } - builder.endObject(); - } - builder.endObject(); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response response = (Response) o; - return Objects.equals(actions, response.actions); - } - - @Override - public int hashCode() { - return Objects.hash(actions); - } - } - - public static class Request extends MasterNodeReadRequest implements IndicesRequest.Replaceable { - - private String[] indices = null; - private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); - - // for serialization - public Request() { - - } - - public Request(String... indices) { - this.indices = indices; - } - - @Override - public String[] indices() { - return indices; - } - - @Override - public Request indices(String... indices) { - this.indices = indices; - return this; - } - - @Override - public IndicesOptions indicesOptions() { - return indicesOptions; - } - - public void indicesOptions(IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (indices == null) { - validationException = addValidationError("index/indices is missing", validationException); - } - return validationException; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - indices = in.readStringArray(); - indicesOptions = IndicesOptions.readIndicesOptions(in); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(indices); - indicesOptions.writeIndicesOptions(out); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Request request = (Request) o; - return Arrays.equals(indices, request.indices) && - Objects.equals(indicesOptions.toString(), request.indicesOptions.toString()); - } - - @Override - public int hashCode() { - return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString()); - } + public IndexUpgradeInfoResponse newResponse() { + return new IndexUpgradeInfoResponse(); } - public static class RequestBuilder extends MasterNodeReadOperationRequestBuilder { + public static class RequestBuilder + extends MasterNodeReadOperationRequestBuilder { - protected RequestBuilder(ElasticsearchClient client, IndexUpgradeInfoAction action) { - super(client, action, new Request()); + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new IndexUpgradeInfoRequest()); } public RequestBuilder setIndices(String... 
indices) { @@ -194,4 +48,4 @@ public RequestBuilder setIndicesOptions(IndicesOptions indicesOptions) { return this; } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java index c397bd79e2885..f3fbab1026efd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesManagerServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java index aa372eb03562a..82fde59a8ae6b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicensesTransportTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseResponse; import org.elasticsearch.protocol.xpack.license.LicensesStatus; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.test.ESSingleNodeTestCase; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java index 17934efe0a503..7689ae4088f34 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackSettingsTests.java @@ -5,9 +5,14 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import javax.crypto.Cipher; +import javax.crypto.SecretKeyFactory; +import java.security.NoSuchAlgorithmException; + +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.not; @@ -25,4 +30,30 @@ public void testDefaultSSLCiphers() throws Exception { assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_RSA_WITH_AES_256_CBC_SHA"))); } } + + public void testPasswordHashingAlgorithmSettingValidation() { + final boolean isPBKDF2Available = isSecretkeyFactoryAlgoAvailable("PBKDF2WithHMACSHA512"); + final String pbkdf2Algo = randomFrom("PBKDF2_10000", "PBKDF2"); + final Settings settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), pbkdf2Algo).build(); + if (isPBKDF2Available) { + assertEquals(pbkdf2Algo, XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings)); + } else { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings)); + assertThat(e.getMessage(), containsString("Support for PBKDF2WithHMACSHA512 must be available")); + } + + final String 
bcryptAlgo = randomFrom("BCRYPT", "BCRYPT11"); + assertEquals(bcryptAlgo, XPackSettings.PASSWORD_HASHING_ALGORITHM.get( + Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), bcryptAlgo).build())); + } + + private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) { + try { + SecretKeyFactory.getInstance(algorithmId); + return true; + } catch (NoSuchAlgorithmException e) { + return false; + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java index 16ce6158b35ae..d892eb550a17a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java @@ -17,49 +17,54 @@ import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Random; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; import static com.carrotsearch.randomizedtesting.generators.RandomNumbers.randomIntBetween; +import static com.carrotsearch.randomizedtesting.generators.RandomPicks.randomFrom; import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomDateTimeZone; public class ConfigTestHelpers { - public static RollupJobConfig.Builder getRollupJob(String jobId) { - RollupJobConfig.Builder builder = new RollupJobConfig.Builder(); - builder.setId(jobId); - builder.setCron(getCronString()); - builder.setTimeout(new TimeValue(ESTestCase.randomIntBetween(1,100))); - String indexPattern = ESTestCase.randomAlphaOfLengthBetween(1,10); - builder.setIndexPattern(indexPattern); - builder.setRollupIndex("rollup_" + indexPattern); // to ensure the index pattern != rollup index - builder.setGroupConfig(ConfigTestHelpers.getGroupConfig().build()); - builder.setPageSize(ESTestCase.randomIntBetween(1,10)); - if (ESTestCase.randomBoolean()) { - builder.setMetricsConfig(randomMetricsConfigs(ESTestCase.random())); - } - return builder; + private static final String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m"}; + + private ConfigTestHelpers() { } - public static GroupConfig.Builder getGroupConfig() { - GroupConfig.Builder groupBuilder = new GroupConfig.Builder(); - groupBuilder.setDateHisto(randomDateHistogramGroupConfig(ESTestCase.random())); - if (ESTestCase.randomBoolean()) { - groupBuilder.setHisto(randomHistogramGroupConfig(ESTestCase.random())); - } - if (ESTestCase.randomBoolean()) { - groupBuilder.setTerms(randomTermsGroupConfig(ESTestCase.random())); - } - return groupBuilder; + public static RollupJobConfig randomRollupJobConfig(final Random random) { + return randomRollupJobConfig(random, randomAsciiAlphanumOfLengthBetween(random, 5, 20)); + } + public static RollupJobConfig randomRollupJobConfig(final Random random, final String id) { + return randomRollupJobConfig(random, id, randomAsciiAlphanumOfLengthBetween(random, 5, 20)); } - private static final String[] TIME_SUFFIXES = new String[]{"d", "h", "ms", "s", "m"}; - public static String randomPositiveTimeValue() { - return ESTestCase.randomIntBetween(1, 1000) + ESTestCase.randomFrom(TIME_SUFFIXES); + public static RollupJobConfig randomRollupJobConfig(final Random random, 
final String id, final String indexPattern) { + return randomRollupJobConfig(random, id, indexPattern, "rollup_" + indexPattern); + } + + public static RollupJobConfig randomRollupJobConfig(final Random random, + final String id, + final String indexPattern, + final String rollupIndex) { + final String cron = randomCron(); + final int pageSize = randomIntBetween(random, 1, 10); + final TimeValue timeout = random.nextBoolean() ? null : randomTimeout(random); + final GroupConfig groups = randomGroupConfig(random); + final List metrics = random.nextBoolean() ? null : randomMetricsConfigs(random); + return new RollupJobConfig(id, indexPattern, rollupIndex, cron, pageSize, groups, metrics, timeout); + } + + public static GroupConfig randomGroupConfig(final Random random) { + DateHistogramGroupConfig dateHistogram = randomDateHistogramGroupConfig(random); + HistogramGroupConfig histogram = random.nextBoolean() ? randomHistogramGroupConfig(random) : null; + TermsGroupConfig terms = random.nextBoolean() ? randomTermsGroupConfig(random) : null; + return new GroupConfig(dateHistogram, histogram, terms); } public static DateHistogramGroupConfig randomDateHistogramGroupConfig(final Random random) { @@ -76,7 +81,7 @@ public static List getFields() { .collect(Collectors.toList()); } - public static String getCronString() { + public static String randomCron() { return (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //second " " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 59))) + //minute " " + (ESTestCase.randomBoolean() ? "*" : String.valueOf(ESTestCase.randomIntBetween(0, 23))) + //hour @@ -140,6 +145,10 @@ public static String randomField(final Random random) { return randomAsciiAlphanumOfLengthBetween(random, 5, 10); } + private static String randomPositiveTimeValue() { + return ESTestCase.randomIntBetween(1, 1000) + ESTestCase.randomFrom(TIME_SUFFIXES); + } + public static DateHistogramInterval randomInterval() { return new DateHistogramInterval(randomPositiveTimeValue()); } @@ -147,4 +156,10 @@ public static DateHistogramInterval randomInterval() { private static long randomInterval(final Random random) { return RandomNumbers.randomLongBetween(random, 1L, Long.MAX_VALUE); } + + public static TimeValue randomTimeout(final Random random) { + return new TimeValue(randomIntBetween(random, 0, 60), + randomFrom(random, Arrays.asList(TimeUnit.MILLISECONDS, TimeUnit.SECONDS, TimeUnit.MINUTES))); + } + } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/GroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/GroupConfigSerializingTests.java index c220f10aeab27..49ea206ded767 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/GroupConfigSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/GroupConfigSerializingTests.java @@ -8,14 +8,16 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import java.io.IOException; +import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig; + public class GroupConfigSerializingTests extends AbstractSerializingTestCase { + @Override - protected GroupConfig doParseInstance(XContentParser parser) throws IOException { - 
return GroupConfig.PARSER.apply(parser, null).build(); + protected GroupConfig doParseInstance(final XContentParser parser) throws IOException { + return GroupConfig.fromXContent(parser); } @Override @@ -25,6 +27,6 @@ protected Writeable.Reader instanceReader() { @Override protected GroupConfig createTestInstance() { - return ConfigTestHelpers.getGroupConfig().build(); + return randomGroupConfig(random()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java index fa9767b51a3fb..a0df63bc38dde 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/JobWrapperSerializingTests.java @@ -39,7 +39,7 @@ protected GetRollupJobsAction.JobWrapper createTestInstance() { state = IndexerState.ABORTING; } - return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), + return new GetRollupJobsAction.JobWrapper(ConfigTestHelpers.randomRollupJobConfig(random()), new RollupJobStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()), new RollupJobStatus(state, Collections.emptyMap(), randomBoolean())); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java index a5f03aab51830..a5a82bc2bb090 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobConfigTests.java @@ -8,17 +8,27 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.junit.Before; +import java.io.IOException; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig; import static org.hamcrest.Matchers.equalTo; public class RollupJobConfigTests extends AbstractSerializingTestCase { + private String jobId; + + @Before + public void setUpOptionalId() { + jobId = randomAlphaOfLengthBetween(1, 10); + } + @Override protected RollupJobConfig createTestInstance() { - return ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1,10)).build(); + return randomRollupJobConfig(random(), jobId); } @Override @@ -27,43 +37,139 @@ protected Writeable.Reader instanceReader() { } @Override - protected RollupJobConfig doParseInstance(XContentParser parser) { - return RollupJobConfig.PARSER.apply(parser, null).build(); + protected RollupJobConfig doParseInstance(final XContentParser parser) throws IOException { + if (randomBoolean()) { + return RollupJobConfig.fromXContent(parser, jobId); + } else { + return RollupJobConfig.fromXContent(parser, null); + } } public void testEmptyIndexPattern() { - RollupJobConfig.Builder builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setIndexPattern(null); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build); - 
assertThat(e.getMessage(), equalTo("An index pattern is mandatory.")); - - builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setIndexPattern(""); - e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), equalTo("An index pattern is mandatory.")); + final RollupJobConfig sample = randomRollupJobConfig(random()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), null, sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Index pattern must be a non-null, non-empty string")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), "", sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Index pattern must be a non-null, non-empty string")); } public void testEmptyCron() { - RollupJobConfig.Builder builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setCron(null); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), equalTo("A cron schedule is mandatory.")); - - builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setCron(""); - e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), equalTo("A cron schedule is mandatory.")); + final RollupJobConfig sample = randomRollupJobConfig(random()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), null, sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Cron schedule must be a non-null, non-empty string")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), "", sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Cron schedule must be a non-null, non-empty string")); } public void testEmptyID() { - RollupJobConfig.Builder builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setId(null); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), equalTo("An ID is mandatory.")); - - builder = ConfigTestHelpers.getRollupJob(randomAlphaOfLengthBetween(1, 10)); - builder.setId(""); - e = expectThrows(IllegalArgumentException.class, builder::build); - assertThat(e.getMessage(), equalTo("An ID is mandatory.")); + final RollupJobConfig sample = randomRollupJobConfig(random()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(null, sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Id must be a non-null, non-empty string")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig("", sample.getIndexPattern(), sample.getRollupIndex(), 
sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Id must be a non-null, non-empty string")); + } + + public void testBadCron() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), "0 * * *", sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("invalid cron expression [0 * * *]")); + } + + public void testMatchAllIndexPattern() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), "*", sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Index pattern must not match all indices (as it would match it's own rollup index")); + } + + public void testMatchOwnRollupPatternPrefix() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), "foo-*", "foo-rollup", sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed")); + } + + public void testMatchOwnRollupPatternSuffix() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), "*-rollup", "foo-rollup", sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed")); + } + + public void testIndexPatternIdenticalToRollup() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), "foo", "foo", sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Rollup index may not be the same as the index pattern")); + } + + public void testEmptyRollupIndex() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), "", sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Rollup index must be a non-null, non-empty string")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), null, sample.getCron(), sample.getPageSize(), + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Rollup index must be a non-null, non-empty string")); + } + + public void testBadSize() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), 
sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(), -1, + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Page size is mandatory and must be a positive long")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(), 0, + sample.getGroupConfig(), sample.getMetricsConfig(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("Page size is mandatory and must be a positive long")); + } + + public void testEmptyGroupAndMetrics() { + final RollupJobConfig sample = randomRollupJobConfig(random()); + + Exception e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + null, null, sample.getTimeout())); + assertThat(e.getMessage(), equalTo("At least one grouping or metric must be configured")); + + e = expectThrows(IllegalArgumentException.class, () -> + new RollupJobConfig(sample.getId(), sample.getIndexPattern(), sample.getRollupIndex(), sample.getCron(), sample.getPageSize(), + null, emptyList(), sample.getTimeout())); + assertThat(e.getMessage(), equalTo("At least one grouping or metric must be configured")); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobTests.java index 915cfc2fe3575..3d2367f12bf29 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/RollupJobTests.java @@ -37,7 +37,7 @@ protected Writeable.Reader instanceReader() { @Override protected Writeable createTestInstance() { if (randomBoolean()) { - return new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), null); + return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), null); } Map headers = Collections.emptyMap(); @@ -45,7 +45,7 @@ protected Writeable createTestInstance() { headers = new HashMap<>(1); headers.put("foo", "bar"); } - return new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), headers); + return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers); } @Override @@ -60,7 +60,7 @@ protected Diffable makeTestChanges(Diffable testInstance) { return new RollupJob(other.getConfig(), null); } } else { - return new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), other.getHeaders()); + return new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), other.getHeaders()); } } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index a673016133724..3602e1b359ec2 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -47,7 +47,7 @@ dependencies { // ml deps compile project(':libs:grok') - compile 'net.sf.supercsv:super-csv:2.4.0' + compile "net.sf.supercsv:super-csv:${versions.supercsv}" nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" testCompile 'org.ini4j:ini4j:0.5.2' } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index c3f340a484408..910d9664240b2 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -163,12 +163,12 @@ import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizer; import org.elasticsearch.xpack.ml.job.categorization.MlClassicTokenizerFactory; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; -import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.NativeController; import org.elasticsearch.xpack.ml.job.process.NativeControllerHolder; -import org.elasticsearch.xpack.ml.job.process.ProcessCtrl; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessFactory; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.job.process.autodetect.BlackHoleAutodetectProcess; @@ -289,8 +289,8 @@ public List> getSettings() { CONCURRENT_JOB_ALLOCATIONS, MachineLearningField.MAX_MODEL_MEMORY_LIMIT, MAX_MACHINE_MEMORY_PERCENT, - ProcessCtrl.DONT_PERSIST_MODEL_STATE_SETTING, - ProcessCtrl.MAX_ANOMALY_RECORDS_SETTING, + AutodetectBuilder.DONT_PERSIST_MODEL_STATE_SETTING, + AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING, DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING, DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING, AutodetectProcessManager.MAX_RUNNING_JOBS_PER_NODE, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 4f2b476a5705f..401499172fb62 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -18,12 +18,14 @@ import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.service.ClusterService; @@ -455,12 +457,25 @@ protected ClusterBlockException checkBlock(OpenJobAction.Request request, Cluste protected void masterOperation(OpenJobAction.Request request, ClusterState state, ActionListener listener) { OpenJobAction.JobParams jobParams = request.getJobParams(); if (licenseState.isMachineLearningAllowed()) { - // Step 5. Wait for job to be started and respond - ActionListener> finalListener = + + // Step 6. 
Clear job finished time once the job is started and respond + ActionListener clearJobFinishTime = ActionListener.wrap( + response -> { + if (response.isAcknowledged()) { + clearJobFinishedTime(jobParams.getJobId(), listener); + } else { + listener.onResponse(response); + } + }, + listener::onFailure + ); + + // Step 5. Wait for job to be started + ActionListener> waitForJobToStart = new ActionListener>() { @Override public void onResponse(PersistentTasksCustomMetaData.PersistentTask task) { - waitForJobStarted(task.getId(), jobParams, listener); + waitForJobStarted(task.getId(), jobParams, clearJobFinishTime); } @Override @@ -476,7 +491,7 @@ public void onFailure(Exception e) { // Step 4. Start job task ActionListener jobUpateListener = ActionListener.wrap( response -> persistentTasksService.sendStartRequest(MlTasks.jobTaskId(jobParams.getJobId()), - OpenJobAction.TASK_NAME, jobParams, finalListener), + OpenJobAction.TASK_NAME, jobParams, waitForJobToStart), listener::onFailure ); @@ -579,6 +594,35 @@ public void onTimeout(TimeValue timeout) { }); } + private void clearJobFinishedTime(String jobId, ActionListener listener) { + clusterService.submitStateUpdateTask("clearing-job-finish-time-for-" + jobId, new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + MlMetadata mlMetadata = MlMetadata.getMlMetadata(currentState); + MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(mlMetadata); + Job.Builder jobBuilder = new Job.Builder(mlMetadata.getJobs().get(jobId)); + jobBuilder.setFinishedTime(null); + + mlMetadataBuilder.putJob(jobBuilder.build(), true); + ClusterState.Builder builder = ClusterState.builder(currentState); + return builder.metaData(new MetaData.Builder(currentState.metaData()) + .putCustom(MlMetadata.TYPE, mlMetadataBuilder.build())) + .build(); + } + + @Override + public void onFailure(String source, Exception e) { + logger.error("[" + jobId + "] Failed to clear finished_time; source [" + source + "]", e); + listener.onResponse(new OpenJobAction.Response(true)); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, + ClusterState newState) { + listener.onResponse(new OpenJobAction.Response(true)); + } + }); + } private void cancelJobStart(PersistentTasksCustomMetaData.PersistentTask persistentTask, Exception exception, ActionListener listener) { persistentTasksService.sendRemoveRequest(persistentTask.getId(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java index 9bcabad9aea5b..d6ade87fa6e7b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java @@ -71,7 +71,7 @@ private static Date getHistogramBucketTimestamp(Histogram.Bucket bucket) { static class TopNScores extends PriorityQueue { TopNScores(int n) { - super(n, false); + super(n); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java index 43c3f4825ddf3..0b9cb833c8980 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java +++ 
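
One line in the OverallBucketsProvider hunk above deserves a note: newer Lucene dropped PriorityQueue's (maxSize, prepopulate) constructor, so TopNScores now calls super(n). A sketch of the subclassing idiom it relies on, with illustrative values:

import org.apache.lucene.util.PriorityQueue;

public class TopNSketch {
    public static void main(String[] args) {
        PriorityQueue<Double> topN = new PriorityQueue<Double>(3) {
            @Override
            protected boolean lessThan(Double a, Double b) {
                return a < b; // min-heap: the smallest of the kept N sits at the head
            }
        };
        for (double score : new double[] {0.2, 0.9, 0.5, 0.7}) {
            topN.insertWithOverflow(score); // once full, evicts the current minimum
        }
        // topN now holds 0.5, 0.7, 0.9
    }
}
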
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/NativeController.java @@ -30,6 +30,11 @@ public class NativeController { private static final Logger LOGGER = Loggers.getLogger(NativeController.class); + /** + * Process controller native program name + */ + private static final String CONTROLLER = "controller"; + // The controller process should already be running by the time this class tries to connect to it, so the timeout // can be short (although there's a gotcha with EBS volumes restored from snapshot, so not TOO short) private static final Duration CONTROLLER_CONNECT_TIMEOUT = Duration.ofSeconds(10); @@ -50,7 +55,7 @@ public class NativeController { private final OutputStream commandStream; NativeController(Environment env, NamedPipeHelper namedPipeHelper) throws IOException { - ProcessPipes processPipes = new ProcessPipes(env, namedPipeHelper, ProcessCtrl.CONTROLLER, null, + ProcessPipes processPipes = new ProcessPipes(env, namedPipeHelper, CONTROLLER, null, true, true, false, false, false, false); processPipes.connectStreams(CONTROLLER_CONNECT_TIMEOUT); cppLogHandler = new CppLogMessageHandler(null, processPipes.getLogStream().get()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessBuilderUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessBuilderUtils.java new file mode 100644 index 0000000000000..fa5488eae011c --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessBuilderUtils.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.process; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.XPackPlugin; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; + +public final class ProcessBuilderUtils { + + private ProcessBuilderUtils() {} + + /** + * Name of the model config file + */ + public static final String ML_MODEL_CONF = "mlmodel.conf"; + + public static void addIfNotNull(T object, String argKey, List command) { + if (object != null) { + String param = argKey + object; + command.add(param); + } + } + + public static void addIfNotNull(TimeValue timeValue, String argKey, List command) { + addIfNotNull(timeValue == null ? null : timeValue.getSeconds(), argKey, command); + } + + /** + * Return true if there is a file ES_HOME/config/mlmodel.conf + */ + public static boolean modelConfigFilePresent(Environment env) { + Path modelConfPath = XPackPlugin.resolveConfigFile(env, ML_MODEL_CONF); + + return Files.isRegularFile(modelConfPath); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java deleted file mode 100644 index 0e24e4d56ca64..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrl.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
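
The new ProcessBuilderUtils above extracts the argument-assembly helpers shared by the autodetect and normalize command builders; the ProcessCtrl class they came from is deleted next. A small sketch of the null-skipping contract, with argument keys copied from this diff and a cast to disambiguate the generic overload from the TimeValue one:

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.ml.job.process.ProcessBuilderUtils;

import java.util.ArrayList;
import java.util.List;

public class AddIfNotNullSketch {
    public static void main(String[] args) {
        List<String> command = new ArrayList<>();
        ProcessBuilderUtils.addIfNotNull(TimeValue.timeValueSeconds(30), "--latency=", command);
        ProcessBuilderUtils.addIfNotNull((String) null, "--summarycountfield=", command);
        // command == ["--latency=30"]: the TimeValue overload appends seconds,
        // while null values are silently skipped rather than emitted as "key null".
    }
}
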
- */ -package org.elasticsearch.xpack.ml.job.process; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; -import org.elasticsearch.xpack.core.ml.job.config.DataDescription; -import org.elasticsearch.xpack.core.ml.job.config.Job; - -import java.io.BufferedWriter; -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; -import java.util.stream.Collectors; - - -/** - * Utility class for running a Ml process
        - * The process runs in a clean environment. - */ -public class ProcessCtrl { - - /** - * Autodetect API native program name - always loaded from the same directory as the controller process - */ - public static final String AUTODETECT = "autodetect"; - static final String AUTODETECT_PATH = "./" + AUTODETECT; - - /** - * The normalization native program name - always loaded from the same directory as the controller process - */ - public static final String NORMALIZE = "normalize"; - static final String NORMALIZE_PATH = "./" + NORMALIZE; - - /** - * Process controller native program name - */ - public static final String CONTROLLER = "controller"; - - /** - * Name of the config setting containing the path to the logs directory - */ - private static final int DEFAULT_MAX_NUM_RECORDS = 500; - /** - * The maximum number of anomaly records that will be written each bucket - */ - public static final Setting MAX_ANOMALY_RECORDS_SETTING = Setting.intSetting("max.anomaly.records", DEFAULT_MAX_NUM_RECORDS, - Property.NodeScope); - - /* - * General arguments - */ - static final String JOB_ID_ARG = "--jobid="; - - /* - * Arguments used by both autodetect and normalize - */ - static final String BUCKET_SPAN_ARG = "--bucketspan="; - public static final String DELETE_STATE_FILES_ARG = "--deleteStateFiles"; - static final String LENGTH_ENCODED_INPUT_ARG = "--lengthEncodedInput"; - static final String MODEL_CONFIG_ARG = "--modelconfig="; - public static final String QUANTILES_STATE_PATH_ARG = "--quantilesState="; - static final String PER_PARTITION_NORMALIZATION = "--perPartitionNormalization"; - - /* - * Arguments used by autodetect - */ - static final String LATENCY_ARG = "--latency="; - static final String RESULT_FINALIZATION_WINDOW_ARG = "--resultFinalizationWindow="; - static final String MULTIVARIATE_BY_FIELDS_ARG = "--multivariateByFields"; - static final String PERSIST_INTERVAL_ARG = "--persistInterval="; - static final String MAX_QUANTILE_INTERVAL_ARG = "--maxQuantileInterval="; - static final String SUMMARY_COUNT_FIELD_ARG = "--summarycountfield="; - static final String TIME_FIELD_ARG = "--timefield="; - - private static final int SECONDS_IN_HOUR = 3600; - - /** - * Roughly how often should the C++ process persist state? A staggering - * factor that varies by job is added to this. - */ - static final long DEFAULT_BASE_PERSIST_INTERVAL = 10800; // 3 hours - - /** - * Roughly how often should the C++ process output quantiles when no - * anomalies are being detected? A staggering factor that varies by job is - * added to this. - */ - static final int BASE_MAX_QUANTILE_INTERVAL = 21600; // 6 hours - - /** - * Name of the model config file - */ - static final String ML_MODEL_CONF = "mlmodel.conf"; - - /** - * Persisted quantiles are written to disk so they can be read by - * the autodetect program. All quantiles files have this extension. - */ - private static final String QUANTILES_FILE_EXTENSION = ".json"; - - /** - * Config setting storing the flag that disables model persistence - */ - public static final Setting DONT_PERSIST_MODEL_STATE_SETTING = Setting.boolSetting("no.model.state.persist", false, - Property.NodeScope); - - static String maxAnomalyRecordsArg(Settings settings) { - return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING.get(settings); - } - - private ProcessCtrl() { - } - - /** - * This random time of up to 1 hour is added to intervals at which we - * tell the C++ process to perform periodic operations. 
This means that - * when there are many jobs there is a certain amount of staggering of - * their periodic operations. A given job will always be given the same - * staggering interval (for a given JVM implementation). - * - * @param jobId The ID of the job to calculate the staggering interval for - * @return The staggering interval - */ - static int calculateStaggeringInterval(String jobId) { - Random rng = new Random(jobId.hashCode()); - return rng.nextInt(SECONDS_IN_HOUR); - } - - public static List buildAutodetectCommand(Environment env, Settings settings, Job job, Logger logger) { - List command = new ArrayList<>(); - command.add(AUTODETECT_PATH); - - String jobId = JOB_ID_ARG + job.getId(); - command.add(jobId); - - AnalysisConfig analysisConfig = job.getAnalysisConfig(); - if (analysisConfig != null) { - addIfNotNull(analysisConfig.getBucketSpan(), BUCKET_SPAN_ARG, command); - addIfNotNull(analysisConfig.getLatency(), LATENCY_ARG, command); - addIfNotNull(analysisConfig.getSummaryCountFieldName(), - SUMMARY_COUNT_FIELD_ARG, command); - if (Boolean.TRUE.equals(analysisConfig.getOverlappingBuckets())) { - Long window = analysisConfig.getResultFinalizationWindow(); - if (window == null) { - window = AnalysisConfig.DEFAULT_RESULT_FINALIZATION_WINDOW; - } - command.add(RESULT_FINALIZATION_WINDOW_ARG + window); - } - if (Boolean.TRUE.equals(analysisConfig.getMultivariateByFields())) { - command.add(MULTIVARIATE_BY_FIELDS_ARG); - } - - if (analysisConfig.getUsePerPartitionNormalization()) { - command.add(PER_PARTITION_NORMALIZATION); - } - } - - // Input is always length encoded - command.add(LENGTH_ENCODED_INPUT_ARG); - - // Limit the number of output records - command.add(maxAnomalyRecordsArg(settings)); - - // always set the time field - String timeFieldArg = TIME_FIELD_ARG + getTimeFieldOrDefault(job); - command.add(timeFieldArg); - - int intervalStagger = calculateStaggeringInterval(job.getId()); - logger.debug("Periodic operations staggered by " + intervalStagger +" seconds for job '" + job.getId() + "'"); - - // Supply a URL for persisting/restoring model state unless model - // persistence has been explicitly disabled. - if (DONT_PERSIST_MODEL_STATE_SETTING.get(settings)) { - logger.info("Will not persist model state - " + DONT_PERSIST_MODEL_STATE_SETTING + " setting was set"); - } else { - // Persist model state every few hours even if the job isn't closed - long persistInterval = (job.getBackgroundPersistInterval() == null) ? - (DEFAULT_BASE_PERSIST_INTERVAL + intervalStagger) : - job.getBackgroundPersistInterval().getSeconds(); - command.add(PERSIST_INTERVAL_ARG + persistInterval); - } - - int maxQuantileInterval = BASE_MAX_QUANTILE_INTERVAL + intervalStagger; - command.add(MAX_QUANTILE_INTERVAL_ARG + maxQuantileInterval); - - if (modelConfigFilePresent(env)) { - String modelConfigFile = XPackPlugin.resolveConfigFile(env, ML_MODEL_CONF).toString(); - command.add(MODEL_CONFIG_ARG + modelConfigFile); - } - - return command; - } - - private static String getTimeFieldOrDefault(Job job) { - DataDescription dataDescription = job.getDataDescription(); - boolean useDefault = dataDescription == null - || Strings.isNullOrEmpty(dataDescription.getTimeField()); - return useDefault ? DataDescription.DEFAULT_TIME_FIELD : dataDescription.getTimeField(); - } - - private static void addIfNotNull(TimeValue timeValue, String argKey, List command) { - addIfNotNull(timeValue == null ? 
null : timeValue.getSeconds(), argKey, command); - } - - private static void addIfNotNull(List timeValues, String argKey, List command) { - if (timeValues != null) { - addIfNotNull(timeValues.stream().map(TimeValue::getSeconds).collect(Collectors.toList()), argKey, command); - } - } - - private static void addIfNotNull(T object, String argKey, List command) { - if (object != null) { - String param = argKey + object; - command.add(param); - } - } - - /** - * Return true if there is a file ES_HOME/config/mlmodel.conf - */ - public static boolean modelConfigFilePresent(Environment env) { - Path modelConfPath = XPackPlugin.resolveConfigFile(env, ML_MODEL_CONF); - - return Files.isRegularFile(modelConfPath); - } - - /** - * Build the command to start the normalizer process. - */ - public static List buildNormalizerCommand(Environment env, String jobId, String quantilesState, Integer bucketSpan, - boolean perPartitionNormalization) throws IOException { - - List command = new ArrayList<>(); - command.add(NORMALIZE_PATH); - addIfNotNull(bucketSpan, BUCKET_SPAN_ARG, command); - command.add(LENGTH_ENCODED_INPUT_ARG); - if (perPartitionNormalization) { - command.add(PER_PARTITION_NORMALIZATION); - } - - if (quantilesState != null) { - Path quantilesStateFilePath = writeNormalizerInitState(jobId, quantilesState, env); - - String stateFileArg = QUANTILES_STATE_PATH_ARG + quantilesStateFilePath; - command.add(stateFileArg); - command.add(DELETE_STATE_FILES_ARG); - } - - if (modelConfigFilePresent(env)) { - String modelConfigFile = XPackPlugin.resolveConfigFile(env, ML_MODEL_CONF).toString(); - command.add(MODEL_CONFIG_ARG + modelConfigFile); - } - - return command; - } - - /** - * Write the normalizer init state to file. - */ - public static Path writeNormalizerInitState(String jobId, String state, Environment env) - throws IOException { - // createTempFile has a race condition where it may return the same - // temporary file name to different threads if called simultaneously - // from multiple threads, hence add the thread ID to avoid this - Path stateFile = Files.createTempFile(env.tmpFile(), jobId + "_quantiles_" + Thread.currentThread().getId(), - QUANTILES_FILE_EXTENSION); - - try (BufferedWriter osw = Files.newBufferedWriter(stateFile, StandardCharsets.UTF_8)) { - osw.write(state); - } - - return stateFile; - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 89418f9d53125..200cb08512572 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -6,51 +6,124 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; +import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; +import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import 
org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.ml.job.process.NativeController; -import org.elasticsearch.xpack.ml.job.process.ProcessCtrl; +import org.elasticsearch.xpack.ml.job.process.ProcessBuilderUtils; import org.elasticsearch.xpack.ml.job.process.ProcessPipes; -import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.AnalysisLimitsWriter; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.FieldConfigWriter; import org.elasticsearch.xpack.ml.job.process.autodetect.writer.ModelPlotConfigWriter; +import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Random; import java.util.Set; +import static org.elasticsearch.xpack.ml.job.process.ProcessBuilderUtils.addIfNotNull; + /** * The autodetect process builder. */ public class AutodetectBuilder { + + /** + * Autodetect API native program name - always loaded from the same directory as the controller process + */ + public static final String AUTODETECT = "autodetect"; + static final String AUTODETECT_PATH = "./" + AUTODETECT; + + /* + * Arguments used by both autodetect and normalize + */ + public static final String BUCKET_SPAN_ARG = "--bucketspan="; + public static final String DELETE_STATE_FILES_ARG = "--deleteStateFiles"; + public static final String LENGTH_ENCODED_INPUT_ARG = "--lengthEncodedInput"; + public static final String MODEL_CONFIG_ARG = "--modelconfig="; + public static final String QUANTILES_STATE_PATH_ARG = "--quantilesState="; + public static final String PER_PARTITION_NORMALIZATION = "--perPartitionNormalization"; + private static final String CONF_EXTENSION = ".conf"; + static final String JOB_ID_ARG = "--jobid="; private static final String LIMIT_CONFIG_ARG = "--limitconfig="; private static final String MODEL_PLOT_CONFIG_ARG = "--modelplotconfig="; private static final String FIELD_CONFIG_ARG = "--fieldconfig="; + static final String LATENCY_ARG = "--latency="; + static final String RESULT_FINALIZATION_WINDOW_ARG = "--resultFinalizationWindow="; + static final String MULTIVARIATE_BY_FIELDS_ARG = "--multivariateByFields"; + static final String PERSIST_INTERVAL_ARG = "--persistInterval="; + static final String MAX_QUANTILE_INTERVAL_ARG = "--maxQuantileInterval="; + static final String SUMMARY_COUNT_FIELD_ARG = "--summarycountfield="; + static final String TIME_FIELD_ARG = "--timefield="; + + /** + * The default maximum number of anomaly records output per bucket + */ + private static final int DEFAULT_MAX_NUM_RECORDS = 500; + + /** + * The maximum number of anomaly records that will be written each bucket + */ + public static final Setting<Integer> MAX_ANOMALY_RECORDS_SETTING = Setting.intSetting("max.anomaly.records", DEFAULT_MAX_NUM_RECORDS, + Setting.Property.NodeScope); + + /** + * Config setting storing the flag that disables model persistence + */ + public static final Setting<Boolean> DONT_PERSIST_MODEL_STATE_SETTING = Setting.boolSetting("no.model.state.persist", false, + Setting.Property.NodeScope); + + private static final int SECONDS_IN_HOUR = 3600; + + /** + *
Roughly how often should the C++ process persist state? A staggering + * factor that varies by job is added to this. + */ + private static final long DEFAULT_BASE_PERSIST_INTERVAL = 10800; // 3 hours - private Job job; - private List filesToDelete; - private Logger logger; + /** + * Roughly how often should the C++ process output quantiles when no + * anomalies are being detected? A staggering factor that varies by job is + * added to this. + */ + static final int BASE_MAX_QUANTILE_INTERVAL = 21600; // 6 hours + + /** + * Persisted quantiles are written to disk so they can be read by + * the autodetect program. All quantiles files have this extension. + */ + private static final String QUANTILES_FILE_EXTENSION = ".json"; + + private final Job job; + private final List filesToDelete; + private final Logger logger; + private final Environment env; + private final Settings settings; + private final NativeController controller; + private final ProcessPipes processPipes; private Set referencedFilters; private List scheduledEvents; private Quantiles quantiles; - private Environment env; - private Settings settings; - private NativeController controller; - private ProcessPipes processPipes; /** * Constructs an autodetect process builder @@ -98,7 +171,7 @@ public AutodetectBuilder scheduledEvents(List scheduledEvents) { */ public void build() throws IOException { - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job, logger); + List command = buildAutodetectCommand(); buildLimits(command); buildModelPlotConfig(command); @@ -109,6 +182,100 @@ public void build() throws IOException { controller.startProcess(command); } + /** + * Visible for testing + */ + List buildAutodetectCommand() { + List command = new ArrayList<>(); + command.add(AUTODETECT_PATH); + + command.add(JOB_ID_ARG + job.getId()); + + AnalysisConfig analysisConfig = job.getAnalysisConfig(); + if (analysisConfig != null) { + addIfNotNull(analysisConfig.getBucketSpan(), BUCKET_SPAN_ARG, command); + addIfNotNull(analysisConfig.getLatency(), LATENCY_ARG, command); + addIfNotNull(analysisConfig.getSummaryCountFieldName(), + SUMMARY_COUNT_FIELD_ARG, command); + if (Boolean.TRUE.equals(analysisConfig.getOverlappingBuckets())) { + Long window = analysisConfig.getResultFinalizationWindow(); + if (window == null) { + window = AnalysisConfig.DEFAULT_RESULT_FINALIZATION_WINDOW; + } + command.add(RESULT_FINALIZATION_WINDOW_ARG + window); + } + if (Boolean.TRUE.equals(analysisConfig.getMultivariateByFields())) { + command.add(MULTIVARIATE_BY_FIELDS_ARG); + } + + if (analysisConfig.getUsePerPartitionNormalization()) { + command.add(PER_PARTITION_NORMALIZATION); + } + } + + // Input is always length encoded + command.add(LENGTH_ENCODED_INPUT_ARG); + + // Limit the number of output records + command.add(maxAnomalyRecordsArg(settings)); + + // always set the time field + String timeFieldArg = TIME_FIELD_ARG + getTimeFieldOrDefault(job); + command.add(timeFieldArg); + + int intervalStagger = calculateStaggeringInterval(job.getId()); + logger.debug("[{}] Periodic operations staggered by {} seconds", job.getId(), intervalStagger); + + // Supply a URL for persisting/restoring model state unless model + // persistence has been explicitly disabled. 
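
Note: the branch that follows chooses the model-state persist interval: the job's configured `backgroundPersistInterval` when set, otherwise the default base interval plus a per-job stagger. A minimal, self-contained sketch of that arithmetic, assuming the constants introduced above (the `StaggerDemo` class and its `main()` harness are illustrative only, not part of this change):

```java
import java.util.Random;

public class StaggerDemo {
    private static final int SECONDS_IN_HOUR = 3600;
    private static final long DEFAULT_BASE_PERSIST_INTERVAL = 10800; // 3 hours

    // Seeding the RNG with the job ID's hash code makes the stagger
    // deterministic: the same job always gets the same offset (for a
    // given JVM implementation).
    static int calculateStaggeringInterval(String jobId) {
        return new Random(jobId.hashCode()).nextInt(SECONDS_IN_HOUR);
    }

    public static void main(String[] args) {
        String jobId = "unit-test-job";
        // No configured background persist interval, so fall back to
        // the default base plus the per-job stagger.
        long persistInterval = DEFAULT_BASE_PERSIST_INTERVAL + calculateStaggeringInterval(jobId);
        System.out.println("--persistInterval=" + persistInterval); // stable across runs
    }
}
```

The stagger spreads the periodic persistence of many jobs across an hour instead of letting them all fire at once.
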
+ if (DONT_PERSIST_MODEL_STATE_SETTING.get(settings)) { + logger.info("[{}] Will not persist model state - {} setting was set", job.getId(), DONT_PERSIST_MODEL_STATE_SETTING); + } else { + // Persist model state every few hours even if the job isn't closed + long persistInterval = (job.getBackgroundPersistInterval() == null) ? + (DEFAULT_BASE_PERSIST_INTERVAL + intervalStagger) : + job.getBackgroundPersistInterval().getSeconds(); + command.add(PERSIST_INTERVAL_ARG + persistInterval); + } + + int maxQuantileInterval = BASE_MAX_QUANTILE_INTERVAL + intervalStagger; + command.add(MAX_QUANTILE_INTERVAL_ARG + maxQuantileInterval); + + if (ProcessBuilderUtils.modelConfigFilePresent(env)) { + String modelConfigFile = XPackPlugin.resolveConfigFile(env, ProcessBuilderUtils.ML_MODEL_CONF).toString(); + command.add(MODEL_CONFIG_ARG + modelConfigFile); + } + + return command; + } + + + static String maxAnomalyRecordsArg(Settings settings) { + return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING.get(settings); + } + + private static String getTimeFieldOrDefault(Job job) { + DataDescription dataDescription = job.getDataDescription(); + boolean useDefault = dataDescription == null + || Strings.isNullOrEmpty(dataDescription.getTimeField()); + return useDefault ? DataDescription.DEFAULT_TIME_FIELD : dataDescription.getTimeField(); + } + + /** + * This random time of up to 1 hour is added to intervals at which we + * tell the C++ process to perform periodic operations. This means that + * when there are many jobs there is a certain amount of staggering of + * their periodic operations. A given job will always be given the same + * staggering interval (for a given JVM implementation). + * + * @param jobId The ID of the job to calculate the staggering interval for + * @return The staggering interval + */ + static int calculateStaggeringInterval(String jobId) { + Random rng = new Random(jobId.hashCode()); + return rng.nextInt(SECONDS_IN_HOUR); + } + private void buildLimits(List command) throws IOException { if (job.getAnalysisLimits() != null) { Path limitConfigFile = Files.createTempFile(env.tmpFile(), "limitconfig", CONF_EXTENSION); @@ -152,15 +319,32 @@ private void buildQuantiles(List command) throws IOException { if (quantiles != null && !quantiles.getQuantileState().isEmpty()) { logger.info("Restoring quantiles for job '" + job.getId() + "'"); - Path normalizersStateFilePath = ProcessCtrl.writeNormalizerInitState( - job.getId(), quantiles.getQuantileState(), env); + Path normalizersStateFilePath = writeNormalizerInitState(job.getId(), quantiles.getQuantileState(), env); - String quantilesStateFileArg = ProcessCtrl.QUANTILES_STATE_PATH_ARG + normalizersStateFilePath; + String quantilesStateFileArg = QUANTILES_STATE_PATH_ARG + normalizersStateFilePath; command.add(quantilesStateFileArg); - command.add(ProcessCtrl.DELETE_STATE_FILES_ARG); + command.add(DELETE_STATE_FILES_ARG); } } + /** + * Write the normalizer init state to file. 
+ */ + public static Path writeNormalizerInitState(String jobId, String state, Environment env) + throws IOException { + // createTempFile has a race condition where it may return the same + // temporary file name to different threads if called simultaneously + // from multiple threads, hence add the thread ID to avoid this + Path stateFile = Files.createTempFile(env.tmpFile(), jobId + "_quantiles_" + Thread.currentThread().getId(), + QUANTILES_FILE_EXTENSION); + + try (BufferedWriter osw = Files.newBufferedWriter(stateFile, StandardCharsets.UTF_8)) { + osw.write(state); + } + + return stateFile; + } + private void buildFieldConfig(List command) throws IOException { if (job.getAnalysisConfig() != null) { // write to a temporary field config file diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java index adaed1f6f7c36..01ad0bec85aec 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java @@ -6,21 +6,20 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.client.Client; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.process.NativeController; -import org.elasticsearch.xpack.ml.job.process.ProcessCtrl; import org.elasticsearch.xpack.ml.job.process.ProcessPipes; import org.elasticsearch.xpack.ml.job.process.autodetect.output.AutodetectResultsParser; import org.elasticsearch.xpack.ml.job.process.autodetect.output.StateProcessor; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.NamedPipeHelper; import java.io.IOException; @@ -55,8 +54,9 @@ public AutodetectProcess createAutodetectProcess(Job job, ExecutorService executorService, Runnable onProcessCrash) { List filesToDelete = new ArrayList<>(); - ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, ProcessCtrl.AUTODETECT, job.getId(), - true, false, true, true, params.modelSnapshot() != null, !ProcessCtrl.DONT_PERSIST_MODEL_STATE_SETTING.get(settings)); + ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, AutodetectBuilder.AUTODETECT, job.getId(), + true, false, true, true, params.modelSnapshot() != null, + !AutodetectBuilder.DONT_PERSIST_MODEL_STATE_SETTING.get(settings)); createNativeProcess(job, params, processPipes, filesToDelete); boolean includeTokensField = MachineLearning.CATEGORIZATION_TOKENIZATION_IN_JAVA && job.getAnalysisConfig().getCategorizationFieldName() != null; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizable.java index e55e5ac0346d8..8ee5d1ad6e2d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizable.java @@ -46,6 +46,11 @@ public String getPersonFieldName() { return bucketInfluencer.getInfluencerFieldName(); } + @Override + public String getPersonFieldValue() { + return null; + } + @Override public String getFunctionName() { return null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizable.java index 1ba2e77040897..7ef23cb513b7f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizable.java @@ -64,6 +64,11 @@ public String getPersonFieldName() { return null; } + @Override + public String getPersonFieldValue() { + return null; + } + @Override public String getFunctionName() { return null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizable.java index 74cb86a3fdfdd..bc1567ac00a14 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizable.java @@ -44,6 +44,11 @@ public String getPersonFieldName() { return influencer.getInfluencerFieldName(); } + @Override + public String getPersonFieldValue() { + return influencer.getInfluencerFieldValue(); + } + @Override public String getFunctionName() { return null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/MultiplyingNormalizerProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/MultiplyingNormalizerProcess.java index fc7bd35188473..8aa266e15d22e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/MultiplyingNormalizerProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/MultiplyingNormalizerProcess.java @@ -63,10 +63,11 @@ public void writeRecord(String[] record) throws IOException { result.setPartitionFieldName(record[1]); result.setPartitionFieldValue(record[2]); result.setPersonFieldName(record[3]); - result.setFunctionName(record[4]); - result.setValueFieldName(record[5]); - result.setProbability(Double.parseDouble(record[6])); - result.setNormalizedScore(factor * Double.parseDouble(record[7])); + result.setPersonFieldValue(record[4]); + result.setFunctionName(record[5]); + result.setValueFieldName(record[6]); + result.setProbability(Double.parseDouble(record[7])); + result.setNormalizedScore(factor * Double.parseDouble(record[8])); } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) { throw new IOException("Unable to write to no-op normalizer", e); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java index 0c6bb407e7835..c96a3b48fe1d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NativeNormalizerProcessFactory.java @@ -9,10 +9,9 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.process.NativeController; -import org.elasticsearch.xpack.ml.job.process.ProcessCtrl; import org.elasticsearch.xpack.ml.job.process.ProcessPipes; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.utils.NamedPipeHelper; import java.io.IOException; @@ -40,7 +39,7 @@ public NativeNormalizerProcessFactory(Environment env, Settings settings, Native @Override public NormalizerProcess createNormalizerProcess(String jobId, String quantilesState, Integer bucketSpan, boolean perPartitionNormalization, ExecutorService executorService) { - ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, ProcessCtrl.NORMALIZE, jobId, + ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, NormalizerBuilder.NORMALIZE, jobId, true, false, true, true, false, false); createNativeProcess(jobId, quantilesState, processPipes, bucketSpan, perPartitionNormalization); @@ -52,7 +51,7 @@ private void createNativeProcess(String jobId, String quantilesState, ProcessPip boolean perPartitionNormalization) { try { - List command = ProcessCtrl.buildNormalizerCommand(env, jobId, quantilesState, bucketSpan, perPartitionNormalization); + List command = new NormalizerBuilder(env, jobId, quantilesState, bucketSpan, perPartitionNormalization).build(); processPipes.addArgs(command); nativeController.startProcess(command); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizable.java index 606be98ae10e7..7efadf2961308 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizable.java @@ -44,6 +44,8 @@ public Normalizable(String indexName) { abstract String getPersonFieldName(); + abstract String getPersonFieldValue(); + abstract String getFunctionName(); abstract String getValueFieldName(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java index 2c929ff4f1ae4..2d4e2135478f3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java @@ -70,6 +70,7 @@ public void normalize(Integer bucketSpan, boolean perPartitionNormalization, NormalizerResult.PARTITION_FIELD_NAME_FIELD.getPreferredName(), 
NormalizerResult.PARTITION_FIELD_VALUE_FIELD.getPreferredName(), NormalizerResult.PERSON_FIELD_NAME_FIELD.getPreferredName(), + NormalizerResult.PERSON_FIELD_VALUE_FIELD.getPreferredName(), NormalizerResult.FUNCTION_NAME_FIELD.getPreferredName(), NormalizerResult.VALUE_FIELD_NAME_FIELD.getPreferredName(), NormalizerResult.PROBABILITY_FIELD.getPreferredName(), @@ -108,6 +109,7 @@ private static void writeNormalizableAndChildrenRecursively(Normalizable normali Strings.coalesceToEmpty(normalizable.getPartitionFieldName()), Strings.coalesceToEmpty(normalizable.getPartitionFieldValue()), Strings.coalesceToEmpty(normalizable.getPersonFieldName()), + Strings.coalesceToEmpty(normalizable.getPersonFieldValue()), Strings.coalesceToEmpty(normalizable.getFunctionName()), Strings.coalesceToEmpty(normalizable.getValueFieldName()), Double.toString(normalizable.getProbability()), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilder.java new file mode 100644 index 0000000000000..5630a75127506 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilder.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.process.normalizer; + +import org.elasticsearch.env.Environment; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.ml.job.process.ProcessBuilderUtils; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xpack.ml.job.process.ProcessBuilderUtils.addIfNotNull; + +public class NormalizerBuilder { + + /** + * The normalization native program name - always loaded from the same directory as the controller process + */ + public static final String NORMALIZE = "normalize"; + static final String NORMALIZE_PATH = "./" + NORMALIZE; + + private final Environment env; + private final String jobId; + private final String quantilesState; + private final Integer bucketSpan; + private final boolean perPartitionNormalization; + + public NormalizerBuilder(Environment env, String jobId, String quantilesState, Integer bucketSpan, + boolean perPartitionNormalization) { + this.env = env; + this.jobId = jobId; + this.quantilesState = quantilesState; + this.bucketSpan = bucketSpan; + this.perPartitionNormalization = perPartitionNormalization; + } + + /** + * Build the command to start the normalizer process. 
+ */ + public List build() throws IOException { + + List command = new ArrayList<>(); + command.add(NORMALIZE_PATH); + addIfNotNull(bucketSpan, AutodetectBuilder.BUCKET_SPAN_ARG, command); + command.add(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG); + if (perPartitionNormalization) { + command.add(AutodetectBuilder.PER_PARTITION_NORMALIZATION); + } + + if (quantilesState != null) { + Path quantilesStateFilePath = AutodetectBuilder.writeNormalizerInitState(jobId, quantilesState, env); + + String stateFileArg = AutodetectBuilder.QUANTILES_STATE_PATH_ARG + quantilesStateFilePath; + command.add(stateFileArg); + command.add(AutodetectBuilder.DELETE_STATE_FILES_ARG); + } + + if (ProcessBuilderUtils.modelConfigFilePresent(env)) { + String modelConfigFile = XPackPlugin.resolveConfigFile(env, ProcessBuilderUtils.ML_MODEL_CONF).toString(); + command.add(AutodetectBuilder.MODEL_CONFIG_ARG + modelConfigFile); + } + + return command; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResult.java index 5cb2932a28ba5..269792dbe7797 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResult.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.job.process.normalizer; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -26,6 +27,7 @@ public class NormalizerResult implements ToXContentObject, Writeable { static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name"); static final ParseField PARTITION_FIELD_VALUE_FIELD = new ParseField("partition_field_value"); static final ParseField PERSON_FIELD_NAME_FIELD = new ParseField("person_field_name"); + static final ParseField PERSON_FIELD_VALUE_FIELD = new ParseField("person_field_value"); static final ParseField FUNCTION_NAME_FIELD = new ParseField("function_name"); static final ParseField VALUE_FIELD_NAME_FIELD = new ParseField("value_field_name"); static final ParseField PROBABILITY_FIELD = new ParseField("probability"); @@ -39,6 +41,7 @@ public class NormalizerResult implements ToXContentObject, Writeable { PARSER.declareString(NormalizerResult::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); PARSER.declareString(NormalizerResult::setPartitionFieldValue, PARTITION_FIELD_VALUE_FIELD); PARSER.declareString(NormalizerResult::setPersonFieldName, PERSON_FIELD_NAME_FIELD); + PARSER.declareString(NormalizerResult::setPersonFieldValue, PERSON_FIELD_VALUE_FIELD); PARSER.declareString(NormalizerResult::setFunctionName, FUNCTION_NAME_FIELD); PARSER.declareString(NormalizerResult::setValueFieldName, VALUE_FIELD_NAME_FIELD); PARSER.declareDouble(NormalizerResult::setProbability, PROBABILITY_FIELD); @@ -49,6 +52,7 @@ public class NormalizerResult implements ToXContentObject, Writeable { private String partitionFieldName; private String partitionFieldValue; private String personFieldName; + private String personFieldValue; private String functionName; private String valueFieldName; private double probability; @@ -62,6 +66,9 @@ public NormalizerResult(StreamInput in) throws IOException { partitionFieldName = in.readOptionalString(); partitionFieldValue = in.readOptionalString(); 
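
Note: the `NormalizerResult` hunks below gate the new `person_field_value` on the wire by node version, the usual backwards-compatibility pattern for adding an optional field within a release series. A sketch of that pattern under the same `Version.V_6_5_0` boundary (`ExampleResult` and `newField` are placeholder names, not types from this change):

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

class ExampleResult {
    private String newField;

    // Only read the field when the sender is new enough to have written it...
    void readFrom(StreamInput in) throws IOException {
        if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
            newField = in.readOptionalString();
        }
    }

    // ...and only write it when the receiver is new enough to expect it,
    // so both sides of a mixed 6.4/6.5 cluster agree on the stream layout.
    void writeTo(StreamOutput out) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
            out.writeOptionalString(newField);
        }
    }
}
```
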
personFieldName = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + personFieldValue = in.readOptionalString(); + } functionName = in.readOptionalString(); valueFieldName = in.readOptionalString(); probability = in.readDouble(); @@ -74,6 +81,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(partitionFieldName); out.writeOptionalString(partitionFieldValue); out.writeOptionalString(personFieldName); + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeOptionalString(personFieldValue); + } out.writeOptionalString(functionName); out.writeOptionalString(valueFieldName); out.writeDouble(probability); @@ -87,6 +97,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName); builder.field(PARTITION_FIELD_VALUE_FIELD.getPreferredName(), partitionFieldValue); builder.field(PERSON_FIELD_NAME_FIELD.getPreferredName(), personFieldName); + builder.field(PERSON_FIELD_VALUE_FIELD.getPreferredName(), personFieldValue); builder.field(FUNCTION_NAME_FIELD.getPreferredName(), functionName); builder.field(VALUE_FIELD_NAME_FIELD.getPreferredName(), valueFieldName); builder.field(PROBABILITY_FIELD.getPreferredName(), probability); @@ -127,6 +138,14 @@ public void setPersonFieldName(String personFieldName) { this.personFieldName = personFieldName; } + public String getPersonFieldValue() { + return personFieldValue; + } + + public void setPersonFieldValue(String personFieldValue) { + this.personFieldValue = personFieldValue; + } + public String getFunctionName() { return functionName; } @@ -161,7 +180,7 @@ public void setNormalizedScore(double normalizedScore) { @Override public int hashCode() { - return Objects.hash(level, partitionFieldName, partitionFieldValue, personFieldName, + return Objects.hash(level, partitionFieldName, partitionFieldValue, personFieldName, personFieldValue, functionName, valueFieldName, probability, normalizedScore); } @@ -184,6 +203,7 @@ public boolean equals(Object other) { && Objects.equals(this.partitionFieldName, that.partitionFieldName) && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) && Objects.equals(this.personFieldName, that.personFieldName) + && Objects.equals(this.personFieldValue, that.personFieldValue) && Objects.equals(this.functionName, that.functionName) && Objects.equals(this.valueFieldName, that.valueFieldName) && this.probability == that.probability diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/PartitionScoreNormalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/PartitionScoreNormalizable.java index 4d5d91aa12f8c..91b2a7a505e35 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/PartitionScoreNormalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/PartitionScoreNormalizable.java @@ -45,6 +45,11 @@ public String getPersonFieldName() { return null; } + @Override + public String getPersonFieldValue() { + return null; + } + @Override public String getFunctionName() { return null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/RecordNormalizable.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/RecordNormalizable.java index 97114130c84d9..f3f32cb526ead 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/RecordNormalizable.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/RecordNormalizable.java @@ -46,6 +46,12 @@ public String getPersonFieldName() { return over != null ? over : record.getByFieldName(); } + @Override + public String getPersonFieldValue() { + String over = record.getOverFieldValue(); + return over != null ? over : record.getByFieldValue(); + } + @Override public String getFunctionName() { return record.getFunction(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java deleted file mode 100644 index 8091c2a02c371..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessCtrlTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.job.process; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; -import org.elasticsearch.xpack.core.ml.job.config.DataDescription; -import org.elasticsearch.xpack.core.ml.job.config.Detector; -import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.mockito.Mockito; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; - -public class ProcessCtrlTests extends ESTestCase { - - private final Logger logger = Mockito.mock(Logger.class); - - public void testBuildAutodetectCommand() { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - Environment env = TestEnvironment.newEnvironment(settings); - Job.Builder job = buildJobBuilder("unit-test-job"); - - Detector.Builder detectorBuilder = new Detector.Builder("mean", "value"); - detectorBuilder.setPartitionFieldName("foo"); - AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build())); - acBuilder.setBucketSpan(TimeValue.timeValueSeconds(120)); - acBuilder.setLatency(TimeValue.timeValueSeconds(360)); - acBuilder.setSummaryCountFieldName("summaryField"); - acBuilder.setOverlappingBuckets(true); - acBuilder.setMultivariateByFields(true); - acBuilder.setUsePerPartitionNormalization(true); - job.setAnalysisConfig(acBuilder); - - DataDescription.Builder dd = new DataDescription.Builder(); - dd.setFormat(DataDescription.DataFormat.DELIMITED); - dd.setFieldDelimiter('|'); - dd.setTimeField("tf"); - job.setDataDescription(dd); - - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); - assertEquals(13, command.size()); - assertTrue(command.contains(ProcessCtrl.AUTODETECT_PATH)); - assertTrue(command.contains(ProcessCtrl.BUCKET_SPAN_ARG + "120")); - assertTrue(command.contains(ProcessCtrl.LATENCY_ARG + "360")); - assertTrue(command.contains(ProcessCtrl.SUMMARY_COUNT_FIELD_ARG + 
"summaryField")); - assertTrue(command.contains(ProcessCtrl.RESULT_FINALIZATION_WINDOW_ARG + "2")); - assertTrue(command.contains(ProcessCtrl.MULTIVARIATE_BY_FIELDS_ARG)); - - assertTrue(command.contains(ProcessCtrl.LENGTH_ENCODED_INPUT_ARG)); - assertTrue(command.contains(ProcessCtrl.maxAnomalyRecordsArg(settings))); - - assertTrue(command.contains(ProcessCtrl.TIME_FIELD_ARG + "tf")); - assertTrue(command.contains(ProcessCtrl.JOB_ID_ARG + "unit-test-job")); - - assertTrue(command.contains(ProcessCtrl.PER_PARTITION_NORMALIZATION)); - - int expectedPersistInterval = 10800 + ProcessCtrl.calculateStaggeringInterval(job.getId()); - assertTrue(command.contains(ProcessCtrl.PERSIST_INTERVAL_ARG + expectedPersistInterval)); - int expectedMaxQuantileInterval = 21600 + ProcessCtrl.calculateStaggeringInterval(job.getId()); - assertTrue(command.contains(ProcessCtrl.MAX_QUANTILE_INTERVAL_ARG + expectedMaxQuantileInterval)); - } - - public void testBuildAutodetectCommand_defaultTimeField() { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - Environment env = TestEnvironment.newEnvironment(settings); - Job.Builder job = buildJobBuilder("unit-test-job"); - - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); - - assertTrue(command.contains(ProcessCtrl.TIME_FIELD_ARG + "time")); - } - - public void testBuildAutodetectCommand_givenPersistModelState() { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(ProcessCtrl.DONT_PERSIST_MODEL_STATE_SETTING.getKey(), true).build(); - Environment env = TestEnvironment.newEnvironment(settings); - Job.Builder job = buildJobBuilder("unit-test-job"); - - int expectedPersistInterval = 10800 + ProcessCtrl.calculateStaggeringInterval(job.getId()); - - List command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); - assertFalse(command.contains(ProcessCtrl.PERSIST_INTERVAL_ARG + expectedPersistInterval)); - - settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - env = TestEnvironment.newEnvironment(settings); - - command = ProcessCtrl.buildAutodetectCommand(env, settings, job.build(), logger); - assertTrue(command.contains(ProcessCtrl.PERSIST_INTERVAL_ARG + expectedPersistInterval)); - } - - public void testBuildNormalizerCommand() throws IOException { - Environment env = TestEnvironment.newEnvironment( - Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()); - String jobId = "unit-test-job"; - - List command = ProcessCtrl.buildNormalizerCommand(env, jobId, null, 300, true); - assertEquals(4, command.size()); - assertTrue(command.contains(ProcessCtrl.NORMALIZE_PATH)); - assertTrue(command.contains(ProcessCtrl.BUCKET_SPAN_ARG + "300")); - assertTrue(command.contains(ProcessCtrl.LENGTH_ENCODED_INPUT_ARG)); - assertTrue(command.contains(ProcessCtrl.PER_PARTITION_NORMALIZATION)); - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessPipesTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessPipesTests.java index 37aeadaafde56..708d7af152014 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessPipesTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/ProcessPipesTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.env.Environment; 
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder; import org.elasticsearch.xpack.ml.utils.NamedPipeHelper; import org.mockito.Mockito; @@ -50,7 +51,7 @@ public void testProcessPipes() throws IOException { when(namedPipeHelper.openNamedPipeInputStream(contains("persist"), any(Duration.class))) .thenReturn(new ByteArrayInputStream(PERSIST_BYTES)); - ProcessPipes processPipes = new ProcessPipes(env, namedPipeHelper, ProcessCtrl.AUTODETECT, "my_job", + ProcessPipes processPipes = new ProcessPipes(env, namedPipeHelper, AutodetectBuilder.AUTODETECT, "my_job", true, false, true, true, true, true); List command = new ArrayList<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilderTests.java new file mode 100644 index 0000000000000..0f83106441185 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilderTests.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.process.autodetect; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; +import org.elasticsearch.xpack.core.ml.job.config.DataDescription; +import org.elasticsearch.xpack.core.ml.job.config.Detector; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.job.process.NativeController; +import org.elasticsearch.xpack.ml.job.process.ProcessPipes; +import org.junit.Before; + +import java.nio.file.Path; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; +import static org.mockito.Mockito.mock; + +public class AutodetectBuilderTests extends ESTestCase { + + private Logger logger; + private List filesToDelete; + private Environment env; + private Settings settings; + private NativeController nativeController; + private ProcessPipes processPipes; + + @Before + public void setUpTests() { + logger = mock(Logger.class); + filesToDelete = Collections.emptyList(); + settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); + env = TestEnvironment.newEnvironment(settings); + nativeController = mock(NativeController.class); + processPipes = mock(ProcessPipes.class); + } + + public void testBuildAutodetectCommand() { + Job.Builder job = buildJobBuilder("unit-test-job"); + + Detector.Builder detectorBuilder = new Detector.Builder("mean", "value"); + detectorBuilder.setPartitionFieldName("foo"); + AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build())); + acBuilder.setBucketSpan(TimeValue.timeValueSeconds(120)); + acBuilder.setLatency(TimeValue.timeValueSeconds(360)); + acBuilder.setSummaryCountFieldName("summaryField"); + 
acBuilder.setOverlappingBuckets(true); + acBuilder.setMultivariateByFields(true); + acBuilder.setUsePerPartitionNormalization(true); + job.setAnalysisConfig(acBuilder); + + DataDescription.Builder dd = new DataDescription.Builder(); + dd.setFormat(DataDescription.DataFormat.DELIMITED); + dd.setFieldDelimiter('|'); + dd.setTimeField("tf"); + job.setDataDescription(dd); + + List command = autodetectBuilder(job.build()).buildAutodetectCommand(); + assertEquals(13, command.size()); + assertTrue(command.contains(AutodetectBuilder.AUTODETECT_PATH)); + assertTrue(command.contains(AutodetectBuilder.BUCKET_SPAN_ARG + "120")); + assertTrue(command.contains(AutodetectBuilder.LATENCY_ARG + "360")); + assertTrue(command.contains(AutodetectBuilder.SUMMARY_COUNT_FIELD_ARG + "summaryField")); + assertTrue(command.contains(AutodetectBuilder.RESULT_FINALIZATION_WINDOW_ARG + "2")); + assertTrue(command.contains(AutodetectBuilder.MULTIVARIATE_BY_FIELDS_ARG)); + + assertTrue(command.contains(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG)); + assertTrue(command.contains(AutodetectBuilder.maxAnomalyRecordsArg(settings))); + + assertTrue(command.contains(AutodetectBuilder.TIME_FIELD_ARG + "tf")); + assertTrue(command.contains(AutodetectBuilder.JOB_ID_ARG + "unit-test-job")); + + assertTrue(command.contains(AutodetectBuilder.PER_PARTITION_NORMALIZATION)); + + int expectedPersistInterval = 10800 + AutodetectBuilder.calculateStaggeringInterval(job.getId()); + assertTrue(command.contains(AutodetectBuilder.PERSIST_INTERVAL_ARG + expectedPersistInterval)); + int expectedMaxQuantileInterval = 21600 + AutodetectBuilder.calculateStaggeringInterval(job.getId()); + assertTrue(command.contains(AutodetectBuilder.MAX_QUANTILE_INTERVAL_ARG + expectedMaxQuantileInterval)); + } + + public void testBuildAutodetectCommand_defaultTimeField() { + Job.Builder job = buildJobBuilder("unit-test-job"); + + List command = autodetectBuilder(job.build()).buildAutodetectCommand(); + + assertTrue(command.contains(AutodetectBuilder.TIME_FIELD_ARG + "time")); + } + + public void testBuildAutodetectCommand_givenPersistModelState() { + settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(AutodetectBuilder.DONT_PERSIST_MODEL_STATE_SETTING.getKey(), true).build(); + Job.Builder job = buildJobBuilder("unit-test-job"); + + int expectedPersistInterval = 10800 + AutodetectBuilder.calculateStaggeringInterval(job.getId()); + + List command = autodetectBuilder(job.build()).buildAutodetectCommand(); + assertFalse(command.contains(AutodetectBuilder.PERSIST_INTERVAL_ARG + expectedPersistInterval)); + + settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); + env = TestEnvironment.newEnvironment(settings); + + command = autodetectBuilder(job.build()).buildAutodetectCommand(); + assertTrue(command.contains(AutodetectBuilder.PERSIST_INTERVAL_ARG + expectedPersistInterval)); + } + + private AutodetectBuilder autodetectBuilder(Job job) { + return new AutodetectBuilder(job, filesToDelete, logger, env, settings, nativeController, processPipes); + } +} \ No newline at end of file diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizableTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizableTests.java index f83d51d84b009..bde5c3e44f9b4 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizableTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketInfluencerNormalizableTests.java @@ -43,10 +43,18 @@ public void testGetPartitionFieldName() { assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPartitionFieldName()); } + public void testGetPartitionFieldValue() { + assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPartitionFieldValue()); + } + public void testGetPersonFieldName() { assertEquals("airline", new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPersonFieldName()); } + public void testGetPersonFieldValue() { + assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getPersonFieldValue()); + } + public void testGetFunctionName() { assertNull(new BucketInfluencerNormalizable(bucketInfluencer, INDEX_NAME).getFunctionName()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizableTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizableTests.java index 630bffe11129a..4436fcc7026fe 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizableTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/BucketNormalizableTests.java @@ -73,6 +73,10 @@ public void testGetPersonFieldName() { assertNull(new BucketNormalizable(bucket, INDEX_NAME).getPersonFieldName()); } + public void testGetPersonFieldValue() { + assertNull(new BucketNormalizable(bucket, INDEX_NAME).getPersonFieldValue()); + } + public void testGetFunctionName() { assertNull(new BucketNormalizable(bucket, INDEX_NAME).getFunctionName()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizableTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizableTests.java index 215f88ad33224..ee5518b9c12d5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizableTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/InfluencerNormalizableTests.java @@ -44,6 +44,10 @@ public void testGetPersonFieldName() { assertEquals("airline", new InfluencerNormalizable(influencer, INDEX_NAME).getPersonFieldName()); } + public void testGetPersonFieldValue() { + assertEquals("AAL", new InfluencerNormalizable(influencer, INDEX_NAME).getPersonFieldValue()); + } + public void testGetFunctionName() { assertNull(new InfluencerNormalizable(influencer, INDEX_NAME).getFunctionName()); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilderTests.java new file mode 100644 index 0000000000000..64e595fd5a043 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerBuilderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.process.normalizer; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectBuilder; + +import java.io.IOException; +import java.util.List; + +public class NormalizerBuilderTests extends ESTestCase { + + public void testBuildNormalizerCommand() throws IOException { + Environment env = TestEnvironment.newEnvironment( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()); + String jobId = "unit-test-job"; + + List command = new NormalizerBuilder(env, jobId, null, 300, true).build(); + assertEquals(4, command.size()); + assertTrue(command.contains("./normalize")); + assertTrue(command.contains(AutodetectBuilder.BUCKET_SPAN_ARG + "300")); + assertTrue(command.contains(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG)); + assertTrue(command.contains(AutodetectBuilder.PER_PARTITION_NORMALIZATION)); + } +} \ No newline at end of file diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResultTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResultTests.java index ecaea449f95c1..af35c01aa871d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResultTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerResultTests.java @@ -19,6 +19,7 @@ public void testDefaultConstructor() { assertNull(msg.getPartitionFieldName()); assertNull(msg.getPartitionFieldValue()); assertNull(msg.getPersonFieldName()); + assertNull(msg.getPersonFieldValue()); assertNull(msg.getFunctionName()); assertNull(msg.getValueFieldName()); assertEquals(0.0, msg.getProbability(), EPSILON); @@ -32,6 +33,7 @@ protected NormalizerResult createTestInstance() { msg.setPartitionFieldName("part"); msg.setPartitionFieldValue("something"); msg.setPersonFieldName("person"); + msg.setPersonFieldValue("fred"); msg.setFunctionName("mean"); msg.setValueFieldName("value"); msg.setProbability(0.005); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index ecdcbd0fa1db8..1a02be666360f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -39,6 +38,7 @@ import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.ingest.PipelineConfiguration; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import 
org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.xpack.core.XPackClient; @@ -46,7 +46,6 @@ import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.watcher.client.WatcherClient; -import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchRequest; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchResponse; import org.elasticsearch.xpack.core.watcher.watch.Watch; @@ -385,7 +384,7 @@ private boolean hasIngestPipeline(final ClusterState clusterState, final String * } * */ - private void putIngestPipeline(final String pipelineId, final ActionListener<WritePipelineResponse> listener) { + private void putIngestPipeline(final String pipelineId, final ActionListener<AcknowledgedResponse> listener) { final String pipelineName = pipelineName(pipelineId); final BytesReference pipeline = BytesReference.bytes(loadPipeline(pipelineId, XContentType.JSON)); final PutPipelineRequest request = new PutPipelineRequest(pipelineName, pipeline, XContentType.JSON); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java index a0511dc17aa92..9b108d4f8c69f 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterSslIT.java @@ -144,7 +144,7 @@ public void testCanAddNewExporterWithSsl() { .put("xpack.monitoring.exporters._new.host", "https://" + webServer.getHostName() + ":" + webServer.getPort()) .put("xpack.monitoring.exporters._new.ssl.truststore.path", truststore) .put("xpack.monitoring.exporters._new.ssl.truststore.password", "testnode") - .put("xpack.monitoring.exporters._new.ssl.verification_mode", VerificationMode.NONE.name()) + .put("xpack.monitoring.exporters._new.ssl.verification_mode", VerificationMode.CERTIFICATE.name()) .build(); updateSettings.transientSettings(settings); final ActionFuture future = client().admin().cluster().updateSettings(updateSettings); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java index 22bf9ff06c26a..88c2986574792 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupIndexCaps.java @@ -75,7 +75,7 @@ public class RollupIndexCaps implements Writeable, ToXContentFragment { // "job-1" while (parser.nextToken().equals(XContentParser.Token.END_OBJECT) == false) { - jobs.add(RollupJobConfig.PARSER.apply(parser, aVoid).build()); + jobs.add(RollupJobConfig.fromXContent(parser, null)); } } } @@ -167,4 +167,4 @@ public boolean equals(Object other) { public int hashCode() { return Objects.hash(rollupIndexName, jobCaps); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index efa41c1257a2d..9119a5445d42e 100644 ---
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -89,12 +89,12 @@ private static void processKeys(Map keys, Map do if (k.endsWith("." + DateHistogramAggregationBuilder.NAME)) { assert v != null; doc.put(k + "." + RollupField.TIMESTAMP, v); - doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHisto().getInterval()); - doc.put(k + "." + DateHistogramGroupConfig.TIME_ZONE, groupConfig.getDateHisto().getTimeZone().toString()); + doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHistogram().getInterval()); + doc.put(k + "." + DateHistogramGroupConfig.TIME_ZONE, groupConfig.getDateHistogram().getTimeZone()); idGenerator.add((Long)v); } else if (k.endsWith("." + HistogramAggregationBuilder.NAME)) { doc.put(k + "." + RollupField.VALUE, v); - doc.put(k + "." + RollupField.INTERVAL, groupConfig.getHisto().getInterval()); + doc.put(k + "." + RollupField.INTERVAL, groupConfig.getHistogram().getInterval()); if (v == null) { idGenerator.addNull(); } else { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 308def9c22f53..87294706b3b7d 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -219,7 +219,7 @@ public synchronized boolean maybeTriggerAsyncJob(long now) { // rounds the current time to its current bucket based on the date histogram interval. // this is needed to exclude buckets that can still receive new documents. - DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHisto(); + DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); long rounded = dateHisto.createRounding().round(now); if (dateHisto.getDelay() != null) { // if the job has a delay we filter all documents that appear before it. @@ -396,11 +396,11 @@ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig confi // Add all the agg builders to our request in order: date_histo -> histo -> terms if (groupConfig != null) { - builders.addAll(groupConfig.getDateHisto().toBuilders()); - metadata.putAll(groupConfig.getDateHisto().getMetadata()); - if (groupConfig.getHisto() != null) { - builders.addAll(groupConfig.getHisto().toBuilders()); - metadata.putAll(groupConfig.getHisto().getMetadata()); + builders.addAll(groupConfig.getDateHistogram().toBuilders()); + metadata.putAll(groupConfig.getDateHistogram().getMetadata()); + if (groupConfig.getHistogram() != null) { + builders.addAll(groupConfig.getHistogram().toBuilders()); + metadata.putAll(groupConfig.getHistogram().getMetadata()); } if (groupConfig.getTerms() != null) { builders.addAll(groupConfig.getTerms().toBuilders()); @@ -426,7 +426,7 @@ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig confi */ private QueryBuilder createBoundaryQuery(Map position) { assert maxBoundary < Long.MAX_VALUE; - DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHisto(); + DateHistogramGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHistogram(); String fieldName = dateHisto.getField(); String rollupFieldName = fieldName + "." 
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index f41074e8e658f..231e382827e8a 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -32,7 +32,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String id = restRequest.param(ID.getPreferredName()); XContentParser parser = restRequest.contentParser(); - PutRollupJobAction.Request request = PutRollupJobAction.Request.parseRequest(id, parser); + PutRollupJobAction.Request request = PutRollupJobAction.Request.fromXContent(parser, id); return channel -> client.execute(PutRollupJobAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java index 5467e11c5a09e..3235d0c39e256 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; @@ -27,34 +26,32 @@ import java.util.Arrays; import java.util.HashSet; +import java.util.List; import java.util.Set; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.equalTo; public class RollupJobIdentifierUtilTests extends ESTestCase { public void testOneMatch() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHisto().getInterval()); + .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()); Set bestCaps = RollupJobIdentifierUtils.findBestJobs(builder, caps); assertThat(bestCaps.size(), equalTo(1)); } public void testBiggerButCompatibleInterval() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new
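DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job);

The hunks above capture the heart of this change: on the REST layer, request parsing moves from the parseRequest(id, parser) idiom to fromXContent(parser, id), and in the tests every RollupJobConfig.Builder/GroupConfig.Builder chain collapses into a single constructor call, which also removes the half-initialized states the old tests had to null out with setHisto(null) and setTerms(null). A sketch of the new construction path with the argument roles spelled out; the eight-argument order is read off the call sites in this diff, the names are illustrative, and the trailing null timeout presumably falls back to a default:

    import static java.util.Collections.emptyList;

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
    import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;

    public class RollupJobConfigSketch {
        public static RollupJobConfig hourlyJob() {
            final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("timestamp", new DateHistogramInterval("1h")));
            return new RollupJobConfig(
                "hourly-job",    // job id
                "logs-*",        // index pattern to roll up
                "rollup-logs",   // rollup index written to
                "*/5 * * * * ?", // cron schedule
                10,              // page size
                group,           // group config (the tests always supply a date histogram)
                emptyList(),     // metrics config
                null);           // timeout (null presumably selects the default)
        }
    }
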
DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -65,11 +62,9 @@ public void testBiggerButCompatibleInterval() { } public void testIncompatibleInterval() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -81,11 +76,9 @@ public void testIncompatibleInterval() { } public void testBadTimeZone() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "EST")); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "EST")); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -98,12 +91,10 @@ public void testBadTimeZone() { } public void testMetricOnlyAgg() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - job.setMetricsConfig(singletonList(new MetricConfig("bar", singletonList("max")))); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field("bar"); @@ -113,11 +104,9 @@ public void testMetricOnlyAgg() { } public void testOneOfTwoMatchingCaps() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new 
DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = singletonSet(cap); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -130,24 +119,16 @@ public void testOneOfTwoMatchingCaps() { } public void testTwoJobsSameRollupIndex() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - group.setTerms(null); - group.setHisto(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = new HashSet<>(2); caps.add(cap); - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2"); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - group2.setTerms(null); - group2.setHisto(null); - job2.setGroupConfig(group.build()); - job2.setRollupIndex(job.getRollupIndex()); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + RollupJobCaps cap2 = new RollupJobCaps(job2); caps.add(cap2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -160,21 +141,16 @@ public void testTwoJobsSameRollupIndex() { } public void testTwoJobsButBothPartialMatches() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - job.setMetricsConfig(singletonList(new MetricConfig("bar", singletonList("max")))); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final List metrics = singletonList(new MetricConfig("bar", singletonList("max"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); + RollupJobCaps cap = new RollupJobCaps(job); Set caps = new HashSet<>(2); caps.add(cap); - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2"); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job2.setGroupConfig(group.build()); - job.setMetricsConfig(singletonList(new MetricConfig("bar", singletonList("min")))); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + // TODO Is it what we 
DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") @@ -188,21 +164,14 @@ } public void testComparableDifferentDateIntervals() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))) - .setHisto(null) - .setTerms(null); - job2.setGroupConfig(group2.build()); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); + + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") .dateHistogramInterval(new DateHistogramInterval("1d")); @@ -217,21 +186,14 @@ } public void testComparableDifferentDateIntervalsOnlyOneWorks() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))) - .setHisto(null) - .setTerms(null); - job2.setGroupConfig(group2.build()); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); + + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"))); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo")
.dateHistogramInterval(new DateHistogramInterval("1h")); @@ -246,21 +208,15 @@ public void testComparableDifferentDateIntervalsOnlyOneWorks() { } public void testComparableNoHistoVsHisto() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(new HistogramGroupConfig(100L, "bar")) - .setTerms(null); - job2.setGroupConfig(group2.build()); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); + + final HistogramGroupConfig histoConfig = new HistogramGroupConfig(100L, "bar"); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), histoConfig, null); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") .dateHistogramInterval(new DateHistogramInterval("1h")) @@ -276,21 +232,15 @@ public void testComparableNoHistoVsHisto() { } public void testComparableNoTermsVsTerms() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(new TermsGroupConfig("bar")); - job2.setGroupConfig(group2.build()); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); + + final TermsGroupConfig termsConfig = new TermsGroupConfig("bar"); + final GroupConfig group2 = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, termsConfig); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, group2, emptyList(), null); + RollupJobCaps cap2 = new RollupJobCaps(job2); DateHistogramAggregationBuilder builder = new DateHistogramAggregationBuilder("foo").field("foo") .dateHistogramInterval(new 
DateHistogramInterval("1h")) @@ -312,15 +262,16 @@ public void testHistoSameNameWrongTypeInCaps() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() + final GroupConfig group = new GroupConfig( // NOTE same name but wrong type - .setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .setHisto(new HistogramGroupConfig(1L, "baz")) // <-- NOTE right type but wrong name - .build()) - .setMetricsConfig( - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg")))) - .build(); + new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final List metrics = + Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); @@ -335,13 +286,13 @@ public void testMissingDateHisto() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() - .setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .build()) - .setMetricsConfig( - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg")))) - .build(); + final GroupConfig group = new GroupConfig( + new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + ); + final List metrics = + Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); Exception e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo,caps)); @@ -356,12 +307,11 @@ public void testNoMatchingInterval() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() + final GroupConfig group = new GroupConfig( // interval in job is much higher than agg interval above - .setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID())) - .build()) - .build(); + new DateHistogramGroupConfig("foo", new DateHistogramInterval("100d"), null, DateTimeZone.UTC.getID()) + ); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); Exception e = expectThrows(RuntimeException.class, 
() -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); @@ -376,14 +326,14 @@ public void testDateHistoMissingFieldInCaps() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() + final GroupConfig group = new GroupConfig( // NOTE different field from the one in the query - .setDateHisto(new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .build()) - .setMetricsConfig( - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg")))) - .build(); + new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()) + ); + final List metrics = + Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); @@ -398,14 +348,15 @@ public void testHistoMissingFieldInCaps() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() - .setDateHisto(new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .setHisto(new HistogramGroupConfig(1L, "baz")) // <-- NOTE right type but wrong name - .build()) - .setMetricsConfig( - Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg")))) - .build(); + final GroupConfig group = new GroupConfig( + new DateHistogramGroupConfig("bar", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final List metrics = + Arrays.asList(new MetricConfig("max_field", singletonList("max")), new MetricConfig("avg_field", singletonList("avg"))); + + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, metrics, null); Set caps = singletonSet(new RollupJobCaps(job)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RollupJobIdentifierUtils.findBestJobs(histo, caps)); @@ -420,12 +371,12 @@ public void testNoMatchingHistoInterval() { .subAggregation(new MaxAggregationBuilder("the_max").field("max_field")) .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field")); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() - .setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .setHisto(new HistogramGroupConfig(1L, "baz")) // <-- NOTE right type but wrong name - .build()) - .build(); + final GroupConfig group = new GroupConfig( + new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID()), + new HistogramGroupConfig(1L, "baz"), // <-- NOTE right type but wrong name + null + ); + final RollupJobConfig job = new 
RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); Set caps = singletonSet(new RollupJobCaps(job)); Exception e = expectThrows(RuntimeException.class, @@ -437,10 +388,10 @@ public void testNoMatchingHistoInterval() { public void testMissingMetric() { int i = ESTestCase.randomIntBetween(0, 3); - Set caps = singletonSet(new RollupJobCaps(ConfigTestHelpers - .getRollupJob("foo") - .setMetricsConfig(singletonList(new MetricConfig("foo", Arrays.asList("avg", "max", "min", "sum")))) - .build())); + final GroupConfig group = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final List metrics = singletonList(new MetricConfig("foo", Arrays.asList("avg", "max", "min", "sum"))); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, group, emptyList(), null); + Set caps = singletonSet(new RollupJobCaps(job)); String aggType; Exception e; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java index 08663eb9bba33..a618e8b4e6f63 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java @@ -27,25 +27,18 @@ import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; -import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; -import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import static java.util.Collections.emptyList; -import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.rollup.RollupRequestTranslator.translateAggregation; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsInstanceOf.instanceOf; @@ -153,11 +146,6 @@ public void testSimpleMetric() { } public void testUnsupportedMetric() { - Set caps = singletonSet(new RollupJobCaps(ConfigTestHelpers - .getRollupJob("foo") - .setMetricsConfig(singletonList(new MetricConfig("foo", Arrays.asList("avg", "max", "min", "sum")))) - .build())); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> translateAggregation(new StatsAggregationBuilder("test_metric") .field("foo"), Collections.emptyList(), namedWriteableRegistry)); @@ -384,10 +372,4 @@ public void testUnsupportedAgg() { assertThat(e.getMessage(), equalTo("Unable to translate aggregation tree into Rollup. 
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java index 419feb6f19c0b..bf73ceb88113e 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java @@ -70,7 +70,7 @@ public void testStateCheckNoMatchingPersistentTasks() { public void testStateCheckMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("foo"); - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); Map> tasks = Collections.singletonMap("foo", new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); ClusterState state = ClusterState.builder(new ClusterName("_name")) @@ -83,7 +83,7 @@ public void testStateCheckAllMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("_all"); - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); Map> tasks = Collections.singletonMap("foo", new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); ClusterState state = ClusterState.builder(new ClusterName("_name")) @@ -96,8 +96,8 @@ public void testStateCheckAllWithSeveralMatchingPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("_all"); - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); - RollupJob job2 = new RollupJob(ConfigTestHelpers.getRollupJob("bar").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); + RollupJob job2 = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "bar"), Collections.emptyMap()); Map> tasks = new HashMap<>(2); tasks.put("foo", new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); tasks.put("bar", new PersistentTasksCustomMetaData.PersistentTask<>("bar", RollupJob.NAME, job2, 1, null)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java index e3a45dbd66b87..9068bcfce36a4 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java @@ -89,7 +89,7 @@ public void testMissingJob() throws IOException { public void testOneJob() throws IOException { String indexPattern = randomBoolean() ?
randomAlphaOfLength(10) : randomAlphaOfLength(10) + "-*"; String jobName = randomAlphaOfLength(5); - RollupJobConfig job = ConfigTestHelpers.getRollupJob(jobName).build(); + RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName); MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, Collections.singletonMap(RollupField.TYPE_NAME, @@ -113,7 +113,7 @@ public void testMultipleJobs() throws IOException { Map jobs = new HashMap<>(num); for (int i = 0; i < num; i++) { String jobName = randomAlphaOfLength(5); - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName)); } MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, @@ -147,7 +147,7 @@ public void testAllIndices() throws IOException { String jobName = randomAlphaOfLength(10); String indexName = Integer.toString(indexCounter); indexCounter += 1; - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).setIndexPattern(indexName).build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName)); } MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, @@ -179,7 +179,7 @@ public void testOneIndex() throws IOException { Map jobs = new HashMap<>(num); for (int i = 0; i < num; i++) { String jobName = randomAlphaOfLength(5); - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).setIndexPattern(indexName).build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName)); } MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java index 2066d6649965f..e9d5d6153b18c 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java @@ -60,7 +60,7 @@ public void testAllIndicesByRollupSingleRollup() throws IOException { String jobName = randomAlphaOfLength(10); String indexName = Integer.toString(indexCounter); indexCounter += 1; - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).setRollupIndex("foo").build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName, "foo")); } MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, @@ -89,10 +89,7 @@ public void testAllIndicesByRollupManyRollup() throws IOException { String jobName = randomAlphaOfLength(10); String indexName = Integer.toString(indexCounter); indexCounter += 1; - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) - .setIndexPattern(indexName) - .setRollupIndex("rollup_" + indexName).build()); - + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, indexName, "rollup_" + indexName)); MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, Collections.singletonMap(RollupField.TYPE_NAME, @@ -120,9 +117,7 @@ public void testOneIndexByRollupManyRollup() throws IOException { String jobName = randomAlphaOfLength(10); String indexName = Integer.toString(indexCounter); indexCounter += 1; - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) - .setIndexPattern("foo_" + indexName) - 
.setRollupIndex("rollup_" + indexName).build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, "foo_" + indexName, "rollup_" + indexName)); MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, Collections.singletonMap(RollupField.TYPE_NAME, @@ -151,9 +146,7 @@ public void testOneIndexByRollupOneRollup() throws IOException { String jobName = randomAlphaOfLength(10); String indexName = Integer.toString(indexCounter); indexCounter += 1; - jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName) - .setIndexPattern("foo_" + indexName) - .setRollupIndex("rollup_foo").build()); + jobs.put(jobName, ConfigTestHelpers.randomRollupJobConfig(random(), jobName, "foo_" + indexName, "rollup_foo")); MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME, Collections.singletonMap(RollupField.TYPE_NAME, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java index 254c2a8c81194..848bd5f13dd9e 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobActionRequestTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.junit.Before; +import java.io.IOException; + public class PutJobActionRequestTests extends AbstractStreamableXContentTestCase { private String jobId; @@ -23,7 +25,7 @@ public void setupJobID() { @Override protected Request createTestInstance() { - return new Request(ConfigTestHelpers.getRollupJob(jobId).build()); + return new Request(ConfigTestHelpers.randomRollupJobConfig(random(), jobId)); } @Override @@ -37,9 +39,8 @@ protected Request createBlankInstance() { } @Override - protected Request doParseInstance(XContentParser parser) { - return Request.parseRequest(jobId, parser); + protected Request doParseInstance(final XContentParser parser) throws IOException { + return Request.fromXContent(parser, jobId); } - } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index d7bc2786646bf..d9caad5147d41 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -50,7 +50,7 @@ public class PutJobStateMachineTests extends ESTestCase { @SuppressWarnings("unchecked") public void testCreateIndexException() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -76,7 +76,7 @@ public void testCreateIndexException() { @SuppressWarnings("unchecked") public void testIndexAlreadyExists() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener 
success should not have been triggered."); @@ -108,7 +108,7 @@ public void testIndexAlreadyExists() { @SuppressWarnings("unchecked") public void testIndexMetaData() throws InterruptedException { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -151,7 +151,7 @@ public void testIndexMetaData() throws InterruptedException { @SuppressWarnings("unchecked") public void testGetMappingFails() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -175,7 +175,7 @@ public void testGetMappingFails() { @SuppressWarnings("unchecked") public void testNoMetadataInMapping() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -208,7 +208,7 @@ public void testNoMetadataInMapping() { @SuppressWarnings("unchecked") public void testNoMappingVersion() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -245,7 +245,7 @@ public void testNoMappingVersion() { @SuppressWarnings("unchecked") public void testJobAlreadyInMapping() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -282,12 +282,12 @@ public void testJobAlreadyInMapping() { @SuppressWarnings("unchecked") public void testAddJobToMapping() { - RollupJobConfig unrelatedJob = ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLength(10)) - .setIndexPattern("foo").setRollupIndex("rollup_index_foo").build(); - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo") - .setIndexPattern("foo") - .setRollupIndex("rollup_index_foo") - .build(), Collections.emptyMap()); + final RollupJobConfig unrelatedJob = + ConfigTestHelpers.randomRollupJobConfig(random(), ESTestCase.randomAlphaOfLength(10), "foo", "rollup_index_foo"); + + final RollupJobConfig config = + ConfigTestHelpers.randomRollupJobConfig(random(), ESTestCase.randomAlphaOfLength(10), "foo", "rollup_index_foo"); + RollupJob job = new RollupJob(config, Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); }, e -> { @@ -331,7 +331,7 @@ public void testAddJobToMapping() { @SuppressWarnings("unchecked") public void testTaskAlreadyExists() { - RollupJob job = new 
RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random(), "foo"), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); @@ -354,7 +354,7 @@ public void testTaskAlreadyExists() { @SuppressWarnings("unchecked") public void testStartTask() { - RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); ActionListener testListener = ActionListener.wrap(response -> { fail("Listener success should not have been triggered."); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java index a1e4dba0fffda..78b1e1e0d2d0c 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupIndexCapsTests.java @@ -30,8 +30,8 @@ public void testSetEmptyJobs() { public void testGetAllJobs() { List jobs = new ArrayList<>(2); - jobs.add(ConfigTestHelpers.getRollupJob("foo").build()); - jobs.add(ConfigTestHelpers.getRollupJob("bar").build()); + jobs.add(ConfigTestHelpers.randomRollupJobConfig(random(), "foo")); + jobs.add(ConfigTestHelpers.randomRollupJobConfig(random(), "bar")); RollupIndexCaps caps = new RollupIndexCaps(ESTestCase.randomAlphaOfLength(10), jobs); assertTrue(caps.hasCaps()); @@ -45,8 +45,8 @@ public void testGetAllJobs() { public void testFilterGetJobs() { List jobs = new ArrayList<>(2); - jobs.add(ConfigTestHelpers.getRollupJob("foo").setIndexPattern("foo_index_pattern").build()); - jobs.add(ConfigTestHelpers.getRollupJob("bar").build()); + jobs.add(ConfigTestHelpers.randomRollupJobConfig(random(), "foo", "foo_index_pattern")); + jobs.add(ConfigTestHelpers.randomRollupJobConfig(random(), "bar")); RollupIndexCaps caps = new RollupIndexCaps(ESTestCase.randomAlphaOfLength(10), jobs); assertTrue(caps.hasCaps()); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 68c132b425702..a80b05e8adbd5 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -16,9 +16,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; @@ -30,8 +27,6 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.script.ScriptService; import 
org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.Aggregations; @@ -54,11 +49,13 @@ import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.elasticsearch.xpack.rollup.Rollup; import org.hamcrest.core.IsEqual; import org.joda.time.DateTimeZone; +import org.junit.Before; import org.mockito.Mockito; import java.io.IOException; @@ -71,6 +68,8 @@ import java.util.Map; import java.util.Set; +import static java.util.Collections.emptyList; +import static java.util.Collections.singleton; import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig; import static org.elasticsearch.xpack.core.rollup.RollupField.COUNT_FIELD; import static org.hamcrest.Matchers.equalTo; @@ -81,10 +80,12 @@ public class SearchActionTests extends ESTestCase { private NamedWriteableRegistry namedWriteableRegistry; + + @Before public void setUp() throws Exception { super.setUp(); - IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); - SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + IndicesModule indicesModule = new IndicesModule(emptyList()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); List entries = new ArrayList<>(); entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); @@ -118,11 +119,9 @@ public void testBadQuery() { } public void testRange() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("UTC"), caps); @@ -131,11 +130,9 @@ public void testRange() { } public void testRangeNullTimeZone() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, null)); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps); @@ -144,11 +141,9 @@ public void 
testRangeNullTimeZone() { } public void testRangeWrongTZ() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"), null, "UTC")); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); Exception e = expectThrows(IllegalArgumentException.class, @@ -158,11 +153,10 @@ public void testRangeWrongTZ() { } public void testTermQuery() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setTerms(new TermsGroupConfig("foo")); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final TermsGroupConfig terms = new TermsGroupConfig("foo"); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("boo", new DateHistogramInterval("1h")), null, terms); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps); @@ -171,16 +165,14 @@ public void testTermQuery() { } public void testTermsQuery() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setTerms(new TermsGroupConfig("foo")); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final TermsGroupConfig terms = new TermsGroupConfig("foo"); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("boo", new DateHistogramInterval("1h")), null, terms); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder original = new TermsQueryBuilder("foo", Arrays.asList("bar", "baz")); - QueryBuilder rewritten = - TransportRollupSearchAction.rewriteQuery(original, caps); + QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(original, caps); assertThat(rewritten, instanceOf(TermsQueryBuilder.class)); assertNotSame(rewritten, original); assertThat(((TermsQueryBuilder)rewritten).fieldName(), equalTo("foo.terms.value")); @@ -188,11 +180,9 @@ public void testTermsQuery() { } public void testCompounds() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, 
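emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap);

Beyond the constructor swap, testTermQuery and testTermsQuery above now have to pass a date histogram to GroupConfig even though they only exercise the terms grouping, and their assertions document the rollup field-naming convention: a term(s) query against a grouped field is rewritten to the flattened rollup field. A compact restatement of what those assertions encode, using the same rewriteQuery entry point and a caps set built as in the tests:

    // A terms grouping on "foo" is queried through the flattened
    // rollup field "foo.terms.value" after the rewrite.
    QueryBuilder original = new TermQueryBuilder("foo", "bar");
    QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(original, caps);
    assertThat(((TermQueryBuilder) rewritten).fieldName(), equalTo("foo.terms.value"));
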
@@ -204,11 +194,9 @@ } public void testMatchAll() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new MatchAllQueryBuilder(), caps); @@ -216,12 +204,10 @@ } public void testAmbiguousResolution() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - group.setTerms(new TermsGroupConfig("foo")); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); + final TermsGroupConfig terms = new TermsGroupConfig("foo"); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, terms); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); Set caps = new HashSet<>(); caps.add(cap); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -367,12 +353,10 @@ public void testLiveOnlyCreateMSearch() { } public void testGood() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - Set caps = singletonSet(cap); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))); + final RollupJobConfig config = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(config); + Set caps = singleton(cap); String[] normalIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; String[] rollupIndices = new String[]{ESTestCase.randomAlphaOfLength(10)}; @@ -385,7 +369,7 @@ public void testGood() { source.query(getQueryBuilder(1)); source.size(0); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHisto().getInterval())); + .dateHistogramInterval(config.getGroupConfig().getDateHistogram().getInterval())); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -413,12 +397,10 @@ public void testGoodButNullQuery() { source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo").dateHistogramInterval(new DateHistogramInterval("1d")));
SearchRequest request = new SearchRequest(combinedIndices, source); - RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo") - .setGroupConfig(ConfigTestHelpers.getGroupConfig() - .setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())) - .build()) - .build(); - Set caps = singletonSet(new RollupJobCaps(job)); + final GroupConfig groupConfig = + new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1d"), null, DateTimeZone.UTC.getID())); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + Set caps = singleton(new RollupJobCaps(job)); TransportRollupSearchAction.RollupSearchContext ctx = new TransportRollupSearchAction.RollupSearchContext(normalIndices, rollupIndices, caps); @@ -438,20 +420,15 @@ public void testGoodButNullQuery() { } public void testTwoMatchingJobs() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - job2.setGroupConfig(group.build()); + final GroupConfig groupConfig = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, null); + final RollupJobConfig job = new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null); + RollupJobCaps cap = new RollupJobCaps(job); // so that the jobs aren't exactly equal - job2.setMetricsConfig(ConfigTestHelpers.randomMetricsConfigs(random())); - RollupJobCaps cap2 = new RollupJobCaps(job2.build()); + final List metricConfigs = ConfigTestHelpers.randomMetricsConfigs(random()); + final RollupJobConfig job2 = + new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, groupConfig, metricConfigs, null); + RollupJobCaps cap2 = new RollupJobCaps(job2); Set caps = new HashSet<>(2); caps.add(cap); @@ -468,7 +445,7 @@ public void testTwoMatchingJobs() { source.query(getQueryBuilder(1)); source.size(0); source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo") - .dateHistogramInterval(job.getGroupConfig().getDateHisto().getInterval())); + .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval())); SearchRequest request = new SearchRequest(combinedIndices, source); MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx); @@ -488,21 +465,17 @@ public void testTwoMatchingJobs() { } public void testTwoMatchingJobsOneBetter() { - RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo"); - GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig(); - group.setDateHisto(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h"))) - .setHisto(null) - .setTerms(null); - job.setGroupConfig(group.build()); - RollupJobCaps cap = new RollupJobCaps(job.build()); - - RollupJobConfig.Builder job2 = ConfigTestHelpers.getRollupJob("foo2").setRollupIndex(job.getRollupIndex()); - GroupConfig.Builder group2 = ConfigTestHelpers.getGroupConfig(); - group2.setDateHisto(group.getDateHisto()) - .setHisto(randomHistogramGroupConfig(random())) - .setTerms(null); - 
-        job2.setGroupConfig(group2.build());
-        RollupJobCaps cap2 = new RollupJobCaps(job2.build());
+        final GroupConfig groupConfig =
+            new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")), null, null);
+        final RollupJobConfig job =
+            new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groupConfig, emptyList(), null);
+        RollupJobCaps cap = new RollupJobCaps(job);
+
+        final GroupConfig groupConfig2 =
+            new GroupConfig(groupConfig.getDateHistogram(), randomHistogramGroupConfig(random()), null);
+        final RollupJobConfig job2 =
+            new RollupJobConfig("foo2", "index", job.getRollupIndex(), "*/5 * * * * ?", 10, groupConfig2, emptyList(), null);
+        RollupJobCaps cap2 = new RollupJobCaps(job2);
 
         Set<RollupJobCaps> caps = new HashSet<>(2);
         caps.add(cap);
@@ -519,7 +492,7 @@ public void testTwoMatchingJobsOneBetter() {
         source.query(getQueryBuilder(1));
         source.size(0);
         source.aggregation(new DateHistogramAggregationBuilder("foo").field("foo")
-                .dateHistogramInterval(job.getGroupConfig().getDateHisto().getInterval()));
+                .dateHistogramInterval(job.getGroupConfig().getDateHistogram().getInterval()));
         SearchRequest request = new SearchRequest(combinedIndices, source);
 
         MultiSearchRequest msearch = TransportRollupSearchAction.createMSearchRequest(request, namedWriteableRegistry, ctx);
@@ -587,7 +560,7 @@ public void testMatchingIndexInMetadata() throws IOException {
         String[] indices = new String[]{"foo"};
 
         String jobName = randomAlphaOfLength(5);
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob(jobName).build();
+        RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName);
 
         MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
                 Collections.singletonMap(RollupField.TYPE_NAME,
@@ -631,7 +604,7 @@ public void testRollupOnly() throws IOException {
         String[] indices = new String[]{"foo"};
 
         String jobName = randomAlphaOfLength(5);
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob(jobName).build();
+        RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName);
 
         MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
                 Collections.singletonMap(RollupField.TYPE_NAME,
@@ -695,7 +668,7 @@ public void testTooManyRollups() throws IOException {
         String[] indices = new String[]{"foo", "bar"};
 
         String jobName = randomAlphaOfLength(5);
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob(jobName).build();
+        RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName);
 
         MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
                 Collections.singletonMap(RollupField.TYPE_NAME,
@@ -730,7 +703,7 @@ public void testBoth() throws IOException {
         String[] indices = new String[]{"foo", "bar"};
 
         String jobName = randomAlphaOfLength(5);
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob(jobName).build();
+        RollupJobConfig job = ConfigTestHelpers.randomRollupJobConfig(random(), jobName);
 
         MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
                 Collections.singletonMap(RollupField.TYPE_NAME,
@@ -760,7 +733,7 @@ public void testBoth() throws IOException {
         SearchResponse protoResponse = mock(SearchResponse.class);
         when(protoResponse.getTook()).thenReturn(new TimeValue(100));
         List<InternalAggregation> protoAggTree = new ArrayList<>(1);
-        InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, Collections.emptyList(), null);
+        InternalAvg internalAvg = new InternalAvg("foo", 10, 2, DocValueFormat.RAW, emptyList(), null);
         protoAggTree.add(internalAvg);
         Aggregations protoMockAggs = new InternalAggregations(protoAggTree);
         when(protoResponse.getAggregations()).thenReturn(protoMockAggs);
@@ -800,14 +773,9 @@ public void testBoth() throws IOException {
 
         MultiSearchResponse msearchResponse
                 = new MultiSearchResponse(new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse});
-
-        BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
-        ScriptService scriptService = mock(ScriptService.class);
-
         SearchResponse response = TransportRollupSearchAction.processResponses(separateIndices, msearchResponse,
                 mock(InternalAggregation.ReduceContext.class));
-
         assertNotNull(response);
         Aggregations responseAggs = response.getAggregations();
         assertNotNull(responseAggs);
@@ -815,10 +783,4 @@ public void testBoth() throws IOException {
         assertThat(avg.getValue(), IsEqual.equalTo(5.0));
     }
-
-    private Set<RollupJobCaps> singletonSet(RollupJobCaps cap) {
-        Set<RollupJobCaps> caps = new HashSet<>();
-        caps.add(cap);
-        return caps;
-    }
 }
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java
index b27953e4e0a62..a156585b609a7 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/TransportTaskHelperTests.java
@@ -9,7 +9,6 @@
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.tasks.TaskManager;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
 import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
 import org.elasticsearch.xpack.rollup.job.RollupJobTask;
 
@@ -18,6 +17,7 @@
 import java.util.Map;
 import java.util.function.Consumer;
 
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig;
 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -25,7 +25,7 @@
 public class TransportTaskHelperTests extends ESTestCase {
 
     public void testProcessRequestOneMatching() {
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo").build();
+        RollupJobConfig job = randomRollupJobConfig(random(), "foo");
         TaskManager taskManager = mock(TaskManager.class);
         RollupJobTask task = mock(RollupJobTask.class);
         when(task.getDescription()).thenReturn("rollup_foo");
@@ -58,13 +58,13 @@ public void testProcessRequestMultipleMatching() {
         Map<Long, Task> tasks = getRandomTasks();
         when(taskManager.getTasks()).thenReturn(tasks);
 
-        RollupJobConfig job = ConfigTestHelpers.getRollupJob("foo").build();
+        RollupJobConfig job = randomRollupJobConfig(random(), "foo");
         RollupJobTask task = mock(RollupJobTask.class);
         when(task.getDescription()).thenReturn("rollup_foo");
         when(task.getConfig()).thenReturn(job);
         tasks.put(1L, task);
 
-        RollupJobConfig job2 = ConfigTestHelpers.getRollupJob("foo").build();
+        RollupJobConfig job2 = randomRollupJobConfig(random(), "foo");
         RollupJobTask task2 = mock(RollupJobTask.class);
         when(task2.getDescription()).thenReturn("rollup_foo");
         when(task2.getConfig()).thenReturn(job2);
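Taken together, the hunks above replace every ConfigTestHelpers.getRollupJob(...).build() and GroupConfig.Builder chain with direct constructor calls. A minimal sketch of the construction style the rewritten tests use; the literal values are the fixed test arguments that recur throughout this diff, and the argument order is the one the new constructor calls above follow (id, index pattern, rollup index, cron, page size, groups, metrics, timeout):

    import static java.util.Collections.emptyList;

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
    import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;

    class RollupConfigSketch {
        static RollupJobConfig hourlyJob() {
            // Group only on an hourly date histogram of "foo"; the single-argument
            // GroupConfig constructor leaves the histogram and terms groupings unset.
            GroupConfig groups = new GroupConfig(new DateHistogramGroupConfig("foo", new DateHistogramInterval("1h")));
            // id, index pattern, rollup index, cron, page size, groups, metrics, timeout;
            // a null timeout falls back to whatever default the constructor applies.
            return new RollupJobConfig("foo", "index", "rollup", "*/5 * * * * ?", 10, groups, emptyList(), null);
        }
    }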
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
index b8c11971111a8..86891eda669fa 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/config/ConfigTests.java
@@ -7,13 +7,11 @@
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
 import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
 import org.elasticsearch.xpack.core.rollup.job.RollupJob;
-import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
 import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
 import org.joda.time.DateTimeZone;
 
@@ -22,6 +20,9 @@
 import static java.util.Collections.emptyList;
 import static java.util.Collections.singletonList;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupJobConfig;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig;
 import static org.hamcrest.Matchers.equalTo;
 
 //TODO split this into dedicated unit test classes (one for each config object)
 public class ConfigTests extends ESTestCase {
@@ -43,131 +44,14 @@ public void testEmptyMetrics() {
     }
 
     public void testEmptyGroup() {
-        GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig();
-        groupConfig.setDateHisto(null);
-        groupConfig.setTerms(null);
-        groupConfig.setHisto(null);
-
-        Exception e = expectThrows(IllegalArgumentException.class, groupConfig::build);
-        assertThat(e.getMessage(), equalTo("A date_histogram group is mandatory"));
+        Exception e = expectThrows(IllegalArgumentException.class, () -> new GroupConfig(null, null, null));
+        assertThat(e.getMessage(), equalTo("Date histogram must not be null"));
     }
 
     public void testNoDateHisto() {
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setTerms(ConfigTestHelpers.randomTermsGroupConfig(random()));
-        groupConfig.setHisto(ConfigTestHelpers.randomHistogramGroupConfig(random()));
-
-        Exception e = expectThrows(IllegalArgumentException.class, groupConfig::build);
-        assertThat(e.getMessage(), equalTo("A date_histogram group is mandatory"));
-    }
-
-    public void testEmptyGroupAndMetrics() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setGroupConfig(null);
-        job.setMetricsConfig(null);
-
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("At least one grouping or metric must be configured."));
-    }
-
-    public void testEmptyJobID() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob(null);
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An ID is mandatory."));
-
-        job = ConfigTestHelpers.getRollupJob("");
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An ID is mandatory."));
-
-        job.setId("");
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An ID is mandatory."));
-
-        job.setId(null);
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An ID is mandatory."));
-    }
-
-    public void testEmptyCron() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setCron("");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("A cron schedule is mandatory."));
-
-        job.setCron(null);
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("A cron schedule is mandatory."));
-    }
-
-    public void testBadCron() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setCron("0 * * *");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("invalid cron expression [0 * * *]"));
-    }
-
-    public void testEmptyIndexPattern() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setIndexPattern("");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An index pattern is mandatory."));
-
-        job.setIndexPattern(null);
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("An index pattern is mandatory."));
-    }
-
-    public void testMatchAllIndexPattern() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setIndexPattern("*");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Index pattern must not match all indices (as it would match it's own rollup index"));
-    }
-
-    public void testMatchOwnRollupPatternPrefix() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setIndexPattern("foo-*");
-        job.setRollupIndex("foo-rollup");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed."));
-    }
-
-    public void testMatchOwnRollupPatternSuffix() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setIndexPattern("*-rollup");
-        job.setRollupIndex("foo-rollup");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Index pattern would match rollup index name which is not allowed."));
-    }
-
-    public void testIndexPatternIdenticalToRollup() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setIndexPattern("foo");
-        job.setRollupIndex("foo");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Rollup index may not be the same as the index pattern."));
-    }
-
-    public void testEmptyRollupIndex() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setRollupIndex("");
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("A rollup index name is mandatory."));
-
-        job.setRollupIndex(null);
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("A rollup index name is mandatory."));
-    }
-
-    public void testBadSize() {
-        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
-        job.setPageSize(-1);
-        Exception e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Parameter [page_size] is mandatory and must be a positive long."));
-
-        job.setPageSize(0);
-        e = expectThrows(IllegalArgumentException.class, job::build);
-        assertThat(e.getMessage(), equalTo("Parameter [page_size] is mandatory and must be a positive long."));
+        Exception e = expectThrows(IllegalArgumentException.class,
+            () -> new GroupConfig(null, randomHistogramGroupConfig(random()), randomTermsGroupConfig(random())));
+        assertThat(e.getMessage(), equalTo("Date histogram must not be null"));
     }
 
     public void testEmptyDateHistoField() {
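The deleted builder tests validated lazily in build(); the rewritten testEmptyGroup and testNoDateHisto pin the same invariant to the GroupConfig constructor, which now fails eagerly. A sketch of that contract, with the message text taken verbatim from the assertions above:

    import org.elasticsearch.xpack.core.rollup.job.GroupConfig;

    class GroupConfigInvariantSketch {
        static void demo() {
            try {
                new GroupConfig(null, null, null); // the date histogram is the mandatory first argument
                throw new AssertionError("expected the constructor to reject a null date histogram");
            } catch (IllegalArgumentException expected) {
                assert expected.getMessage().equals("Date histogram must not be null");
            }
        }
    }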
@@ -231,8 +115,7 @@ public void testNoHeadersInJSON() {
         Map<String, String> headers = new HashMap<>(1);
         headers.put("es-security-runas-user", "foo");
         headers.put("_xpack_security_authentication", "bar");
-        RollupJobConfig config = ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build();
-        RollupJob job = new RollupJob(config, headers);
+        RollupJob job = new RollupJob(randomRollupJobConfig(random()), headers);
         String json = job.toString();
         assertFalse(json.contains("authentication"));
         assertFalse(json.contains("security"));
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
index 51a53db713ba4..e8c66f7e8c118 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
-import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
 import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
@@ -54,8 +53,10 @@
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
 import static java.util.Collections.singletonList;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig;
+import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -112,8 +113,8 @@ public void testMissingFields() throws IOException {
         indexReader.close();
         directory.close();
 
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats,
-                ConfigTestHelpers.getGroupConfig().build(), "foo", randomBoolean());
+        final GroupConfig groupConfig = randomGroupConfig(random());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -179,8 +180,8 @@ public void testCorrectFields() throws IOException {
         indexReader.close();
         directory.close();
 
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats,
-                ConfigTestHelpers.getGroupConfig().build(), "foo", randomBoolean());
+        final GroupConfig groupConfig = randomGroupConfig(random());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -235,8 +236,8 @@ public void testNumericTerms() throws IOException {
         indexReader.close();
         directory.close();
 
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats,
-                ConfigTestHelpers.getGroupConfig().build(), "foo", randomBoolean());
+        final GroupConfig groupConfig = randomGroupConfig(random());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -301,8 +302,8 @@ public void testEmptyCounts() throws IOException {
         indexReader.close();
         directory.close();
 
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats,
-                ConfigTestHelpers.getGroupConfig().build(), "foo", randomBoolean());
+        final GroupConfig groupConfig = randomGroupConfig(random());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
 
         assertThat(docs.size(), equalTo(numDocs));
         for (IndexRequest doc : docs) {
@@ -353,11 +354,8 @@ public void testKeyOrderingOldID() {
 
         // The content of the config don't actually matter for this test
         // because the test is just looking at agg keys
-        GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig();
-        groupConfig.setHisto(new HistogramGroupConfig(123L, "abc"));
-
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(),
-                groupConfig.build(), "foo", false);
+        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(123L, "abc"), null);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", false);
         assertThat(docs.size(), equalTo(1));
         assertThat(docs.get(0).id(), equalTo("1237859798"));
     }
@@ -400,11 +398,8 @@ public void testKeyOrderingNewID() {
             return foos;
         });
 
-        GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig();
-        groupConfig.setHisto(new HistogramGroupConfig(1, "abc"));
-
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(),
-                groupConfig.build(), "foo", true);
+        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1L, "abc"), null);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true);
         assertThat(docs.size(), equalTo(1));
         assertThat(docs.get(0).id(), equalTo("foo$c9LcrFqeFW92uN_Z7sv1hA"));
     }
@@ -453,11 +448,8 @@ public void testKeyOrderingNewIDLong() {
             return foos;
         });
 
-        GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig();
-        groupConfig.setHisto(new HistogramGroupConfig(1, "abc"));
-
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(),
-                groupConfig.build(), "foo", true);
+        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), new HistogramGroupConfig(1, "abc"), null);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", true);
         assertThat(docs.size(), equalTo(1));
         assertThat(docs.get(0).id(), equalTo("foo$VAFKZpyaEqYRPLyic57_qw"));
     }
@@ -483,11 +475,8 @@ public void testNullKeys() {
             return foos;
         });
 
-        GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig();
-        groupConfig.setHisto(randomHistogramGroupConfig(random()));
-
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(),
-                groupConfig.build(), "foo", randomBoolean());
+        GroupConfig groupConfig = new GroupConfig(randomDateHistogramGroupConfig(random()), randomHistogramGroupConfig(random()), null);
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig, "foo", randomBoolean());
         assertThat(docs.size(), equalTo(1));
         assertFalse(Strings.isNullOrEmpty(docs.get(0).id()));
     }
@@ -548,8 +537,8 @@ public void testMissingBuckets() throws IOException {
         indexReader.close();
         directory.close();
 
-        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats,
-                ConfigTestHelpers.getGroupConfig().build(), "foo", randomBoolean());
+        final GroupConfig groupConfig = randomGroupConfig(random());
+        List<IndexRequest> docs = IndexerUtils.processBuckets(composite, indexName, stats, groupConfig, "foo", randomBoolean());
 
         assertThat(docs.size(), equalTo(6));
         for (IndexRequest doc : docs) {
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
index 5799eb401f6d1..6d29ee9f9ba6d 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java
@@ -96,8 +96,7 @@ public void testSimpleDateHisto() throws Exception {
         String rollupIndex = randomAlphaOfLength(10);
         String field = "the_histo";
         DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(field, new DateHistogramInterval("1ms"));
-        RollupJobConfig job = createJob(rollupIndex, new GroupConfig.Builder().setDateHisto(dateHistoConfig).build(),
-                Collections.emptyList());
+        RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList());
         final List<Map<String, Object>> dataset = new ArrayList<>();
         dataset.addAll(
             Arrays.asList(
@@ -142,8 +141,7 @@ public void testDateHistoAndMetrics() throws Exception {
         String field = "the_histo";
         DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(field, new DateHistogramInterval("1h"));
         MetricConfig config = new MetricConfig("counter", Arrays.asList("avg", "sum", "max", "min"));
-        RollupJobConfig job = createJob(rollupIndex, new GroupConfig.Builder().setDateHisto(dateHistoConfig).build(),
-                Collections.singletonList(config));
+        RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(config));
         final List<Map<String, Object>> dataset = new ArrayList<>();
         dataset.addAll(
             Arrays.asList(
@@ -265,8 +263,7 @@ public void testSimpleDateHistoWithDelay() throws Exception {
         String field = "the_histo";
         DateHistogramGroupConfig dateHistoConfig =
             new DateHistogramGroupConfig(field, new DateHistogramInterval("1m"), new DateHistogramInterval("1h"), null);
-        RollupJobConfig job = createJob(rollupIndex, new GroupConfig.Builder().setDateHisto(dateHistoConfig).build(),
-                Collections.emptyList());
+        RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList());
         final List<Map<String, Object>> dataset = new ArrayList<>();
         long now = System.currentTimeMillis();
         dataset.addAll(
@@ -347,8 +344,7 @@ public void testSimpleDateHistoWithTimeZone() throws Exception {
         String rollupIndex = randomAlphaOfLengthBetween(5, 10);
         String field = "the_histo";
         DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(field, new DateHistogramInterval("1d"), null, timeZone);
-        RollupJobConfig job = createJob(rollupIndex, new GroupConfig.Builder().setDateHisto(dateHistoConfig).build(),
-                Collections.emptyList());
+        RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.emptyList());
 
         executeTestCase(dataset, job, now, (resp) -> {
             assertThat(resp.size(), equalTo(1));
@@ -410,8 +406,7 @@ public void testRandomizedDateHisto() throws Exception {
         DateHistogramGroupConfig dateHistoConfig = new DateHistogramGroupConfig(timestampField, new DateHistogramInterval(timeInterval));
         MetricConfig metricConfig = new MetricConfig(valueField, Collections.singletonList("avg"));
-        RollupJobConfig job = createJob(rollupIndex, new GroupConfig.Builder().setDateHisto(dateHistoConfig).build(),
-                Collections.singletonList(metricConfig));
+        RollupJobConfig job = createJob(rollupIndex, new GroupConfig(dateHistoConfig), Collections.singletonList(metricConfig));
 
         final List<Map<String, Object>> dataset = new ArrayList<>();
         int numDocs = randomIntBetween(1,100);
@@ -439,15 +434,8 @@ public void testRandomizedDateHisto() throws Exception {
     }
 
     private RollupJobConfig createJob(String rollupIndex, GroupConfig groupConfig, List<MetricConfig> metricConfigs) {
-        return new RollupJobConfig.Builder()
-                .setId(randomAlphaOfLength(10))
-                .setIndexPattern(randomAlphaOfLength(10))
-                .setRollupIndex(rollupIndex)
-                .setGroupConfig(groupConfig)
-                .setMetricsConfig(metricConfigs)
-                .setCron(ConfigTestHelpers.getCronString())
-                .setPageSize(randomIntBetween(1, 100))
-                .build();
+        return new RollupJobConfig(randomAlphaOfLength(10), randomAlphaOfLength(10), rollupIndex, ConfigTestHelpers.randomCron(),
+            randomIntBetween(1, 100), groupConfig, metricConfigs, ConfigTestHelpers.randomTimeout(random()));
     }
 
     static Map<String, Object> asMap(Object... fields) {
@@ -477,7 +465,7 @@ private void executeTestCase(List<Map<String, Object>> docs, RollupJobConfig con
         Directory dir = index(docs, fieldTypeLookup);
         IndexReader reader = DirectoryReader.open(dir);
         IndexSearcher searcher = new IndexSearcher(reader);
-        String dateHistoField = config.getGroupConfig().getDateHisto().getField();
+        String dateHistoField = config.getGroupConfig().getDateHistogram().getField();
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
             RollupJob job = new RollupJob(config, Collections.emptyMap());
@@ -499,14 +487,14 @@ private void executeTestCase(List<Map<String, Object>> docs, RollupJobConfig con
      */
     private Map<String, MappedFieldType> createFieldTypes(RollupJobConfig job) {
         Map<String, MappedFieldType> fieldTypes = new HashMap<>();
-        MappedFieldType fieldType = new DateFieldMapper.Builder(job.getGroupConfig().getDateHisto().getField())
+        MappedFieldType fieldType = new DateFieldMapper.Builder(job.getGroupConfig().getDateHistogram().getField())
                 .dateTimeFormatter(Joda.forPattern(randomFrom("basic_date", "date_optional_time", "epoch_second")))
                 .build(new Mapper.BuilderContext(settings.getSettings(), new ContentPath(0)))
                 .fieldType();
         fieldTypes.put(fieldType.name(), fieldType);
 
-        if (job.getGroupConfig().getHisto() != null) {
-            for (String field : job.getGroupConfig().getHisto().getFields()) {
+        if (job.getGroupConfig().getHistogram() != null) {
+            for (String field : job.getGroupConfig().getHistogram().getFields()) {
                 MappedFieldType ft = new NumberFieldMapper.Builder(field, NumberFieldMapper.NumberType.LONG)
                         .build(new Mapper.BuilderContext(settings.getSettings(), new ContentPath(0)))
                         .fieldType();
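RollupIndexerIndexingTests leans on the one-argument GroupConfig(dateHistogram) shorthand together with the flattened createJob helper above. A sketch of assembling such a job by hand; the field and metric names come from the hunks above, while the job id, index pattern, and page size here are illustrative:

    import java.util.Collections;

    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
    import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
    import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
    import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;

    class IndexingJobSketch {
        static RollupJobConfig hourlyAvgJob(String rollupIndex) {
            DateHistogramGroupConfig dateHisto = new DateHistogramGroupConfig("the_histo", new DateHistogramInterval("1h"));
            MetricConfig counterAvg = new MetricConfig("counter", Collections.singletonList("avg"));
            // The one-argument form leaves histogram and terms unset, matching
            // new GroupConfig(dateHisto, null, null) as used elsewhere in this diff.
            return new RollupJobConfig("job", "index-*", rollupIndex, "*/5 * * * * ?", 100,
                    new GroupConfig(dateHisto), Collections.singletonList(counterAvg), null);
        }
    }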
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java
index c645a0e3005c9..955dcbc2beb48 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java
@@ -216,8 +216,7 @@ protected void onFinish() {}
     }
 
     public void testStarted() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -236,8 +235,7 @@ public void testStarted() throws Exception {
     }
 
     public void testIndexing() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -273,7 +271,7 @@ public void testStateChangeMidTrigger() throws Exception {
         // and make sure the appropriate error is thrown
         when(config.getGroupConfig()).then((Answer<GroupConfig>) invocationOnMock -> {
             state.set(IndexerState.STOPPED);
-            return ConfigTestHelpers.getGroupConfig().build();
+            return ConfigTestHelpers.randomGroupConfig(random());
         });
 
         RollupJob job = new RollupJob(config, Collections.emptyMap());
@@ -304,8 +302,7 @@ protected void onFinish() {
 
     public void testAbortDuringSearch() throws Exception {
         final AtomicBoolean aborted = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         final CountDownLatch latch = new CountDownLatch(1);
@@ -349,8 +346,7 @@ protected void onAbort() {
 
     public void testAbortAfterCompletion() throws Exception {
         final AtomicBoolean aborted = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
@@ -435,8 +431,7 @@ protected void doSaveState(IndexerState state, Map<String, Object> position, Run
     }
 
     public void testStopIndexing() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -458,8 +453,7 @@ public void testStopIndexing() throws Exception {
     }
 
     public void testAbortIndexing() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -486,8 +480,7 @@ protected void onAbort() {
     }
 
     public void testAbortStarted() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -513,8 +506,7 @@ protected void onAbort() {
     }
 
     public void testMultipleJobTriggering() throws Exception {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         final ExecutorService executor = Executors.newFixedThreadPool(1);
         try {
@@ -554,8 +546,7 @@ protected void onAbort() {
     // deal with it everyhwere
     public void testUnknownKey() throws Exception {
         AtomicBoolean isFinished = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         Function<SearchRequest, SearchResponse> searchFunction = searchRequest -> {
             Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() {
@@ -659,8 +650,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
 
     public void testFailureWhileStopping() throws Exception {
         AtomicBoolean isFinished = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         Function<SearchRequest, SearchResponse> searchFunction = searchRequest -> {
             Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() {
@@ -762,8 +752,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
 
     public void testSearchShardFailure() throws Exception {
         AtomicBoolean isFinished = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         Function<SearchRequest, SearchResponse> searchFunction = searchRequest -> {
             ShardSearchFailure[] failures = new ShardSearchFailure[]{new ShardSearchFailure(new RuntimeException("failed"))};
@@ -806,8 +795,7 @@ public void testSearchShardFailure() throws Exception {
 
     public void testBulkFailure() throws Exception {
         AtomicBoolean isFinished = new AtomicBoolean(false);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(ESTestCase.randomAlphaOfLengthBetween(1, 10)).build(),
-                Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED);
         Function<SearchRequest, SearchResponse> searchFunction = searchRequest -> {
             Aggregations aggs = new Aggregations(Collections.singletonList(new CompositeAggregation() {
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java
index df7c12f47fae2..13290f09e8eb8 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java
@@ -58,7 +58,7 @@ public static void stopThreadPool() {
     }
 
     public void testInitialStatusStopped() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -71,7 +71,7 @@ public void testInitialStatusStopped() {
     }
 
     public void testInitialStatusAborting() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.ABORTING, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -84,7 +84,7 @@ public void testInitialStatusAborting() {
     }
 
     public void testInitialStatusStopping() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPING, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -97,7 +97,7 @@ public void testInitialStatusStopping() {
     }
 
     public void testInitialStatusStarted() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -110,7 +110,7 @@ public void testInitialStatusStarted() {
     }
 
     public void testInitialStatusIndexingOldID() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"), false);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -124,7 +124,7 @@ public void testInitialStatusIndexingOldID() {
     }
 
     public void testInitialStatusIndexingNewID() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.INDEXING, Collections.singletonMap("foo", "bar"), true);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -138,7 +138,7 @@ public void testInitialStatusIndexingNewID() {
     }
 
     public void testNoInitialStatus() {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         SchedulerEngine schedulerEngine = new SchedulerEngine(Clock.systemUTC());
@@ -150,7 +150,7 @@ public void testNoInitialStatus() {
     }
 
     public void testStartWhenStarted() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STARTED, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -179,7 +179,7 @@ public void onFailure(Exception e) {
     }
 
     public void testStartWhenStopping() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         when(client.threadPool()).thenReturn(pool);
@@ -258,7 +258,7 @@ public void onFailure(Exception e) {
     }
 
     public void testStartWhenStopped() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -296,7 +296,7 @@ public void onFailure(Exception e) {
     }
 
     public void testTriggerUnrelated() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, Collections.singletonMap("foo", "bar"), randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -337,7 +337,7 @@ public void onFailure(Exception e) {
     }
 
     public void testTrigger() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         when(client.threadPool()).thenReturn(pool);
@@ -380,7 +380,7 @@ public void onFailure(Exception e) {
 
     @SuppressWarnings("unchecked")
     public void testTriggerWithoutHeaders() throws InterruptedException {
         final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -465,7 +465,7 @@ public void testTriggerWithHeaders() throws InterruptedException {
         Map<String, String> headers = new HashMap<>(1);
         headers.put("es-security-runas-user", "foo");
         headers.put("_xpack_security_authentication", "bar");
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), headers);
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -553,7 +553,7 @@ public void testSaveStateChangesIDScheme() throws InterruptedException {
         Map<String, String> headers = new HashMap<>(1);
         headers.put("es-security-runas-user", "foo");
         headers.put("_xpack_security_authentication", "bar");
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), headers);
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers);
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -637,7 +637,7 @@ public void onFailure(Exception e) {
     }
 
     public void testStopWhenStopped() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null, randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
@@ -663,7 +663,7 @@ public void onFailure(Exception e) {
     }
 
     public void testStopWhenStopping() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
         when(client.threadPool()).thenReturn(pool);
@@ -744,7 +744,7 @@ public void onFailure(Exception e) {
     }
 
     public void testStopWhenAborting() throws InterruptedException {
-        RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob(randomAlphaOfLength(5)).build(), Collections.emptyMap());
+        RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap());
         RollupJobStatus status = new RollupJobStatus(IndexerState.STOPPED, null, randomBoolean());
         Client client = mock(Client.class);
         when(client.settings()).thenReturn(Settings.EMPTY);
diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle
index 578c006e7f0c8..7ffe5d363729d 100644
--- a/x-pack/plugin/security/cli/build.gradle
+++ b/x-pack/plugin/security/cli/build.gradle
@@ -17,4 +17,9 @@ dependencies {
 
 dependencyLicenses {
   mapping from: /bc.*/, to: 'bouncycastle'
-}
\ No newline at end of file
+}
+
+if (inFipsJvm) {
+  test.enabled = false
+}
+
diff --git a/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.11.1.jar.sha1 b/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.11.1.jar.sha1
new file mode 100644
index 0000000000000..6178556b31848
--- /dev/null
+++ b/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.11.1.jar.sha1
@@ -0,0 +1 @@
+4b41b53a3a2d299ce381a69d165381ca19f62912
\ No newline at end of file
diff --git a/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.9.1.jar.sha1 b/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.9.1.jar.sha1
deleted file mode 100644
index 66119e87e211f..0000000000000
--- a/x-pack/plugin/security/licenses/log4j-slf4j-impl-2.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0a97a849b18b3798c4af1a2ca5b10c66cef17e3a
\ No newline at end of file
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheck.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheck.java
deleted file mode 100644
index c60c2ea18d061..0000000000000
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheck.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.security;
-
-import org.elasticsearch.bootstrap.BootstrapCheck;
-import org.elasticsearch.bootstrap.BootstrapContext;
-import org.elasticsearch.xpack.core.XPackSettings;
-
-import javax.crypto.SecretKeyFactory;
-import java.security.NoSuchAlgorithmException;
-import java.util.Locale;
-
-/**
- * Bootstrap check to ensure that one of the allowed password hashing algorithms is
- * selected and that it is available.
- */
-public class PasswordHashingAlgorithmBootstrapCheck implements BootstrapCheck {
-    @Override
-    public BootstrapCheckResult check(BootstrapContext context) {
-        final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(context.settings);
-        if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2")) {
-            try {
-                SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
-            } catch (NoSuchAlgorithmException e) {
-                final String errorMessage = String.format(Locale.ROOT,
-                    "Support for PBKDF2WithHMACSHA512 must be available in order to use any of the " +
-                        "PBKDF2 algorithms for the [%s] setting.", XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey());
-                return BootstrapCheckResult.failure(errorMessage);
-            }
-        }
-        return BootstrapCheckResult.success();
-    }
-
-    @Override
-    public boolean alwaysEnforce() {
-        return true;
-    }
-}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 875fe7d04045b..acfe6437f4164 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -309,7 +309,6 @@ public Security(Settings settings, final Path configPath) {
                 new TokenSSLBootstrapCheck(),
                 new PkiRealmBootstrapCheck(getSslService()),
                 new TLSLicenseBootstrapCheck(),
-                new PasswordHashingAlgorithmBootstrapCheck(),
                 new FIPS140SecureSettingsBootstrapCheck(settings, env),
                 new FIPS140JKSKeystoreBootstrapCheck(settings),
                 new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings)));
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java
index 1d2f792b29809..e51e6422f53fb 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java
@@ -8,6 +8,7 @@
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -19,11 +20,15 @@
 import org.elasticsearch.xpack.core.security.action.user.PutUserRequest;
 import org.elasticsearch.xpack.core.security.action.user.PutUserResponse;
 import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
+import org.elasticsearch.xpack.core.security.support.Validation;
 import org.elasticsearch.xpack.core.security.user.AnonymousUser;
 import org.elasticsearch.xpack.core.security.user.SystemUser;
+import org.elasticsearch.xpack.core.security.user.XPackSecurityUser;
 import org.elasticsearch.xpack.core.security.user.XPackUser;
 import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
 
+import static org.elasticsearch.action.ValidateActions.addValidationError;
+
 public class TransportPutUserAction extends HandledTransportAction<PutUserRequest, PutUserResponse> {
 
     private final NativeUsersStore usersStore;
@@ -38,37 +43,62 @@ public TransportPutUserAction(Settings settings, ThreadPool threadPool, ActionFi
 
     @Override
     protected void doExecute(final PutUserRequest request, final ActionListener<PutUserResponse> listener) {
+        final ActionRequestValidationException validationException = validateRequest(request);
+        if (validationException != null) {
+            listener.onFailure(validationException);
+        } else {
+            usersStore.putUser(request, new ActionListener<Boolean>() {
+                @Override
+                public void onResponse(Boolean created) {
+                    if (created) {
+                        logger.info("added user [{}]", request.username());
+                    } else {
+                        logger.info("updated user [{}]", request.username());
+                    }
+                    listener.onResponse(new PutUserResponse(created));
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
+                    listener.onFailure(e);
+                }
+            });
+        }
+    }
+
+    private ActionRequestValidationException validateRequest(PutUserRequest request) {
+        ActionRequestValidationException validationException = null;
         final String username = request.username();
         if (ClientReservedRealm.isReserved(username, settings)) {
             if (AnonymousUser.isAnonymousUsername(username, settings)) {
-                listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API"));
-                return;
+                validationException =
+                    addValidationError("user [" + username + "] is anonymous and cannot be modified via the API", validationException);
             } else {
-                listener.onFailure(new IllegalArgumentException("user [" + username + "] is reserved and only the " +
-                        "password can be changed"));
-                return;
+                validationException = addValidationError("user [" + username + "] is reserved and only the " +
+                    "password can be changed", validationException);
+            }
+        } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username) || XPackSecurityUser.NAME.equals(username)) {
+            validationException = addValidationError("user [" + username + "] is internal", validationException);
+        } else {
+            Validation.Error usernameError = Validation.Users.validateUsername(username, true, settings);
+            if (usernameError != null) {
+                validationException = addValidationError(usernameError.toString(), validationException);
             }
-        } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) {
-            listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal"));
-            return;
         }
-        usersStore.putUser(request, new ActionListener<Boolean>() {
-            @Override
-            public void onResponse(Boolean created) {
-                if (created) {
-                    logger.info("added user [{}]", request.username());
-                } else {
-                    logger.info("updated user [{}]", request.username());
+        if (request.roles() != null) {
+            for (String role : request.roles()) {
+                Validation.Error roleNameError = Validation.Roles.validateRoleName(role, true);
+                if (roleNameError != null) {
+                    validationException = addValidationError(roleNameError.toString(), validationException);
                 }
-                listener.onResponse(new PutUserResponse(created));
             }
+        }
 
-            @Override
-            public void onFailure(Exception e) {
-                logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
-                listener.onFailure(e);
-            }
-        });
+        if (request.password() != null) {
+            validationException = addValidationError("password should never be passed to the transport action", validationException);
+        }
+        return validationException;
     }
 }
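The new validateRequest collects every problem into a single ActionRequestValidationException instead of failing the listener at the first error; addValidationError creates the exception on first use and appends thereafter. The accumulation idiom in isolation (the error strings below are illustrative, not the ones the action emits):

    import org.elasticsearch.action.ActionRequestValidationException;

    import static org.elasticsearch.action.ValidateActions.addValidationError;

    class ValidationAccumulationSketch {
        // Returns null when there is nothing to report, mirroring validateRequest(...) above.
        static ActionRequestValidationException validate(String username, String[] roles) {
            ActionRequestValidationException ex = null;
            if (username == null || username.isEmpty()) {
                ex = addValidationError("username is missing", ex); // instantiates the exception
            }
            if (roles == null || roles.length == 0) {
                ex = addValidationError("at least one role is required", ex); // appends to it
            }
            return ex;
        }
    }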
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
index 18c9d2af63c2f..3775ccbc00d14 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealm.java
@@ -179,12 +179,15 @@ protected String maybeRemoveRealmName(final String principalName) {
 
     private void handleException(Exception e, final ActionListener<AuthenticationResult> listener) {
         if (e instanceof LoginException) {
+            logger.debug("failed to authenticate user, service login failure", e);
             listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, service login failure",
                     unauthorized(e.getLocalizedMessage(), e)));
         } else if (e instanceof GSSException) {
+            logger.debug("failed to authenticate user, gss context negotiation failure", e);
             listener.onResponse(AuthenticationResult.terminate("failed to authenticate user, gss context negotiation failure",
                     unauthorized(e.getLocalizedMessage(), e)));
         } else {
+            logger.debug("failed to authenticate user", e);
             listener.onFailure(e);
         }
     }
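The KerberosRealm hunk adds a debug log ahead of each terminal response, since the terse message returned to the client otherwise hides the underlying LoginException or GSSException. The logging shape, roughly (log4j2, as used across the codebase; the class name is hypothetical):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    class DebugBeforeTerminateSketch {
        private static final Logger logger = LogManager.getLogger(DebugBeforeTerminateSketch.class);

        static void report(Exception e) {
            // Throwable-as-last-argument overload: the message stays terse while
            // the full stack trace is preserved in the debug log.
            logger.debug("failed to authenticate user, service login failure", e);
        }
    }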
"\n"; } @Override @@ -108,7 +109,8 @@ protected String configUsersRoles() { return super.configUsersRoles() + "role1:user1,user2,user3\n" + "role2:user1,user3\n" + - "role3:user2,user3\n"; + "role3:user2,user3\n" + + "role4:user4\n"; } @Override @@ -134,7 +136,14 @@ protected String configRoles() { " indices:\n" + " - names: '*'\n" + " privileges: [ ALL ]\n" + - " query: '{\"term\" : {\"field2\" : \"value2\"}}'"; // <-- query defined as json in a string + " query: '{\"term\" : {\"field2\" : \"value2\"}}'\n" + // <-- query defined as json in a string + "role4:\n" + + " cluster: [ all ]\n" + + " indices:\n" + + " - names: '*'\n" + + " privileges: [ ALL ]\n" + + // query that can match nested documents + " query: '{\"bool\": { \"must_not\": { \"term\" : {\"field1\" : \"value2\"}}}}'"; } @Override @@ -945,7 +954,7 @@ public void testNestedInnerHits() throws Exception { refresh("test"); SearchResponse response = client() - .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) .prepareSearch("test") .setQuery(QueryBuilders.nestedQuery("nested_field", QueryBuilders.termQuery("nested_field.field2", "value2"), ScoreMode.None).innerHit(new InnerHitBuilder())) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheckTests.java deleted file mode 100644 index 8ca5c6c7216c5..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PasswordHashingAlgorithmBootstrapCheckTests.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security; - -import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.XPackSettings; - -import javax.crypto.SecretKeyFactory; -import java.security.NoSuchAlgorithmException; - -public class PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCase { - - public void testPasswordHashingAlgorithmBootstrapCheck() { - Settings settings = Settings.EMPTY; - assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure()); - // The following two will always pass because for now we only test in environments where PBKDF2WithHMACSHA512 is supported - assertTrue(isSecretkeyFactoryAlgoAvailable("PBKDF2WithHMACSHA512")); - settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000").build(); - assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure()); - - settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2").build(); - assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure()); - - settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT").build(); - assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure()); - - settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT11").build(); - assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure()); - } - - private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) { - try { - SecretKeyFactory.getInstance(algorithmId); - return true; - } catch (NoSuchAlgorithmException e) { - return false; - } - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestTests.java index af3a89c77b6f0..952448db4869d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/PutUserRequestTests.java @@ -39,16 +39,6 @@ public void testValidateRejectsNullUserName() throws Exception { assertThat(validation.validationErrors().size(), is(1)); } - public void testValidateRejectsUserNameThatHasInvalidCharacters() throws Exception { - final PutUserRequest request = new PutUserRequest(); - request.username("fóóbár"); - request.roles("bar"); - final ActionRequestValidationException validation = request.validate(); - assertThat(validation, is(notNullValue())); - assertThat(validation.validationErrors(), contains(containsString("must be"))); - assertThat(validation.validationErrors().size(), is(1)); - } - public void testValidateRejectsMetaDataWithLeadingUnderscore() throws Exception { final PutUserRequest request = new PutUserRequest(); request.username("foo"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index cb4192b93d768..a4f6fa9f4d230 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -37,6 +38,7 @@ import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -196,12 +198,32 @@ public void onFailure(Exception e) { } }); + assertThat(throwableRef.get(), is(nullValue())); assertThat(responseRef.get(), is(notNullValue())); assertThat(responseRef.get().created(), is(created)); - assertThat(throwableRef.get(), is(nullValue())); verify(usersStore, times(1)).putUser(eq(request), any(ActionListener.class)); } + public void testInvalidUser() { + NativeUsersStore usersStore = mock(NativeUsersStore.class); + TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + mock(IndexNameExpressionResolver.class), usersStore, transportService); + + final PutUserRequest request = new PutUserRequest(); + request.username("fóóbár"); + request.roles("bar"); + ActionRequestValidationException validation = request.validate(); + assertNull(validation); + + PlainActionFuture responsePlainActionFuture = new PlainActionFuture<>(); + action.doExecute(request, responsePlainActionFuture); + validation = expectThrows(ActionRequestValidationException.class, responsePlainActionFuture::actionGet); + assertThat(validation.validationErrors(), contains(containsString("must be"))); + assertThat(validation.validationErrors().size(), is(1)); + } + public void testException() { final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException()); final User user = new User("joe"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 5589d328bc11d..8556c3456b478 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.SecureString; @@ -490,14 +489,14 @@ public void testCannotCreateUserWithShortPassword() throws Exception { client.preparePutUser("joe", 
randomAlphaOfLengthBetween(0, 5).toCharArray(), hasher, "admin_role").get(); fail("cannot create a user without a password < 6 characters"); - } catch (ValidationException v) { + } catch (IllegalArgumentException v) { assertThat(v.getMessage().contains("password"), is(true)); } } public void testCannotCreateUserWithInvalidCharactersInName() throws Exception { SecurityClient client = securityClient(); - ValidationException v = expectThrows(ValidationException.class, + IllegalArgumentException v = expectThrows(IllegalArgumentException.class, () -> client.preparePutUser("fóóbár", "my-am@zing-password".toCharArray(), hasher, "admin_role").get() ); @@ -531,7 +530,7 @@ public void testOperationsOnReservedUsers() throws Exception { IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> securityClient().preparePutUser(username, randomBoolean() ? SecuritySettingsSourceField.TEST_PASSWORD.toCharArray() : null, hasher, "admin").get()); - assertThat(exception.getMessage(), containsString("Username [" + username + "] is reserved")); + assertThat(exception.getMessage(), containsString("user [" + username + "] is reserved")); exception = expectThrows(IllegalArgumentException.class, () -> securityClient().prepareDeleteUser(username).get()); @@ -547,7 +546,7 @@ public void testOperationsOnReservedUsers() throws Exception { exception = expectThrows(IllegalArgumentException.class, () -> securityClient().preparePutUser(AnonymousUser.DEFAULT_ANONYMOUS_USERNAME, "foobar".toCharArray(), hasher).get()); - assertThat(exception.getMessage(), containsString("Username [" + AnonymousUser.DEFAULT_ANONYMOUS_USERNAME + "] is reserved")); + assertThat(exception.getMessage(), containsString("user [" + AnonymousUser.DEFAULT_ANONYMOUS_USERNAME + "] is anonymous")); exception = expectThrows(IllegalArgumentException.class, () -> securityClient().preparePutUser(SystemUser.NAME, "foobar".toCharArray(), hasher).get()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index e1be6c216598b..6344b020b1c74 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -83,7 +83,7 @@ public void testInvalidHashingAlgorithmFails() { IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> new ReservedRealm(mock(Environment.class), invalidSettings, usersStore, new AnonymousUser(Settings.EMPTY), securityIndex, threadPool)); assertThat(exception.getMessage(), containsString(invalidAlgoId)); - assertThat(exception.getMessage(), containsString("Only pbkdf2 or bcrypt family algorithms can be used for password hashing")); + assertThat(exception.getMessage(), containsString("Invalid algorithm")); } public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java index 0f44544c9c23b..afd9ec3996c4d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java +++ 
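Note: swapping ValidationException for IllegalArgumentException works here because, to the best of my knowledge, ActionRequestValidationException extends ValidationException, which itself extends IllegalArgumentException; now that the username and role checks run in the transport action, the client surfaces an ActionRequestValidationException, and catching the widest supertype keeps the tests stable. Illustrative use inside a test like the ones above:

    // The broader catch still observes the transport-side validation failure.
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> client.preparePutUser("fóóbár", "my-am@zing-password".toCharArray(), hasher, "admin_role").get());
    assertThat(e.getMessage(), containsString("must be"));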
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/kerberos/KerberosRealmTests.java @@ -108,6 +108,7 @@ public void testLookupUser() { assertThat(future.actionGet(), is(nullValue())); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32768") public void testKerberosRealmWithInvalidKeytabPathConfigurations() throws IOException { final String keytabPathCase = randomFrom("keytabPathAsDirectory", "keytabFileDoesNotExist", "keytabPathWithNoReadPermissions"); final String expectedErrorMessage; diff --git a/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.11.jar.sha1 b/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ b/x-pack/plugin/sql/jdbc/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/build.gradle b/x-pack/plugin/sql/sql-action/build.gradle index f6b5177d50833..bf79fd824ef8d 100644 --- a/x-pack/plugin/sql/sql-action/build.gradle +++ b/x-pack/plugin/sql/sql-action/build.gradle @@ -76,8 +76,6 @@ thirdPartyAudit.excludes = [ 'com.fasterxml.jackson.dataformat.xml.JacksonXmlModule', 'com.fasterxml.jackson.dataformat.xml.XmlMapper', 'com.fasterxml.jackson.dataformat.xml.util.DefaultXmlPrettyPrinter', - 'com.fasterxml.jackson.databind.node.JsonNodeFactory', - 'com.fasterxml.jackson.databind.node.ObjectNode', 'com.lmax.disruptor.BlockingWaitStrategy', 'com.lmax.disruptor.BusySpinWaitStrategy', 'com.lmax.disruptor.EventFactory', @@ -116,12 +114,6 @@ thirdPartyAudit.excludes = [ 'javax.mail.internet.MimeMultipart', 'javax.mail.internet.MimeUtility', 'javax.mail.util.ByteArrayDataSource', - 'javax.persistence.AttributeConverter', - 'javax.persistence.EntityManager', - 'javax.persistence.EntityManagerFactory', - 'javax.persistence.EntityTransaction', - 'javax.persistence.Persistence', - 'javax.persistence.PersistenceException', 'org.apache.commons.compress.compressors.CompressorStreamFactory', 'org.apache.commons.compress.utils.IOUtils', 'org.apache.commons.csv.CSVFormat', @@ -150,3 +142,13 @@ thirdPartyAudit.excludes = [ 'org.zeromq.ZMQ$Socket', 'org.zeromq.ZMQ' ] + +if (JavaVersion.current() <= JavaVersion.VERSION_1_8) { + // Used by Log4J 2.11.1 + thirdPartyAudit.excludes += [ + 'java.io.ObjectInputFilter', + 'java.io.ObjectInputFilter$Config', + 'java.io.ObjectInputFilter$FilterInfo', + 'java.io.ObjectInputFilter$Status' + ] +} \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.11.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ 
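Note: the conditional thirdPartyAudit excludes added above are needed because java.io.ObjectInputFilter only exists since Java 9, so when the build itself runs on Java 8 the audit presumably cannot resolve Log4j 2.11.1's references to it. A standalone probe showing the difference between runtimes (illustrative, not part of the build):

    // Prints false on a Java 8 runtime and true on Java 9 or later.
    public class ObjectInputFilterProbe {
        public static void main(String[] args) {
            boolean present;
            try {
                Class.forName("java.io.ObjectInputFilter");
                present = true;
            } catch (ClassNotFoundException e) {
                present = false;
            }
            System.out.println("java.io.ObjectInputFilter present: " + present);
        }
    }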
b/x-pack/plugin/sql/sql-action/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.11.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..4b1bfffac179f --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.11.1.jar.sha1 @@ -0,0 +1 @@ +268f0fe4df3eefe052b57c87ec48517d64fb2a10 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.9.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.9.1.jar.sha1 deleted file mode 100644 index e1a89fadfed95..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/log4j-api-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a2999229464e7a324aa503c0a52ec0f05efe7bd \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.11.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.11.1.jar.sha1 new file mode 100644 index 0000000000000..2fb8589380a03 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.11.1.jar.sha1 @@ -0,0 +1 @@ +592a48674c926b01a9a747c7831bcd82a9e6d6e4 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.9.1.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.9.1.jar.sha1 deleted file mode 100644 index 990ea322a7613..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/log4j-core-2.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c041978c686866ee8534f538c6220238db3bb6be \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 new file mode 100644 index 0000000000000..fdedaf3fc5756 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 @@ -0,0 +1 @@ +73dd7703a94ec2357581f65ee7c1c4d618ff310f \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 deleted file mode 100644 index d38fb392c350b..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-608f0277b0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -471096d6e92338b208aa91f3a85feb2f9cfc4afd \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.11.jar.sha1 b/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ b/x-pack/plugin/sql/sql-client/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.10.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.10.jar.sha1 deleted file mode 100644 index a322d371e265e..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.10.jar.sha1 +++ /dev/null @@ -1 +0,0 
@@ -eb21a035c66ad307e66ec8fce37f5d50fd62d039 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.11.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.11.jar.sha1 new file mode 100644 index 0000000000000..e7ad1e74ed6b8 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/jackson-core-2.8.11.jar.sha1 @@ -0,0 +1 @@ +876ead1db19f0c9e79c9789273a3ef8c6fd6c29b \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java index d6e2f80b025ca..e33511cfbfd4a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Locate.java @@ -62,20 +62,10 @@ protected TypeResolution resolveType() { @Override protected ProcessorDefinition makeProcessorDefinition() { - LocateFunctionProcessorDefinition processorDefinition; - if (start == null) { - processorDefinition = new LocateFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(pattern), - ProcessorDefinitions.toProcessorDefinition(source)); - } - else { - processorDefinition = new LocateFunctionProcessorDefinition(location(), this, - ProcessorDefinitions.toProcessorDefinition(pattern), - ProcessorDefinitions.toProcessorDefinition(source), - ProcessorDefinitions.toProcessorDefinition(start)); - } - - return processorDefinition; + return new LocateFunctionProcessorDefinition(location(), this, + ProcessorDefinitions.toProcessorDefinition(pattern), + ProcessorDefinitions.toProcessorDefinition(source), + start == null ? null : ProcessorDefinitions.toProcessorDefinition(start)); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java index 84b0c4457b170..1902051185273 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinition.java @@ -21,20 +21,12 @@ public class LocateFunctionProcessorDefinition extends ProcessorDefinition { public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern, ProcessorDefinition source, ProcessorDefinition start) { - super(location, expression, Arrays.asList(pattern, source, start)); + super(location, expression, start == null ? 
Arrays.asList(pattern, source) : Arrays.asList(pattern, source, start)); this.pattern = pattern; this.source = source; this.start = start; } - public LocateFunctionProcessorDefinition(Location location, Expression expression, ProcessorDefinition pattern, - ProcessorDefinition source) { - super(location, expression, Arrays.asList(pattern, source)); - this.pattern = pattern; - this.source = source; - this.start = null; - } - @Override public final ProcessorDefinition replaceChildren(List newChildren) { int childrenSize = newChildren.size(); @@ -68,9 +60,6 @@ public boolean resolved() { protected ProcessorDefinition replaceChildren(ProcessorDefinition newPattern, ProcessorDefinition newSource, ProcessorDefinition newStart) { - if (newStart == null) { - return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource); - } return new LocateFunctionProcessorDefinition(location(), expression(), newPattern, newSource, newStart); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java index 6d86e807be24a..4815c9c9528b1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LocateFunctionProcessorDefinitionTests.java @@ -38,50 +38,34 @@ public static LocateFunctionProcessorDefinition randomLocateFunctionProcessorDef return (LocateFunctionProcessorDefinition) (new Locate(randomLocation(), randomStringLiteral(), randomStringLiteral(), - frequently() ? randomIntLiteral() : null) + randomFrom(true, false) ? randomIntLiteral() : null) .makeProcessorDefinition()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32554") @Override public void testTransform() { // test transforming only the properties (location, expression), // skipping the children (the two parameters of the binary function) which are tested separately LocateFunctionProcessorDefinition b1 = randomInstance(); Expression newExpression = randomValueOtherThan(b1.expression(), () -> randomLocateFunctionExpression()); - LocateFunctionProcessorDefinition newB; - if (b1.start() == null) { - newB = new LocateFunctionProcessorDefinition( - b1.location(), - newExpression, - b1.pattern(), - b1.source()); - } else { - newB = new LocateFunctionProcessorDefinition( - b1.location(), - newExpression, - b1.pattern(), - b1.source(), - b1.start()); - } + LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( + b1.location(), + newExpression, + b1.pattern(), + b1.source(), + b1.start()); + assertEquals(newB, b1.transformPropertiesOnly(v -> Objects.equals(v, b1.expression()) ? 
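Note: the two Locate constructors collapse into one by letting the optional third argument of LOCATE(pattern, source[, start]) travel as a nullable child, with the children list sized to match. A stripped-down sketch of that shape (hypothetical Node/LocateDefinition types, mirroring the diff):

    import java.util.Arrays;
    import java.util.List;

    class Node {}

    class LocateDefinition {
        final Node pattern, source, start;   // start may be null: the optional argument
        final List<Node> children;

        LocateDefinition(Node pattern, Node source, Node start) {
            this.pattern = pattern;
            this.source = source;
            this.start = start;
            // Two children for LOCATE(pattern, source), three for LOCATE(pattern, source, start).
            this.children = start == null
                    ? Arrays.asList(pattern, source)
                    : Arrays.asList(pattern, source, start);
        }
    }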
newExpression : v, Expression.class)); LocateFunctionProcessorDefinition b2 = randomInstance(); Location newLoc = randomValueOtherThan(b2.location(), () -> randomLocation()); - if (b2.start() == null) { - newB = new LocateFunctionProcessorDefinition( - newLoc, - b2.expression(), - b2.pattern(), - b2.source()); - } else { - newB = new LocateFunctionProcessorDefinition( - newLoc, - b2.expression(), - b2.pattern(), - b2.source(), - b2.start()); - } + newB = new LocateFunctionProcessorDefinition( + newLoc, + b2.expression(), + b2.pattern(), + b2.source(), + b2.start()); + assertEquals(newB, b2.transformPropertiesOnly(v -> Objects.equals(v, b2.location()) ? newLoc : v, Location.class)); } @@ -93,15 +77,9 @@ public void testReplaceChildren() { ProcessorDefinition newSource = toProcessorDefinition((Expression) randomValueOtherThan(b.source(), () -> randomStringLiteral())); ProcessorDefinition newStart; - LocateFunctionProcessorDefinition newB; - if (b.start() == null) { - newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source()); - newStart = null; - } - else { - newB = new LocateFunctionProcessorDefinition(b.location(), b.expression(), b.pattern(), b.source(), b.start()); - newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); - } + LocateFunctionProcessorDefinition newB = new LocateFunctionProcessorDefinition( + b.location(), b.expression(), b.pattern(), b.source(), b.start()); + newStart = toProcessorDefinition((Expression) randomValueOtherThan(b.start(), () -> randomIntLiteral())); LocateFunctionProcessorDefinition transformed = null; // generate all the combinations of possible children modifications and test all of them @@ -132,7 +110,8 @@ protected LocateFunctionProcessorDefinition mutate(LocateFunctionProcessorDefini comb.get(0) ? toProcessorDefinition((Expression) randomValueOtherThan(f.pattern(), () -> randomStringLiteral())) : f.pattern(), comb.get(1) ? toProcessorDefinition((Expression) randomValueOtherThan(f.source(), - () -> randomStringLiteral())) : f.source())); + () -> randomStringLiteral())) : f.source(), + null)); } } } else { @@ -155,13 +134,7 @@ protected LocateFunctionProcessorDefinition mutate(LocateFunctionProcessorDefini @Override protected LocateFunctionProcessorDefinition copy(LocateFunctionProcessorDefinition instance) { - return instance.start() == null ? 
- new LocateFunctionProcessorDefinition(instance.location(), - instance.expression(), - instance.pattern(), - instance.source()) - : - new LocateFunctionProcessorDefinition(instance.location(), + return new LocateFunctionProcessorDefinition(instance.location(), instance.expression(), instance.pattern(), instance.source(),
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java index dcfb8d278ff3f..a4d9d4cb57ab6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; import java.io.IOException; +import java.util.Locale; public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase { public static StringProcessor randomStringFunctionProcessor() { @@ -73,6 +74,19 @@ public void testLCase() { stringCharInputValidation(proc); } + + public void testLCaseWithTRLocale() { + Locale.setDefault(Locale.forLanguageTag("tr")); + StringProcessor proc = new StringProcessor(StringOperation.LCASE); + + // ES-SQL is not locale sensitive (so far). The obvious test for this is the Turkish language and its letter I conversions: + // in a non-Turkish locale, lowercasing the dotted capital I (unicode 0130) produces "i" plus a combining dot above, while in the Turkish locale it produces only "i" + // unicode 0069 = i + assertEquals("\u0069\u0307", proc.process("\u0130")); + // unicode 0049 = I (regular capital letter i) + // in the Turkish locale this would be lowercased to an "i" without a dot (unicode 0131) + assertEquals("\u0069", proc.process("\u0049")); + } public void testUCase() { StringProcessor proc = new StringProcessor(StringOperation.UCASE); @@ -81,9 +95,21 @@ public void testUCase() { assertEquals("SOMELOWERCASE", proc.process("SomeLoweRCasE")); assertEquals("FULLUPPERCASE", proc.process("FULLUPPERCASE")); assertEquals("A", proc.process('a')); + + // special uppercasing for the small letter sharp "s", resulting in "SS" + assertEquals("\u0053\u0053", proc.process("\u00df")); stringCharInputValidation(proc); } + + public void testUCaseWithTRLocale() { + Locale.setDefault(Locale.forLanguageTag("tr")); + StringProcessor proc = new StringProcessor(StringOperation.UCASE); + + // ES-SQL is not locale sensitive (so far). + // in the Turkish locale, the small letter "i" is uppercased to an "I" with a dot above (unicode 0130); otherwise to a regular "I" (unicode 0049) + assertEquals("\u0049", proc.process("\u0069")); + } public void testLength() { StringProcessor proc = new StringProcessor(StringOperation.LENGTH);
diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java index 0972c780618bd..d91cc2a6f0148 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheck.java @@ -13,11 +13,11 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.script.Script; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.xpack.core.upgrade.IndexUpgradeCheckVersion; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import java.util.function.BiConsumer; import java.util.function.Consumer;
diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java index af75595d4fd85..07017e6fc0014 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java @@ -16,8 +16,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import java.util.HashMap; import java.util.List;
diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index b24d0486cbf60..9acf5b01d7ee6 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -35,6 +35,7 @@ import org.elasticsearch.indices.IndexTemplateMissingException; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.Script; @@ -47,7 +48,6 @@ import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.xpack.core.template.TemplateUtils; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.actions.TransportIndexUpgradeAction; diff --git
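Note: a standalone illustration of the locale trap the two locale tests above guard against; plain JDK string case conversion consults the default locale unless one is passed explicitly, and Turkish remaps the i/I pair:

    import java.util.Locale;

    public class TurkishCaseDemo {
        public static void main(String[] args) {
            Locale tr = Locale.forLanguageTag("tr");
            System.out.println("i".toUpperCase(tr));          // İ (unicode 0130, dotted capital I)
            System.out.println("i".toUpperCase(Locale.ROOT)); // I (unicode 0049)
            System.out.println("I".toLowerCase(tr));          // ı (unicode 0131, dotless small i)
            System.out.println("I".toLowerCase(Locale.ROOT)); // i (unicode 0069)
        }
    }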
a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java index a868c151c0dbe..e0e7d0c7e5d33 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/actions/TransportIndexUpgradeInfoAction.java @@ -17,17 +17,19 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction; import org.elasticsearch.xpack.upgrade.IndexUpgradeService; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import java.util.Map; -public class TransportIndexUpgradeInfoAction extends TransportMasterNodeReadAction<IndexUpgradeInfoAction.Request, IndexUpgradeInfoAction.Response> { +public class TransportIndexUpgradeInfoAction + extends TransportMasterNodeReadAction<IndexUpgradeInfoRequest, IndexUpgradeInfoResponse> { private final IndexUpgradeService indexUpgradeService; private final XPackLicenseState licenseState; @@ -40,7 +42,7 @@ public TransportIndexUpgradeInfoAction(Settings settings, TransportService trans IndexNameExpressionResolver indexNameExpressionResolver, XPackLicenseState licenseState) { super(settings, IndexUpgradeInfoAction.NAME, transportService, clusterService, threadPool, actionFilters, - indexNameExpressionResolver, IndexUpgradeInfoAction.Request::new); + indexNameExpressionResolver, IndexUpgradeInfoRequest::new); this.indexUpgradeService = indexUpgradeService; this.licenseState = licenseState; } @@ -51,23 +53,23 @@ protected String executor() { } @Override - protected IndexUpgradeInfoAction.Response newResponse() { - return new IndexUpgradeInfoAction.Response(); + protected IndexUpgradeInfoResponse newResponse() { + return new IndexUpgradeInfoResponse(); } @Override - protected ClusterBlockException checkBlock(IndexUpgradeInfoAction.Request request, ClusterState state) { + protected ClusterBlockException checkBlock(IndexUpgradeInfoRequest request, ClusterState state) { // Cluster is not affected but we look up repositories in metadata return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); } @Override - protected final void masterOperation(final IndexUpgradeInfoAction.Request request, ClusterState state, - final ActionListener<IndexUpgradeInfoAction.Response> listener) { + protected final void masterOperation(final IndexUpgradeInfoRequest request, ClusterState state, + final ActionListener<IndexUpgradeInfoResponse> listener) { if (licenseState.isUpgradeAllowed()) { Map<String, UpgradeActionRequired> results = indexUpgradeService.upgradeInfo(request.indices(), request.indicesOptions(), state); - listener.onResponse(new IndexUpgradeInfoAction.Response(results)); + listener.onResponse(new IndexUpgradeInfoResponse(results)); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackField.UPGRADE)); } diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java
b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java index 24b576187cb6e..dbb61937642ef 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/rest/RestIndexUpgradeInfoAction.java @@ -9,12 +9,12 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction; -import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Request; import java.io.IOException; @@ -41,7 +41,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client } private RestChannelConsumer handleGet(final RestRequest request, NodeClient client) { - Request infoRequest = new Request(Strings.splitStringByCommaToArray(request.param("index"))); + IndexUpgradeInfoRequest infoRequest = new IndexUpgradeInfoRequest(Strings.splitStringByCommaToArray(request.param("index"))); infoRequest.indicesOptions(IndicesOptions.fromRequest(request, infoRequest.indicesOptions())); return channel -> client.execute(IndexUpgradeInfoAction.INSTANCE, infoRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java index ff54a9105ae8f..b8528603a2bf7 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeCheckTests.java @@ -12,9 +12,9 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import org.junit.Before; import java.io.IOException; diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index ef5c3acc3d238..cebe68c4047b8 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -14,12 +14,12 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportResponse; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction; import 
org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction; -import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Response; import org.junit.Before; import java.util.Collections; @@ -41,7 +41,7 @@ public void testIndexUpgradeInfo() { // Testing only negative case here, the positive test is done in bwcTests assertAcked(client().admin().indices().prepareCreate("test").get()); ensureYellow("test"); - Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); + IndexUpgradeInfoResponse response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); assertThat(response.getActions().entrySet(), empty()); } @@ -57,7 +57,7 @@ public void testIndexUpgradeInfoLicense() throws Exception { () -> client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get()); assertThat(e.getMessage(), equalTo("current license is non-compliant for [upgrade]")); enableLicensing(); - Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); + IndexUpgradeInfoResponse response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); assertThat(response.getActions().entrySet(), empty()); } @@ -132,7 +132,7 @@ public void testInternalUpgradePrePostChecks() throws Exception { public void testIndexUpgradeInfoOnEmptyCluster() { // On empty cluster asking for all indices shouldn't fail since no indices means nothing needs to be upgraded - Response response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("_all").get(); + IndexUpgradeInfoResponse response = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("_all").get(); assertThat(response.getActions().entrySet(), empty()); // but calling on a particular index should fail diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java index a7e9e9a26a65e..090d7d11baf76 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java @@ -13,8 +13,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import java.util.Arrays; import java.util.Map; diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java index c3f371a74b71d..b852f843bdf21 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeTasksIT.java @@ -30,6 +30,8 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse; +import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.Script; 
import org.elasticsearch.script.ScriptContext; @@ -39,7 +41,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; import org.elasticsearch.xpack.core.upgrade.UpgradeField; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeAction; import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction; @@ -170,8 +171,7 @@ public void testParentTasksDuringUpgrade() throws Exception { ensureYellow("test"); - - IndexUpgradeInfoAction.Response infoResponse = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); + IndexUpgradeInfoResponse infoResponse = client().prepareExecute(IndexUpgradeInfoAction.INSTANCE).setIndices("test").get(); assertThat(infoResponse.getActions().keySet(), contains("test")); assertThat(infoResponse.getActions().get("test"), equalTo(UpgradeActionRequired.UPGRADE)); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java deleted file mode 100644 index d1e2f23215480..0000000000000 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionRequestTests.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.upgrade.actions; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Request; - -public class IndexUpgradeInfoActionRequestTests extends AbstractStreamableTestCase { - @Override - protected Request createTestInstance() { - int indexCount = randomInt(4); - String[] indices = new String[indexCount]; - for (int i = 0; i < indexCount; i++) { - indices[i] = randomAlphaOfLength(10); - } - Request request = new Request(indices); - if (randomBoolean()) { - request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); - } - return request; - } - - @Override - protected Request createBlankInstance() { - return new Request(); - } -} diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionResponseTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionResponseTests.java deleted file mode 100644 index 6893e45dd2e4f..0000000000000 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/actions/IndexUpgradeInfoActionResponseTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.upgrade.actions; - -import org.elasticsearch.test.AbstractStreamableTestCase; -import org.elasticsearch.xpack.core.upgrade.UpgradeActionRequired; -import org.elasticsearch.xpack.core.upgrade.actions.IndexUpgradeInfoAction.Response; - -import java.util.EnumSet; -import java.util.HashMap; -import java.util.Map; - -public class IndexUpgradeInfoActionResponseTests extends AbstractStreamableTestCase { - - - @Override - protected Response createTestInstance() { - int actionsCount = randomIntBetween(0, 5); - Map actions = new HashMap<>(actionsCount); - for (int i = 0; i < actionsCount; i++) { - actions.put(randomAlphaOfLength(10), randomFrom(EnumSet.allOf(UpgradeActionRequired.class))); - } - return new Response(actions); - } - - @Override - protected Response createBlankInstance() { - return new Response(); - } -} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java new file mode 100644 index 0000000000000..abcc4a68eb88a --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + + +public class DeleteLicenseRequest extends AcknowledgedRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponse.java similarity index 70% rename from server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponse.java index 293a62b66f26e..596af7f2f90e4 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WritePipelineResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponse.java @@ -16,23 +16,21 @@ * specific language governing permissions and limitations * under the License. 
*/ - -package org.elasticsearch.action.ingest; +package org.elasticsearch.protocol.xpack.license; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentParser; -public class WritePipelineResponse extends AcknowledgedResponse implements ToXContentObject { +public class DeleteLicenseResponse extends AcknowledgedResponse { - WritePipelineResponse() { + public DeleteLicenseResponse() { } - public WritePipelineResponse(boolean acknowledged) { + public DeleteLicenseResponse(boolean acknowledged) { super(acknowledged); } - public static WritePipelineResponse fromXContent(XContentParser parser) { - return new WritePipelineResponse(parseAcknowledged(parser)); + public static DeleteLicenseResponse fromXContent(XContentParser parser) { + return new DeleteLicenseResponse(parseAcknowledged(parser)); } } diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java new file mode 100644 index 0000000000000..ff511d6ca4815 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; + + +public class GetLicenseRequest extends MasterNodeReadRequest { + + public GetLicenseRequest() { + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java new file mode 100644 index 0000000000000..7232e185a7ef5 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java @@ -0,0 +1,38 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionResponse; + +public class GetLicenseResponse extends ActionResponse { + + private String license; + + GetLicenseResponse() { + } + + public GetLicenseResponse(String license) { + this.license = license; + } + + public String getLicenseDefinition() { + return license; + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java new file mode 100644 index 0000000000000..8b1904dbc8f8a --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java @@ -0,0 +1,94 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +public class IndexUpgradeInfoRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { + + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); + + public IndexUpgradeInfoRequest(String... indices) { + indices(indices); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndexUpgradeInfoRequest indices(String... 
indices) { + this.indices = Objects.requireNonNull(indices, "indices cannot be null"); + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public void indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IndexUpgradeInfoRequest request = (IndexUpgradeInfoRequest) o; + return Arrays.equals(indices, request.indices) && + Objects.equals(indicesOptions.toString(), request.indicesOptions.toString()); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString()); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java new file mode 100644 index 0000000000000..4c1208f960ebd --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java @@ -0,0 +1,133 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField INDICES = new ParseField("indices"); + private static final ParseField ACTION_REQUIRED = new ParseField("action_required"); + + private static final ConstructingObjectParser<IndexUpgradeInfoResponse, Void> PARSER = + new ConstructingObjectParser<>("IndexUpgradeInfoResponse", + true, + (a, c) -> { + @SuppressWarnings("unchecked") + Map<String, Object> map = (Map<String, Object>)a[0]; + Map<String, UpgradeActionRequired> actionsRequired = map.entrySet().stream() + .filter(e -> { + if (e.getValue() instanceof Map == false) { + return false; + } + @SuppressWarnings("unchecked") + Map<String, Object> value = (Map<String, Object>)e.getValue(); + return value.containsKey(ACTION_REQUIRED.getPreferredName()); + }) + .collect(Collectors.toMap( + Map.Entry::getKey, + e -> { + @SuppressWarnings("unchecked") + Map<String, Object> value = (Map<String, Object>) e.getValue(); + return UpgradeActionRequired.fromString((String)value.get(ACTION_REQUIRED.getPreferredName())); + } + )); + return new IndexUpgradeInfoResponse(actionsRequired); + }); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> p.map(), INDICES); + } + + + private Map<String, UpgradeActionRequired> actions; + + public IndexUpgradeInfoResponse() { + + } + + public IndexUpgradeInfoResponse(Map<String, UpgradeActionRequired> actions) { + this.actions = actions; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } + + public Map<String, UpgradeActionRequired> getActions() { + return actions; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.startObject(INDICES.getPreferredName()); + for (Map.Entry<String, UpgradeActionRequired> entry : actions.entrySet()) { + builder.startObject(entry.getKey()); + { + builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString()); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IndexUpgradeInfoResponse response = (IndexUpgradeInfoResponse) o; + return Objects.equals(actions, response.actions); + } + + @Override + public int hashCode() { + return Objects.hash(actions); + } + + public static IndexUpgradeInfoResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/UpgradeActionRequired.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java
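And a sketch of consuming the response via the parser above; the JSON body, the "reindex" action value, and the NamedXContentRegistry/DeprecationHandler wiring (all from org.elasticsearch.common.xcontent) are assumptions about how a caller would typically drive fromXContent:

    String json = "{\"indices\":{\"logs-2015\":{\"action_required\":\"reindex\"}}}";
    try (XContentParser parser = XContentType.JSON.xContent().createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
        IndexUpgradeInfoResponse response = IndexUpgradeInfoResponse.fromXContent(parser);
        // "reindex" assumes UpgradeActionRequired defines such a constant.
        UpgradeActionRequired action = response.getActions().get("logs-2015");
    }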
similarity index 59% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/UpgradeActionRequired.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java index 1bc4d92f33d90..c87e37be7a55a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/UpgradeActionRequired.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.upgrade; +package org.elasticsearch.protocol.xpack.migration; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java new file mode 100644 index 0000000000000..12dc8eebc1773 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Request and Response objects for the default distribution's Migration + * APIs. + */ +package org.elasticsearch.protocol.xpack.migration; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java new file mode 100644 index 0000000000000..2cdf1993fccd3 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobRequest.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Objects; + +public class PutJobRequest extends ActionRequest implements ToXContentObject { + + private final Job job; + + public PutJobRequest(Job job) { + this.job = job; + } + + public Job getJob() { + return job; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return job.toXContent(builder, params); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PutJobRequest request = (PutJobRequest) object; + return Objects.equals(job, request.job); + } + + @Override + public int hashCode() { + return Objects.hash(job); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java new file mode 100644 index 0000000000000..b37bd35d6b17f --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/PutJobResponse.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
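PutJobRequest above is a thin ToXContentObject wrapper, so the PUT body is exactly the job configuration. A minimal sketch, where 'job' is assumed to come from the Job.Builder added later in this diff:

    PutJobRequest request = new PutJobRequest(job); // 'job' built elsewhere via Job.Builder
    // Strings.toString(ToXContent) drives the toXContent above, yielding the
    // JSON body that would be sent to the put-job endpoint.
    String body = Strings.toString(request);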
+ */ +package org.elasticsearch.protocol.xpack.ml; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Objects; + +public class PutJobResponse implements ToXContentObject { + + private Job job; + + public static PutJobResponse fromXContent(XContentParser parser) throws IOException { + return new PutJobResponse(Job.PARSER.parse(parser, null).build()); + } + + public PutJobResponse(Job job) { + this.job = job; + } + + public PutJobResponse() { + } + + public Job getResponse() { + return job; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + job.toXContent(builder, params); + return builder; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + if (object == null || getClass() != object.getClass()) { + return false; + } + PutJobResponse response = (PutJobResponse) object; + return Objects.equals(job, response.job); + } + + @Override + public int hashCode() { + return Objects.hash(job); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java new file mode 100644 index 0000000000000..0b9d9f1204614 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfig.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.datafeed; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; + +/** + * The description of how searches should be chunked. 
+ */ +public class ChunkingConfig implements ToXContentObject { + + public static final ParseField MODE_FIELD = new ParseField("mode"); + public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "chunking_config", true, a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1])); + + static { + PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return Mode.fromString(p.text()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, MODE_FIELD, ValueType.STRING); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return TimeValue.parseTimeValue(p.text(), TIME_SPAN_FIELD.getPreferredName()); + } + throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); + }, TIME_SPAN_FIELD, ValueType.STRING); + + } + + private final Mode mode; + private final TimeValue timeSpan; + + + ChunkingConfig(Mode mode, @Nullable TimeValue timeSpan) { + this.mode = Objects.requireNonNull(mode, MODE_FIELD.getPreferredName()); + this.timeSpan = timeSpan; + } + + @Nullable + public TimeValue getTimeSpan() { + return timeSpan; + } + + Mode getMode() { + return mode; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MODE_FIELD.getPreferredName(), mode); + if (timeSpan != null) { + builder.field(TIME_SPAN_FIELD.getPreferredName(), timeSpan.getStringRep()); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(mode, timeSpan); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + ChunkingConfig other = (ChunkingConfig) obj; + return Objects.equals(this.mode, other.mode) && + Objects.equals(this.timeSpan, other.timeSpan); + } + + public static ChunkingConfig newAuto() { + return new ChunkingConfig(Mode.AUTO, null); + } + + public static ChunkingConfig newOff() { + return new ChunkingConfig(Mode.OFF, null); + } + + public static ChunkingConfig newManual(TimeValue timeSpan) { + return new ChunkingConfig(Mode.MANUAL, timeSpan); + } + + public enum Mode { + AUTO, MANUAL, OFF; + + public static Mode fromString(String value) { + return Mode.valueOf(value.toUpperCase(Locale.ROOT)); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java new file mode 100644 index 0000000000000..929d4dacb90fa --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfig.java @@ -0,0 +1,329 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
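The three factory methods on ChunkingConfig above map one-to-one onto the Mode enum; a usage sketch (the one-hour span is illustrative):

    ChunkingConfig auto = ChunkingConfig.newAuto();    // serializes as {"mode":"auto"}
    ChunkingConfig off = ChunkingConfig.newOff();      // serializes as {"mode":"off"}
    // Manual chunking carries a time_span: {"mode":"manual","time_span":"1h"}
    ChunkingConfig manual = ChunkingConfig.newManual(TimeValue.timeValueHours(1));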
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.datafeed; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; + +/** + * Datafeed configuration options pojo. Describes where to proactively pull input + * data from. + *

+ * If a value has not been set it will be null. Object wrappers are + * used around integral types and booleans so they can take null + * values. + */ +public class DatafeedConfig implements ToXContentObject { + + public static final int DEFAULT_SCROLL_SIZE = 1000; + + public static final ParseField ID = new ParseField("datafeed_id"); + public static final ParseField QUERY_DELAY = new ParseField("query_delay"); + public static final ParseField FREQUENCY = new ParseField("frequency"); + public static final ParseField INDEXES = new ParseField("indexes"); + public static final ParseField INDICES = new ParseField("indices"); + public static final ParseField TYPES = new ParseField("types"); + public static final ParseField QUERY = new ParseField("query"); + public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); + public static final ParseField AGGREGATIONS = new ParseField("aggregations"); + public static final ParseField AGGS = new ParseField("aggs"); + public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); + public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config"); + + public static final ConstructingObjectParser<Builder, Void> PARSER = new ConstructingObjectParser<>( + "datafeed_config", true, a -> new Builder((String)a[0], (String)a[1])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + + PARSER.declareStringArray(Builder::setIndices, INDEXES); + PARSER.declareStringArray(Builder::setIndices, INDICES); + PARSER.declareStringArray(Builder::setTypes, TYPES); + PARSER.declareString((builder, val) -> + builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), QUERY_DELAY); + PARSER.declareString((builder, val) -> + builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), FREQUENCY); + PARSER.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY); + PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGREGATIONS); + PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), AGGS); + PARSER.declareObject(Builder::setScriptFields, (p, c) -> { + List<SearchSourceBuilder.ScriptField> parsedScriptFields = new ArrayList<>(); + while (p.nextToken() != XContentParser.Token.END_OBJECT) { + parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); + } + return parsedScriptFields; + }, SCRIPT_FIELDS); + PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE); + PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG); + } + + private final String id; + private final String jobId; + + /** + * The delay before starting to query a period of time + */ + private final TimeValue queryDelay; + + /** + * The frequency with which queries are executed + */ + private final TimeValue frequency; + + private final List<String> indices; + private final List<String> types; + private final QueryBuilder query; + private final AggregatorFactories.Builder aggregations; + private final List<SearchSourceBuilder.ScriptField> scriptFields; + private final Integer scrollSize; + private final ChunkingConfig chunkingConfig; + + private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List<String> indices, List<String> types, + QueryBuilder query, AggregatorFactories.Builder aggregations, List<SearchSourceBuilder.ScriptField> scriptFields, + Integer scrollSize, ChunkingConfig chunkingConfig) { + this.id = id; + this.jobId = jobId; +
this.queryDelay = queryDelay; + this.frequency = frequency; + this.indices = indices == null ? null : Collections.unmodifiableList(indices); + this.types = types == null ? null : Collections.unmodifiableList(types); + this.query = query; + this.aggregations = aggregations; + this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields); + this.scrollSize = scrollSize; + this.chunkingConfig = chunkingConfig; + } + + public String getId() { + return id; + } + + public String getJobId() { + return jobId; + } + + public TimeValue getQueryDelay() { + return queryDelay; + } + + public TimeValue getFrequency() { + return frequency; + } + + public List getIndices() { + return indices; + } + + public List getTypes() { + return types; + } + + public Integer getScrollSize() { + return scrollSize; + } + + public QueryBuilder getQuery() { + return query; + } + + public AggregatorFactories.Builder getAggregations() { + return aggregations; + } + + public List getScriptFields() { + return scriptFields == null ? Collections.emptyList() : scriptFields; + } + + public ChunkingConfig getChunkingConfig() { + return chunkingConfig; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ID.getPreferredName(), id); + builder.field(Job.ID.getPreferredName(), jobId); + if (queryDelay != null) { + builder.field(QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); + } + if (frequency != null) { + builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); + } + builder.field(INDICES.getPreferredName(), indices); + builder.field(TYPES.getPreferredName(), types); + builder.field(QUERY.getPreferredName(), query); + if (aggregations != null) { + builder.field(AGGREGATIONS.getPreferredName(), aggregations); + } + if (scriptFields != null) { + builder.startObject(SCRIPT_FIELDS.getPreferredName()); + for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { + scriptField.toXContent(builder, params); + } + builder.endObject(); + } + builder.field(SCROLL_SIZE.getPreferredName(), scrollSize); + if (chunkingConfig != null) { + builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig); + } + + builder.endObject(); + return builder; + } + + /** + * The lists of indices and types are compared for equality but they are not + * sorted first so this test could fail simply because the indices and types + * lists are in different orders. 
+ */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DatafeedConfig that = (DatafeedConfig) other; + + return Objects.equals(this.id, that.id) + && Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.frequency, that.frequency) + && Objects.equals(this.queryDelay, that.queryDelay) + && Objects.equals(this.indices, that.indices) + && Objects.equals(this.types, that.types) + && Objects.equals(this.query, that.query) + && Objects.equals(this.scrollSize, that.scrollSize) + && Objects.equals(this.aggregations, that.aggregations) + && Objects.equals(this.scriptFields, that.scriptFields) + && Objects.equals(this.chunkingConfig, that.chunkingConfig); + } + + @Override + public int hashCode() { + return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, + chunkingConfig); + } + + public static class Builder { + + private String id; + private String jobId; + private TimeValue queryDelay; + private TimeValue frequency; + private List indices = Collections.emptyList(); + private List types = Collections.emptyList(); + private QueryBuilder query = QueryBuilders.matchAllQuery(); + private AggregatorFactories.Builder aggregations; + private List scriptFields; + private Integer scrollSize = DEFAULT_SCROLL_SIZE; + private ChunkingConfig chunkingConfig; + + public Builder(String id, String jobId) { + this.id = Objects.requireNonNull(id, ID.getPreferredName()); + this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); + } + + public Builder(DatafeedConfig config) { + this.id = config.id; + this.jobId = config.jobId; + this.queryDelay = config.queryDelay; + this.frequency = config.frequency; + this.indices = config.indices; + this.types = config.types; + this.query = config.query; + this.aggregations = config.aggregations; + this.scriptFields = config.scriptFields; + this.scrollSize = config.scrollSize; + this.chunkingConfig = config.chunkingConfig; + } + + public Builder setIndices(List indices) { + this.indices = Objects.requireNonNull(indices, INDICES.getPreferredName()); + return this; + } + + public Builder setTypes(List types) { + this.types = Objects.requireNonNull(types, TYPES.getPreferredName()); + return this; + } + + public Builder setQueryDelay(TimeValue queryDelay) { + this.queryDelay = queryDelay; + return this; + } + + public Builder setFrequency(TimeValue frequency) { + this.frequency = frequency; + return this; + } + + public Builder setQuery(QueryBuilder query) { + this.query = Objects.requireNonNull(query, QUERY.getPreferredName()); + return this; + } + + public Builder setAggregations(AggregatorFactories.Builder aggregations) { + this.aggregations = aggregations; + return this; + } + + public Builder setScriptFields(List scriptFields) { + List sorted = new ArrayList<>(scriptFields); + sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); + this.scriptFields = sorted; + return this; + } + + public Builder setScrollSize(int scrollSize) { + this.scrollSize = scrollSize; + return this; + } + + public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { + this.chunkingConfig = chunkingConfig; + return this; + } + + public DatafeedConfig build() { + return new DatafeedConfig(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + chunkingConfig); + } + } +} diff --git 
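Putting the DatafeedConfig.Builder above together, a minimal sketch; every name and setting here is illustrative, and besides the imports the class already uses it assumes java.util.Arrays:

    DatafeedConfig datafeed = new DatafeedConfig.Builder("traffic-feed", "traffic-job")
            .setIndices(Arrays.asList("network-traffic"))
            .setTypes(Arrays.asList("doc"))
            .setFrequency(TimeValue.timeValueMinutes(5))
            .setQuery(QueryBuilders.termQuery("direction", "outbound"))
            .setChunkingConfig(ChunkingConfig.newAuto())
            .build();
    // Unset fields keep the builder defaults: match_all query (overridden here),
    // scroll_size 1000, and null query_delay.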
a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java new file mode 100644 index 0000000000000..787bdf06e5ec2 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdate.java @@ -0,0 +1,311 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.datafeed; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; + +/** + * A datafeed update contains partial properties to update a {@link DatafeedConfig}. + * The main difference between this class and {@link DatafeedConfig} is that here all + * fields are nullable. 
+ */ +public class DatafeedUpdate implements ToXContentObject { + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "datafeed_update", true, a -> new Builder((String)a[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); + + PARSER.declareString(Builder::setJobId, Job.ID); + PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); + PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); + PARSER.declareStringArray(Builder::setTypes, DatafeedConfig.TYPES); + PARSER.declareString((builder, val) -> builder.setQueryDelay( + TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), DatafeedConfig.QUERY_DELAY); + PARSER.declareString((builder, val) -> builder.setFrequency( + TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), DatafeedConfig.FREQUENCY); + PARSER.declareObject(Builder::setQuery, (p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), DatafeedConfig.QUERY); + PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), + DatafeedConfig.AGGREGATIONS); + PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(p), + DatafeedConfig.AGGS); + PARSER.declareObject(Builder::setScriptFields, (p, c) -> { + List parsedScriptFields = new ArrayList<>(); + while (p.nextToken() != XContentParser.Token.END_OBJECT) { + parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); + } + return parsedScriptFields; + }, DatafeedConfig.SCRIPT_FIELDS); + PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); + PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG); + } + + private final String id; + private final String jobId; + private final TimeValue queryDelay; + private final TimeValue frequency; + private final List indices; + private final List types; + private final QueryBuilder query; + private final AggregatorFactories.Builder aggregations; + private final List scriptFields; + private final Integer scrollSize; + private final ChunkingConfig chunkingConfig; + + private DatafeedUpdate(String id, String jobId, TimeValue queryDelay, TimeValue frequency, List indices, List types, + QueryBuilder query, AggregatorFactories.Builder aggregations, List scriptFields, + Integer scrollSize, ChunkingConfig chunkingConfig) { + this.id = id; + this.jobId = jobId; + this.queryDelay = queryDelay; + this.frequency = frequency; + this.indices = indices; + this.types = types; + this.query = query; + this.aggregations = aggregations; + this.scriptFields = scriptFields; + this.scrollSize = scrollSize; + this.chunkingConfig = chunkingConfig; + } + + /** + * Get the id of the datafeed to update + */ + public String getId() { + return id; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(DatafeedConfig.ID.getPreferredName(), id); + addOptionalField(builder, Job.ID, jobId); + if (queryDelay != null) { + builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); + } + if (frequency != null) { + builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); + } + addOptionalField(builder, DatafeedConfig.INDICES, indices); + addOptionalField(builder, DatafeedConfig.TYPES, types); + addOptionalField(builder, DatafeedConfig.QUERY, query); + 
addOptionalField(builder, DatafeedConfig.AGGREGATIONS, aggregations); + if (scriptFields != null) { + builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); + for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { + scriptField.toXContent(builder, params); + } + builder.endObject(); + } + addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize); + addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig); + builder.endObject(); + return builder; + } + + private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { + if (value != null) { + builder.field(field.getPreferredName(), value); + } + } + + public String getJobId() { + return jobId; + } + + public TimeValue getQueryDelay() { + return queryDelay; + } + + public TimeValue getFrequency() { + return frequency; + } + + public List getIndices() { + return indices; + } + + public List getTypes() { + return types; + } + + public Integer getScrollSize() { + return scrollSize; + } + + public QueryBuilder getQuery() { + return query; + } + + public AggregatorFactories.Builder getAggregations() { + return aggregations; + } + + public List getScriptFields() { + return scriptFields == null ? Collections.emptyList() : scriptFields; + } + + public ChunkingConfig getChunkingConfig() { + return chunkingConfig; + } + + /** + * The lists of indices and types are compared for equality but they are not + * sorted first so this test could fail simply because the indices and types + * lists are in different orders. + */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DatafeedUpdate that = (DatafeedUpdate) other; + + return Objects.equals(this.id, that.id) + && Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.frequency, that.frequency) + && Objects.equals(this.queryDelay, that.queryDelay) + && Objects.equals(this.indices, that.indices) + && Objects.equals(this.types, that.types) + && Objects.equals(this.query, that.query) + && Objects.equals(this.scrollSize, that.scrollSize) + && Objects.equals(this.aggregations, that.aggregations) + && Objects.equals(this.scriptFields, that.scriptFields) + && Objects.equals(this.chunkingConfig, that.chunkingConfig); + } + + @Override + public int hashCode() { + return Objects.hash(id, jobId, frequency, queryDelay, indices, types, query, scrollSize, aggregations, scriptFields, + chunkingConfig); + } + + public static class Builder { + + private String id; + private String jobId; + private TimeValue queryDelay; + private TimeValue frequency; + private List indices; + private List types; + private QueryBuilder query; + private AggregatorFactories.Builder aggregations; + private List scriptFields; + private Integer scrollSize; + private ChunkingConfig chunkingConfig; + + public Builder(String id) { + this.id = Objects.requireNonNull(id, DatafeedConfig.ID.getPreferredName()); + } + + public Builder(DatafeedUpdate config) { + this.id = config.id; + this.jobId = config.jobId; + this.queryDelay = config.queryDelay; + this.frequency = config.frequency; + this.indices = config.indices; + this.types = config.types; + this.query = config.query; + this.aggregations = config.aggregations; + this.scriptFields = config.scriptFields; + this.scrollSize = config.scrollSize; + this.chunkingConfig = config.chunkingConfig; + } + + public Builder setJobId(String jobId) { + this.jobId = jobId; + return this; 
+ } + + public Builder setIndices(List indices) { + this.indices = indices; + return this; + } + + public Builder setTypes(List types) { + this.types = types; + return this; + } + + public Builder setQueryDelay(TimeValue queryDelay) { + this.queryDelay = queryDelay; + return this; + } + + public Builder setFrequency(TimeValue frequency) { + this.frequency = frequency; + return this; + } + + public Builder setQuery(QueryBuilder query) { + this.query = query; + return this; + } + + public Builder setAggregations(AggregatorFactories.Builder aggregations) { + this.aggregations = aggregations; + return this; + } + + public Builder setScriptFields(List scriptFields) { + List sorted = new ArrayList<>(scriptFields); + sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); + this.scriptFields = sorted; + return this; + } + + public Builder setScrollSize(int scrollSize) { + this.scrollSize = scrollSize; + return this; + } + + public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { + this.chunkingConfig = chunkingConfig; + return this; + } + + public DatafeedUpdate build() { + return new DatafeedUpdate(id, jobId, queryDelay, frequency, indices, types, query, aggregations, scriptFields, scrollSize, + chunkingConfig); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java new file mode 100644 index 0000000000000..00fa1bdd47fed --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfig.java @@ -0,0 +1,412 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.config; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; + +/** + * Analysis configuration options that describe which fields are + * analyzed and which functions are used to detect anomalies. + *
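Because DatafeedUpdate's toXContent only emits non-null fields (via addOptionalField), a partial update sketch like the following serializes just the datafeed_id plus the two changed settings; the values are illustrative:

    DatafeedUpdate update = new DatafeedUpdate.Builder("traffic-feed")
            .setFrequency(TimeValue.timeValueMinutes(10))
            .setScrollSize(500)
            .build();
    // Fields left null (indices, types, query, aggregations, ...) are omitted
    // from the body, so the server keeps their current values.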

        + * The configuration can contain multiple detectors, a new anomaly detector will + * be created for each detector configuration. The fields + * bucketSpan, summaryCountFieldName and categorizationFieldName + * apply to all detectors. + *

        + * If a value has not been set it will be null + * Object wrappers are used around integral types & booleans so they can take + * null values. + */ +public class AnalysisConfig implements ToXContentObject { + /** + * Serialisation names + */ + public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config"); + public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); + public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name"); + public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters"); + public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER; + public static final ParseField LATENCY = new ParseField("latency"); + public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name"); + public static final ParseField DETECTORS = new ParseField("detectors"); + public static final ParseField INFLUENCERS = new ParseField("influencers"); + public static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets"); + public static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window"); + public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(ANALYSIS_CONFIG.getPreferredName(), + true, a -> new AnalysisConfig.Builder((List) a[0])); + + static { + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), + (p, c) -> (Detector.PARSER).apply(p, c).build(), DETECTORS); + PARSER.declareString((builder, val) -> + builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), BUCKET_SPAN); + PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); + PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); + // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not + // possible to simply declare whether the field is a string or object and a completely custom parser is required + PARSER.declareField(Builder::setCategorizationAnalyzerConfig, + (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p), + CATEGORIZATION_ANALYZER, ObjectParser.ValueType.OBJECT_OR_STRING); + PARSER.declareString((builder, val) -> + builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); + PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); + PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS); + PARSER.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); + PARSER.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); + PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); + } + + /** + * These values apply to all detectors + */ + private final TimeValue bucketSpan; + private final String categorizationFieldName; + private final List categorizationFilters; + private final CategorizationAnalyzerConfig categorizationAnalyzerConfig; + private final TimeValue latency; + private final String summaryCountFieldName; + private final List detectors; + private final List influencers; + private final Boolean overlappingBuckets; + private final Long resultFinalizationWindow; + private final Boolean multivariateByFields; + + 
private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, List categorizationFilters, + CategorizationAnalyzerConfig categorizationAnalyzerConfig, TimeValue latency, String summaryCountFieldName, + List detectors, List influencers, Boolean overlappingBuckets, Long resultFinalizationWindow, + Boolean multivariateByFields) { + this.detectors = Collections.unmodifiableList(detectors); + this.bucketSpan = bucketSpan; + this.latency = latency; + this.categorizationFieldName = categorizationFieldName; + this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; + this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters); + this.summaryCountFieldName = summaryCountFieldName; + this.influencers = Collections.unmodifiableList(influencers); + this.overlappingBuckets = overlappingBuckets; + this.resultFinalizationWindow = resultFinalizationWindow; + this.multivariateByFields = multivariateByFields; + } + + /** + * The analysis bucket span + * + * @return The bucketspan or null if not set + */ + public TimeValue getBucketSpan() { + return bucketSpan; + } + + public String getCategorizationFieldName() { + return categorizationFieldName; + } + + public List getCategorizationFilters() { + return categorizationFilters; + } + + public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() { + return categorizationAnalyzerConfig; + } + + /** + * The latency interval during which out-of-order records should be handled. + * + * @return The latency interval or null if not set + */ + public TimeValue getLatency() { + return latency; + } + + /** + * The name of the field that contains counts for pre-summarised input + * + * @return The field name or null if not set + */ + public String getSummaryCountFieldName() { + return summaryCountFieldName; + } + + /** + * The list of analysis detectors. 
In a valid configuration the list should + * contain at least 1 {@link Detector} + * + * @return The Detectors used in this job + */ + public List getDetectors() { + return detectors; + } + + /** + * The list of influence field names + */ + public List getInfluencers() { + return influencers; + } + + public Boolean getOverlappingBuckets() { + return overlappingBuckets; + } + + public Long getResultFinalizationWindow() { + return resultFinalizationWindow; + } + + public Boolean getMultivariateByFields() { + return multivariateByFields; + } + + private static void addIfNotNull(Set fields, String field) { + if (field != null) { + fields.add(field); + } + } + + public List fields() { + return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName); + } + + private List collectNonNullAndNonEmptyDetectorFields( + Function fieldGetter) { + Set fields = new HashSet<>(); + + for (Detector d : getDetectors()) { + addIfNotNull(fields, fieldGetter.apply(d)); + } + + // remove empty strings + fields.remove(""); + + return new ArrayList<>(fields); + } + + public List byFields() { + return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName); + } + + public List overFields() { + return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName); + } + + public List partitionFields() { + return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (bucketSpan != null) { + builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); + } + if (categorizationFieldName != null) { + builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName); + } + if (categorizationFilters != null) { + builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); + } + if (categorizationAnalyzerConfig != null) { + // This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params); + // because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it + // gets written as a single string. 
+ categorizationAnalyzerConfig.toXContent(builder, params); + } + if (latency != null) { + builder.field(LATENCY.getPreferredName(), latency.getStringRep()); + } + if (summaryCountFieldName != null) { + builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName); + } + builder.startArray(DETECTORS.getPreferredName()); + for (Detector detector : detectors) { + detector.toXContent(builder, params); + } + builder.endArray(); + builder.field(INFLUENCERS.getPreferredName(), influencers); + if (overlappingBuckets != null) { + builder.field(OVERLAPPING_BUCKETS.getPreferredName(), overlappingBuckets); + } + if (resultFinalizationWindow != null) { + builder.field(RESULT_FINALIZATION_WINDOW.getPreferredName(), resultFinalizationWindow); + } + if (multivariateByFields != null) { + builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + AnalysisConfig that = (AnalysisConfig) object; + return Objects.equals(latency, that.latency) && + Objects.equals(bucketSpan, that.bucketSpan) && + Objects.equals(categorizationFieldName, that.categorizationFieldName) && + Objects.equals(categorizationFilters, that.categorizationFilters) && + Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig) && + Objects.equals(summaryCountFieldName, that.summaryCountFieldName) && + Objects.equals(detectors, that.detectors) && + Objects.equals(influencers, that.influencers) && + Objects.equals(overlappingBuckets, that.overlappingBuckets) && + Objects.equals(resultFinalizationWindow, that.resultFinalizationWindow) && + Objects.equals(multivariateByFields, that.multivariateByFields); + } + + @Override + public int hashCode() { + return Objects.hash( + bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, latency, + summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow, + multivariateByFields); + } + + public static class Builder { + + private List detectors; + private TimeValue bucketSpan; + private TimeValue latency; + private String categorizationFieldName; + private List categorizationFilters; + private CategorizationAnalyzerConfig categorizationAnalyzerConfig; + private String summaryCountFieldName; + private List influencers = new ArrayList<>(); + private Boolean overlappingBuckets; + private Long resultFinalizationWindow; + private Boolean multivariateByFields; + + public Builder(List detectors) { + setDetectors(detectors); + } + + public Builder(AnalysisConfig analysisConfig) { + this.detectors = new ArrayList<>(analysisConfig.detectors); + this.bucketSpan = analysisConfig.bucketSpan; + this.latency = analysisConfig.latency; + this.categorizationFieldName = analysisConfig.categorizationFieldName; + this.categorizationFilters = analysisConfig.categorizationFilters == null ? 
null + : new ArrayList<>(analysisConfig.categorizationFilters); + this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig; + this.summaryCountFieldName = analysisConfig.summaryCountFieldName; + this.influencers = new ArrayList<>(analysisConfig.influencers); + this.overlappingBuckets = analysisConfig.overlappingBuckets; + this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow; + this.multivariateByFields = analysisConfig.multivariateByFields; + } + + public Builder setDetectors(List detectors) { + Objects.requireNonNull(detectors, "[" + DETECTORS.getPreferredName() + "] must not be null"); + // We always assign sequential IDs to the detectors that are correct for this analysis config + int detectorIndex = 0; + List sequentialIndexDetectors = new ArrayList<>(detectors.size()); + for (Detector origDetector : detectors) { + Detector.Builder builder = new Detector.Builder(origDetector); + builder.setDetectorIndex(detectorIndex++); + sequentialIndexDetectors.add(builder.build()); + } + this.detectors = sequentialIndexDetectors; + return this; + } + + public Builder setDetector(int detectorIndex, Detector detector) { + detectors.set(detectorIndex, detector); + return this; + } + + public Builder setBucketSpan(TimeValue bucketSpan) { + this.bucketSpan = bucketSpan; + return this; + } + + public Builder setLatency(TimeValue latency) { + this.latency = latency; + return this; + } + + public Builder setCategorizationFieldName(String categorizationFieldName) { + this.categorizationFieldName = categorizationFieldName; + return this; + } + + public Builder setCategorizationFilters(List categorizationFilters) { + this.categorizationFilters = categorizationFilters; + return this; + } + + public Builder setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { + this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; + return this; + } + + public Builder setSummaryCountFieldName(String summaryCountFieldName) { + this.summaryCountFieldName = summaryCountFieldName; + return this; + } + + public Builder setInfluencers(List influencers) { + this.influencers = Objects.requireNonNull(influencers, INFLUENCERS.getPreferredName()); + return this; + } + + public Builder setOverlappingBuckets(Boolean overlappingBuckets) { + this.overlappingBuckets = overlappingBuckets; + return this; + } + + public Builder setResultFinalizationWindow(Long resultFinalizationWindow) { + this.resultFinalizationWindow = resultFinalizationWindow; + return this; + } + + public Builder setMultivariateByFields(Boolean multivariateByFields) { + this.multivariateByFields = multivariateByFields; + return this; + } + + public AnalysisConfig build() { + + return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, + latency, summaryCountFieldName, detectors, influencers, overlappingBuckets, + resultFinalizationWindow, multivariateByFields); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java index f469512f64973..a3f8c2563b2d8 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/DataDescription.java @@ -243,28 +243,34 @@ public static class Builder { private Character fieldDelimiter; private Character 
quoteCharacter; - public void setFormat(DataFormat format) { + public Builder setFormat(DataFormat format) { dataFormat = Objects.requireNonNull(format); + return this; } - private void setFormat(String format) { + private Builder setFormat(String format) { setFormat(DataFormat.forString(format)); + return this; } - public void setTimeField(String fieldName) { + public Builder setTimeField(String fieldName) { timeFieldName = Objects.requireNonNull(fieldName); + return this; } - public void setTimeFormat(String format) { + public Builder setTimeFormat(String format) { timeFormat = Objects.requireNonNull(format); + return this; } - public void setFieldDelimiter(Character delimiter) { + public Builder setFieldDelimiter(Character delimiter) { fieldDelimiter = delimiter; + return this; } - public void setQuoteCharacter(Character value) { + public Builder setQuoteCharacter(Character value) { quoteCharacter = value; + return this; } public DataDescription build() { diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java new file mode 100644 index 0000000000000..6bc1be3b56384 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Job.java @@ -0,0 +1,592 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.config; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil; + +import java.io.IOException; +import java.util.Collections; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * This class represents a configured and created Job. The creation time is set + * to the time the object was constructed and the finished time and last + * data time fields are {@code null} until the job has seen some data or it is + * finished respectively. 
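The DataDescription.Builder change above exists so the setters chain; a sketch (DataFormat.XCONTENT and the field names are assumptions for illustration):

    DataDescription dataDescription = new DataDescription.Builder()
            .setFormat(DataDescription.DataFormat.XCONTENT) // assumed enum constant
            .setTimeField("timestamp")
            .setTimeFormat("epoch_ms")
            .build();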
+ */ +public class Job implements ToXContentObject { + + public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector"; + + /* + * Field names used in serialization + */ + public static final ParseField ID = new ParseField("job_id"); + public static final ParseField JOB_TYPE = new ParseField("job_type"); + public static final ParseField GROUPS = new ParseField("groups"); + public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG; + public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits"); + public static final ParseField CREATE_TIME = new ParseField("create_time"); + public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings"); + public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); + public static final ParseField DESCRIPTION = new ParseField("description"); + public static final ParseField FINISHED_TIME = new ParseField("finished_time"); + public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); + public static final ParseField ESTABLISHED_MODEL_MEMORY = new ParseField("established_model_memory"); + public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); + public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); + public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval"); + public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days"); + public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); + public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); + public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); + public static final ParseField DELETED = new ParseField("deleted"); + + public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); + + static { + PARSER.declareString(Builder::setId, ID); + PARSER.declareString(Builder::setJobType, JOB_TYPE); + PARSER.declareStringArray(Builder::setGroups, GROUPS); + PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); + PARSER.declareField(Builder::setCreateTime, + (p) -> TimeUtil.parseTimeField(p, CREATE_TIME.getPreferredName()), + CREATE_TIME, + ValueType.VALUE); + PARSER.declareField(Builder::setFinishedTime, + (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()), + FINISHED_TIME, + ValueType.VALUE); + PARSER.declareField(Builder::setLastDataTime, + (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), + LAST_DATA_TIME, + ValueType.VALUE); + PARSER.declareLong(Builder::setEstablishedModelMemory, ESTABLISHED_MODEL_MEMORY); + PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); + PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); + PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION); + PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG); + PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); + PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval( + TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName())), BACKGROUND_PERSIST_INTERVAL); + PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); + 
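    // A hedged usage sketch: PARSER is lenient and yields a Job.Builder, so a Job can be
    // rebuilt from the XContent that toXContent emits. Assuming the JSON XContent helpers
    // from org.elasticsearch.common.xcontent and a JSON string jobJson:
    //
    //   try (XContentParser p = JsonXContent.jsonXContent.createParser(
    //           NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, jobJson)) {
    //       Job parsed = Job.PARSER.apply(p, null).build();
    //   }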
PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); + PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); + PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); + PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); + PARSER.declareBoolean(Builder::setDeleted, DELETED); + } + + private final String jobId; + private final String jobType; + + private final List groups; + private final String description; + private final Date createTime; + private final Date finishedTime; + private final Date lastDataTime; + private final Long establishedModelMemory; + private final AnalysisConfig analysisConfig; + private final AnalysisLimits analysisLimits; + private final DataDescription dataDescription; + private final ModelPlotConfig modelPlotConfig; + private final Long renormalizationWindowDays; + private final TimeValue backgroundPersistInterval; + private final Long modelSnapshotRetentionDays; + private final Long resultsRetentionDays; + private final Map customSettings; + private final String modelSnapshotId; + private final String resultsIndexName; + private final boolean deleted; + + private Job(String jobId, String jobType, List groups, String description, Date createTime, + Date finishedTime, Date lastDataTime, Long establishedModelMemory, + AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, + ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, + Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, + String modelSnapshotId, String resultsIndexName, boolean deleted) { + + this.jobId = jobId; + this.jobType = jobType; + this.groups = Collections.unmodifiableList(groups); + this.description = description; + this.createTime = createTime; + this.finishedTime = finishedTime; + this.lastDataTime = lastDataTime; + this.establishedModelMemory = establishedModelMemory; + this.analysisConfig = analysisConfig; + this.analysisLimits = analysisLimits; + this.dataDescription = dataDescription; + this.modelPlotConfig = modelPlotConfig; + this.renormalizationWindowDays = renormalizationWindowDays; + this.backgroundPersistInterval = backgroundPersistInterval; + this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; + this.resultsRetentionDays = resultsRetentionDays; + this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings); + this.modelSnapshotId = modelSnapshotId; + this.resultsIndexName = resultsIndexName; + this.deleted = deleted; + } + + /** + * Return the Job Id. + * + * @return The job Id string + */ + public String getId() { + return jobId; + } + + public String getJobType() { + return jobType; + } + + public List getGroups() { + return groups; + } + + /** + * Private version of getResultsIndexName so that a job can be built from another + * job and pass index name validation + * + * @return The job's index name, minus prefix + */ + private String getResultsIndexNameNoPrefix() { + return resultsIndexName; + } + + /** + * The job description + * + * @return job description + */ + public String getDescription() { + return description; + } + + /** + * The Job creation time. This name is preferred when serialising to the + * REST API. + * + * @return The date the job was created + */ + public Date getCreateTime() { + return createTime; + } + + /** + * The time the job was finished or null if not finished. 
+ * + * @return The date the job was last retired or null + */ + public Date getFinishedTime() { + return finishedTime; + } + + /** + * The last time data was uploaded to the job or null if no + * data has been seen. + * + * @return The date at which the last data was processed + */ + public Date getLastDataTime() { + return lastDataTime; + } + + /** + * The established model memory of the job, or null if model + * memory has not reached equilibrium yet. + * + * @return The established model memory of the job + */ + public Long getEstablishedModelMemory() { + return establishedModelMemory; + } + + /** + * The analysis configuration object + * + * @return The AnalysisConfig + */ + public AnalysisConfig getAnalysisConfig() { + return analysisConfig; + } + + /** + * The analysis options object + * + * @return The AnalysisLimits + */ + public AnalysisLimits getAnalysisLimits() { + return analysisLimits; + } + + public ModelPlotConfig getModelPlotConfig() { + return modelPlotConfig; + } + + /** + * If not set the input data is assumed to be csv with a '_time' field in + * epoch format. + * + * @return A DataDescription or null + * @see DataDescription + */ + public DataDescription getDataDescription() { + return dataDescription; + } + + /** + * The duration of the renormalization window in days + * + * @return renormalization window in days + */ + public Long getRenormalizationWindowDays() { + return renormalizationWindowDays; + } + + /** + * The background persistence interval + * + * @return background persistence interval + */ + public TimeValue getBackgroundPersistInterval() { + return backgroundPersistInterval; + } + + public Long getModelSnapshotRetentionDays() { + return modelSnapshotRetentionDays; + } + + public Long getResultsRetentionDays() { + return resultsRetentionDays; + } + + public Map getCustomSettings() { + return customSettings; + } + + public String getModelSnapshotId() { + return modelSnapshotId; + } + + public boolean isDeleted() { + return deleted; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + final String humanReadableSuffix = "_string"; + + builder.field(ID.getPreferredName(), jobId); + builder.field(JOB_TYPE.getPreferredName(), jobType); + + if (groups.isEmpty() == false) { + builder.field(GROUPS.getPreferredName(), groups); + } + if (description != null) { + builder.field(DESCRIPTION.getPreferredName(), description); + } + if (createTime != null) { + builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime()); + } + if (finishedTime != null) { + builder.timeField(FINISHED_TIME.getPreferredName(), FINISHED_TIME.getPreferredName() + humanReadableSuffix, + finishedTime.getTime()); + } + if (lastDataTime != null) { + builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + humanReadableSuffix, + lastDataTime.getTime()); + } + if (establishedModelMemory != null) { + builder.field(ESTABLISHED_MODEL_MEMORY.getPreferredName(), establishedModelMemory); + } + builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params); + if (analysisLimits != null) { + builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params); + } + if (dataDescription != null) { + builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params); + } + if (modelPlotConfig != null) { + builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params); + } + if 
(renormalizationWindowDays != null) { + builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); + } + if (backgroundPersistInterval != null) { + builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep()); + } + if (modelSnapshotRetentionDays != null) { + builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); + } + if (resultsRetentionDays != null) { + builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); + } + if (customSettings != null) { + builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings); + } + if (modelSnapshotId != null) { + builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId); + } + if (resultsIndexName != null) { + builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); + } + if (params.paramAsBoolean("all", false)) { + builder.field(DELETED.getPreferredName(), deleted); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + Job that = (Job) other; + return Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.jobType, that.jobType) + && Objects.equals(this.groups, that.groups) + && Objects.equals(this.description, that.description) + && Objects.equals(this.createTime, that.createTime) + && Objects.equals(this.finishedTime, that.finishedTime) + && Objects.equals(this.lastDataTime, that.lastDataTime) + && Objects.equals(this.establishedModelMemory, that.establishedModelMemory) + && Objects.equals(this.analysisConfig, that.analysisConfig) + && Objects.equals(this.analysisLimits, that.analysisLimits) + && Objects.equals(this.dataDescription, that.dataDescription) + && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) + && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) + && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) + && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) + && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) + && Objects.equals(this.customSettings, that.customSettings) + && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) + && Objects.equals(this.resultsIndexName, that.resultsIndexName) + && Objects.equals(this.deleted, that.deleted); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, + backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, + modelSnapshotId, resultsIndexName, deleted); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + + public static class Builder { + + private String id; + private String jobType = ANOMALY_DETECTOR_JOB_TYPE; + private List groups = Collections.emptyList(); + private String description; + private AnalysisConfig analysisConfig; + private AnalysisLimits analysisLimits; + private DataDescription dataDescription; + private Date createTime; + private Date finishedTime; + private Date lastDataTime; + private Long establishedModelMemory; + private ModelPlotConfig modelPlotConfig; + private Long renormalizationWindowDays; + private TimeValue 
backgroundPersistInterval; + private Long modelSnapshotRetentionDays; + private Long resultsRetentionDays; + private Map customSettings; + private String modelSnapshotId; + private String resultsIndexName; + private boolean deleted; + + public Builder() { + } + + public Builder(String id) { + this.id = id; + } + + public Builder(Job job) { + this.id = job.getId(); + this.jobType = job.getJobType(); + this.groups = job.getGroups(); + this.description = job.getDescription(); + this.analysisConfig = job.getAnalysisConfig(); + this.analysisLimits = job.getAnalysisLimits(); + this.dataDescription = job.getDataDescription(); + this.createTime = job.getCreateTime(); + this.finishedTime = job.getFinishedTime(); + this.lastDataTime = job.getLastDataTime(); + this.establishedModelMemory = job.getEstablishedModelMemory(); + this.modelPlotConfig = job.getModelPlotConfig(); + this.renormalizationWindowDays = job.getRenormalizationWindowDays(); + this.backgroundPersistInterval = job.getBackgroundPersistInterval(); + this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays(); + this.resultsRetentionDays = job.getResultsRetentionDays(); + this.customSettings = job.getCustomSettings(); + this.modelSnapshotId = job.getModelSnapshotId(); + this.resultsIndexName = job.getResultsIndexNameNoPrefix(); + this.deleted = job.isDeleted(); + } + + public Builder setId(String id) { + this.id = id; + return this; + } + + public String getId() { + return id; + } + + public Builder setJobType(String jobType) { + this.jobType = jobType; + return this; + } + + public Builder setGroups(List groups) { + this.groups = groups == null ? Collections.emptyList() : groups; + return this; + } + + public Builder setCustomSettings(Map customSettings) { + this.customSettings = customSettings; + return this; + } + + public Builder setDescription(String description) { + this.description = description; + return this; + } + + public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { + analysisConfig = Objects.requireNonNull(configBuilder, ANALYSIS_CONFIG.getPreferredName()).build(); + return this; + } + + public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { + this.analysisLimits = Objects.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName()); + return this; + } + + Builder setCreateTime(Date createTime) { + this.createTime = createTime; + return this; + } + + Builder setFinishedTime(Date finishedTime) { + this.finishedTime = finishedTime; + return this; + } + + /** + * Set the wall clock time of the last data upload + * + * @param lastDataTime Wall clock time + */ + public Builder setLastDataTime(Date lastDataTime) { + this.lastDataTime = lastDataTime; + return this; + } + + public Builder setEstablishedModelMemory(Long establishedModelMemory) { + this.establishedModelMemory = establishedModelMemory; + return this; + } + + public Builder setDataDescription(DataDescription.Builder description) { + dataDescription = Objects.requireNonNull(description, DATA_DESCRIPTION.getPreferredName()).build(); + return this; + } + + public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { + this.modelPlotConfig = modelPlotConfig; + return this; + } + + public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { + this.backgroundPersistInterval = backgroundPersistInterval; + return this; + } + + public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { + this.renormalizationWindowDays = renormalizationWindowDays; + return this; + } + + public Builder 
setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { + this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; + return this; + } + + public Builder setResultsRetentionDays(Long resultsRetentionDays) { + this.resultsRetentionDays = resultsRetentionDays; + return this; + } + + public Builder setModelSnapshotId(String modelSnapshotId) { + this.modelSnapshotId = modelSnapshotId; + return this; + } + + public Builder setResultsIndexName(String resultsIndexName) { + this.resultsIndexName = resultsIndexName; + return this; + } + + public Builder setDeleted(boolean deleted) { + this.deleted = deleted; + return this; + } + + /** + * Builds a job. + * + * @return The job + */ + public Job build() { + Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null"); + Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null"); + return new Job( + id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, + analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, + backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, + modelSnapshotId, resultsIndexName, deleted); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java new file mode 100644 index 0000000000000..e07312d12e1f4 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCounts.java @@ -0,0 +1,415 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil; + +import java.io.IOException; +import java.util.Date; +import java.util.Objects; + +/** + * Job processed record counts. + *
        + * The getInput... methods return the actual number of + fields/records sent to the API including invalid records. + * The getProcessed... methods are the number sent to the + Engine. + *
        + * The inputRecordCount field is calculated so it + * should not be set in deserialization but it should be serialised + * so the field is visible. + */ +public class DataCounts implements ToXContentObject { + + public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count"); + public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count"); + public static final ParseField INPUT_BYTES = new ParseField("input_bytes"); + public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count"); + public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count"); + public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count"); + public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count"); + public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count"); + public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count"); + public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count"); + public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); + public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp"); + public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp"); + public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); + public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp"); + public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_counts", true, + a -> new DataCounts((String) a[0], (long) a[1], (long) a[2], (long) a[3], (long) a[4], (long) a[5], (long) a[6], + (long) a[7], (long) a[8], (long) a[9], (long) a[10], (Date) a[11], (Date) a[12], (Date) a[13], (Date) a[14], + (Date) a[15])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), + EARLIEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), + LATEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, 
LAST_DATA_TIME.getPreferredName()), + LAST_DATA_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), + LATEST_EMPTY_BUCKET_TIME, + ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), + LATEST_SPARSE_BUCKET_TIME, + ValueType.VALUE); + } + + private final String jobId; + private long processedRecordCount; + private long processedFieldCount; + private long inputBytes; + private long inputFieldCount; + private long invalidDateCount; + private long missingFieldCount; + private long outOfOrderTimeStampCount; + private long emptyBucketCount; + private long sparseBucketCount; + private long bucketCount; + private Date earliestRecordTimeStamp; + private Date latestRecordTimeStamp; + private Date lastDataTimeStamp; + private Date latestEmptyBucketTimeStamp; + private Date latestSparseBucketTimeStamp; + + public DataCounts(String jobId, long processedRecordCount, long processedFieldCount, long inputBytes, + long inputFieldCount, long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount, + long emptyBucketCount, long sparseBucketCount, long bucketCount, + Date earliestRecordTimeStamp, Date latestRecordTimeStamp, Date lastDataTimeStamp, + Date latestEmptyBucketTimeStamp, Date latestSparseBucketTimeStamp) { + this.jobId = jobId; + this.processedRecordCount = processedRecordCount; + this.processedFieldCount = processedFieldCount; + this.inputBytes = inputBytes; + this.inputFieldCount = inputFieldCount; + this.invalidDateCount = invalidDateCount; + this.missingFieldCount = missingFieldCount; + this.outOfOrderTimeStampCount = outOfOrderTimeStampCount; + this.emptyBucketCount = emptyBucketCount; + this.sparseBucketCount = sparseBucketCount; + this.bucketCount = bucketCount; + this.latestRecordTimeStamp = latestRecordTimeStamp; + this.earliestRecordTimeStamp = earliestRecordTimeStamp; + this.lastDataTimeStamp = lastDataTimeStamp; + this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp; + this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp; + } + + DataCounts(String jobId) { + this.jobId = jobId; + } + + public DataCounts(DataCounts lhs) { + jobId = lhs.jobId; + processedRecordCount = lhs.processedRecordCount; + processedFieldCount = lhs.processedFieldCount; + inputBytes = lhs.inputBytes; + inputFieldCount = lhs.inputFieldCount; + invalidDateCount = lhs.invalidDateCount; + missingFieldCount = lhs.missingFieldCount; + outOfOrderTimeStampCount = lhs.outOfOrderTimeStampCount; + emptyBucketCount = lhs.emptyBucketCount; + sparseBucketCount = lhs.sparseBucketCount; + bucketCount = lhs.bucketCount; + latestRecordTimeStamp = lhs.latestRecordTimeStamp; + earliestRecordTimeStamp = lhs.earliestRecordTimeStamp; + lastDataTimeStamp = lhs.lastDataTimeStamp; + latestEmptyBucketTimeStamp = lhs.latestEmptyBucketTimeStamp; + latestSparseBucketTimeStamp = lhs.latestSparseBucketTimeStamp; + } + + public String getJobId() { + return jobId; + } + + /** + * Number of records processed by this job. + * This value is the number of records sent passed on to + * the engine i.e. 
{@linkplain #getInputRecordCount()} minus + * records with bad dates or out of order + * + * @return Number of records processed by this job {@code long} + */ + public long getProcessedRecordCount() { + return processedRecordCount; + } + + /** + * Number of data points (processed record count * the number + * of analysed fields) processed by this job. This count does + * not include the time field. + * + * @return Number of data points processed by this job {@code long} + */ + public long getProcessedFieldCount() { + return processedFieldCount; + } + + /** + * Total number of input records read. + * This = processed record count + date parse error records count + * + out of order record count. + *
        + * Records with missing fields are counted as they are still written. + * + * @return Total number of input records read {@code long} + */ + public long getInputRecordCount() { + return processedRecordCount + outOfOrderTimeStampCount + + invalidDateCount; + } + + /** + * The total number of bytes sent to this job. + * This value includes the bytes from any records + * that have been discarded for any reason + * e.g. because the date cannot be read + * + * @return Volume in bytes + */ + public long getInputBytes() { + return inputBytes; + } + + /** + * The total number of fields sent to the job + * including fields that aren't analysed. + * + * @return The total number of fields sent to the job + */ + public long getInputFieldCount() { + return inputFieldCount; + } + + /** + * The number of records with an invalid date field that could + * not be parsed or converted to epoch time. + * + * @return The number of records with an invalid date field + */ + public long getInvalidDateCount() { + return invalidDateCount; + } + + /** + * The number of missing fields that had been + * configured for analysis. + * + * @return The number of missing fields + */ + public long getMissingFieldCount() { + return missingFieldCount; + } + + /** + * The number of records with a timestamp that is + * before the time of the latest record. Records should + * be in ascending chronological order + * + * @return The number of records with a timestamp that is before the time of the latest record + */ + public long getOutOfOrderTimeStampCount() { + return outOfOrderTimeStampCount; + } + + /** + * The number of buckets with no records in it. Used to measure general data fitness and/or + * configuration problems (bucket span). + * + * @return Number of empty buckets processed by this job {@code long} + */ + public long getEmptyBucketCount() { + return emptyBucketCount; + } + + /** + * The number of buckets with few records compared to the overall counts. + * Used to measure general data fitness and/or configuration problems (bucket span). + * + * @return Number of sparse buckets processed by this job {@code long} + */ + public long getSparseBucketCount() { + return sparseBucketCount; + } + + /** + * The number of buckets overall. + * + * @return Number of buckets processed by this job {@code long} + */ + public long getBucketCount() { + return bucketCount; + } + + /** + * The time of the first record seen. + * + * @return The first record time + */ + public Date getEarliestRecordTimeStamp() { + return earliestRecordTimeStamp; + } + + /** + * The time of the latest record seen. + * + * @return Latest record time + */ + public Date getLatestRecordTimeStamp() { + return latestRecordTimeStamp; + } + + /** + * The wall clock time the latest record was seen. + * + * @return Wall clock time of the lastest record + */ + public Date getLastDataTimeStamp() { + return lastDataTimeStamp; + } + + /** + * The time of the latest empty bucket seen. + * + * @return Latest empty bucket time + */ + public Date getLatestEmptyBucketTimeStamp() { + return latestEmptyBucketTimeStamp; + } + + /** + * The time of the latest sparse bucket seen. 
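As the Javadoc above spells out, input_record_count is derived rather than stored: processed records plus records with unparseable dates plus out-of-order records. A quick check with illustrative numbers against the public constructor:

    // Argument order: jobId; processed record/field counts; input bytes and fields; invalid-date,
    // missing-field and out-of-order counts; empty/sparse/total bucket counts; five timestamps.
    DataCounts counts = new DataCounts("job-1", 1000L, 4000L, 65536L, 5000L,
            3L, 0L, 2L, 0L, 0L, 10L, null, null, null, null, null);
    assert counts.getInputRecordCount() == 1005L;   // 1000 processed + 3 invalid dates + 2 out of order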
+ * + * @return Latest sparse bucket time + */ + public Date getLatestSparseBucketTimeStamp() { + return latestSparseBucketTimeStamp; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount); + builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount); + builder.field(INPUT_BYTES.getPreferredName(), inputBytes); + builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount); + builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount); + builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount); + builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount); + builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount); + builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); + builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); + if (earliestRecordTimeStamp != null) { + builder.timeField(EARLIEST_RECORD_TIME.getPreferredName(), EARLIEST_RECORD_TIME.getPreferredName() + "_string", + earliestRecordTimeStamp.getTime()); + } + if (latestRecordTimeStamp != null) { + builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", + latestRecordTimeStamp.getTime()); + } + if (lastDataTimeStamp != null) { + builder.timeField(LAST_DATA_TIME.getPreferredName(), LAST_DATA_TIME.getPreferredName() + "_string", + lastDataTimeStamp.getTime()); + } + if (latestEmptyBucketTimeStamp != null) { + builder.timeField(LATEST_EMPTY_BUCKET_TIME.getPreferredName(), LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", + latestEmptyBucketTimeStamp.getTime()); + } + if (latestSparseBucketTimeStamp != null) { + builder.timeField(LATEST_SPARSE_BUCKET_TIME.getPreferredName(), LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string", + latestSparseBucketTimeStamp.getTime()); + } + builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); + + builder.endObject(); + return builder; + } + + /** + * Equality test + */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DataCounts that = (DataCounts) other; + + return Objects.equals(this.jobId, that.jobId) && + this.processedRecordCount == that.processedRecordCount && + this.processedFieldCount == that.processedFieldCount && + this.inputBytes == that.inputBytes && + this.inputFieldCount == that.inputFieldCount && + this.invalidDateCount == that.invalidDateCount && + this.missingFieldCount == that.missingFieldCount && + this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount && + this.emptyBucketCount == that.emptyBucketCount && + this.sparseBucketCount == that.sparseBucketCount && + this.bucketCount == that.bucketCount && + Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) && + Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) && + Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) && + Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) && + Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, processedRecordCount, processedFieldCount, + inputBytes, inputFieldCount, 
invalidDateCount, missingFieldCount, + outOfOrderTimeStampCount, lastDataTimeStamp, emptyBucketCount, sparseBucketCount, bucketCount, + latestRecordTimeStamp, earliestRecordTimeStamp, latestEmptyBucketTimeStamp, latestSparseBucketTimeStamp); + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java new file mode 100644 index 0000000000000..50f655b4dd7f1 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStats.java @@ -0,0 +1,294 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.results.Result; +import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil; + +import java.io.IOException; +import java.util.Date; +import java.util.Locale; +import java.util.Objects; + +/** + * Provide access to the C++ model memory usage numbers for the Java process. 
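ModelSizeStats below is immutable and assembled via its Builder (defined at the end of the class); a minimal sketch with illustrative values:

    ModelSizeStats stats = new ModelSizeStats.Builder("job-1")
            .setModelBytes(65536L)
            .setTotalByFieldCount(2L)
            .setMemoryStatus(ModelSizeStats.MemoryStatus.OK)
            .build();
    // Builder("job-1") already defaults memoryStatus to OK and logTime to the current wall clock;
    // timestamp remains null until setTimestamp records the time of the last processed record.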
+ */ +public class ModelSizeStats implements ToXContentObject { + + /** + * Result type + */ + public static final String RESULT_TYPE_VALUE = "model_size_stats"; + public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); + + /** + * Field Names + */ + public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); + public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); + public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); + public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); + public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count"); + public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status"); + public static final ParseField LOG_TIME_FIELD = new ParseField("log_time"); + public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Builder((String) a[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); + PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); + PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); + PARSER.declareField(Builder::setLogTime, + (p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), + LOG_TIME_FIELD, + ValueType.VALUE); + PARSER.declareField(Builder::setTimestamp, + (p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), + TIMESTAMP_FIELD, + ValueType.VALUE); + PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); + } + + /** + * The status of the memory monitored by the ResourceMonitor. 
OK is default, + * SOFT_LIMIT means that the models have done some aggressive pruning to + * keep the memory below the limit, and HARD_LIMIT means that samples have + * been dropped + */ + public enum MemoryStatus { + OK, SOFT_LIMIT, HARD_LIMIT; + + public static MemoryStatus fromString(String statusName) { + return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + } + + private final String jobId; + private final long modelBytes; + private final long totalByFieldCount; + private final long totalOverFieldCount; + private final long totalPartitionFieldCount; + private final long bucketAllocationFailuresCount; + private final MemoryStatus memoryStatus; + private final Date timestamp; + private final Date logTime; + + private ModelSizeStats(String jobId, long modelBytes, long totalByFieldCount, long totalOverFieldCount, + long totalPartitionFieldCount, long bucketAllocationFailuresCount, MemoryStatus memoryStatus, + Date timestamp, Date logTime) { + this.jobId = jobId; + this.modelBytes = modelBytes; + this.totalByFieldCount = totalByFieldCount; + this.totalOverFieldCount = totalOverFieldCount; + this.totalPartitionFieldCount = totalPartitionFieldCount; + this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; + this.memoryStatus = memoryStatus; + this.timestamp = timestamp; + this.logTime = logTime; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); + builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); + builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); + builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); + builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); + builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount); + builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus); + builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); + if (timestamp != null) { + builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); + } + + builder.endObject(); + return builder; + } + + public String getJobId() { + return jobId; + } + + public long getModelBytes() { + return modelBytes; + } + + public long getTotalByFieldCount() { + return totalByFieldCount; + } + + public long getTotalPartitionFieldCount() { + return totalPartitionFieldCount; + } + + public long getTotalOverFieldCount() { + return totalOverFieldCount; + } + + public long getBucketAllocationFailuresCount() { + return bucketAllocationFailuresCount; + } + + public MemoryStatus getMemoryStatus() { + return memoryStatus; + } + + /** + * The timestamp of the last processed record when this instance was created. + * + * @return The record time + */ + public Date getTimestamp() { + return timestamp; + } + + /** + * The wall clock time at the point when this instance was created. 
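MemoryStatus converts between the upper-case constant names and the lower-case strings used on the wire, and fromString trims and upper-cases its input before resolving the constant:

    ModelSizeStats.MemoryStatus status = ModelSizeStats.MemoryStatus.fromString("soft_limit");
    assert status == ModelSizeStats.MemoryStatus.SOFT_LIMIT;
    assert "soft_limit".equals(status.toString());   // the lower-case form used for the memory_status field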
+ * + * @return The wall clock time + */ + public Date getLogTime() { + return logTime; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, + this.bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + } + + /** + * Compare all the fields. + */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + ModelSizeStats that = (ModelSizeStats) other; + + return this.modelBytes == that.modelBytes && this.totalByFieldCount == that.totalByFieldCount + && this.totalOverFieldCount == that.totalOverFieldCount && this.totalPartitionFieldCount == that.totalPartitionFieldCount + && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount + && Objects.equals(this.memoryStatus, that.memoryStatus) && Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.logTime, that.logTime) + && Objects.equals(this.jobId, that.jobId); + } + + public static class Builder { + + private final String jobId; + private long modelBytes; + private long totalByFieldCount; + private long totalOverFieldCount; + private long totalPartitionFieldCount; + private long bucketAllocationFailuresCount; + private MemoryStatus memoryStatus; + private Date timestamp; + private Date logTime; + + public Builder(String jobId) { + this.jobId = jobId; + memoryStatus = MemoryStatus.OK; + logTime = new Date(); + } + + public Builder(ModelSizeStats modelSizeStats) { + this.jobId = modelSizeStats.jobId; + this.modelBytes = modelSizeStats.modelBytes; + this.totalByFieldCount = modelSizeStats.totalByFieldCount; + this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; + this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; + this.bucketAllocationFailuresCount = modelSizeStats.bucketAllocationFailuresCount; + this.memoryStatus = modelSizeStats.memoryStatus; + this.timestamp = modelSizeStats.timestamp; + this.logTime = modelSizeStats.logTime; + } + + public Builder setModelBytes(long modelBytes) { + this.modelBytes = modelBytes; + return this; + } + + public Builder setTotalByFieldCount(long totalByFieldCount) { + this.totalByFieldCount = totalByFieldCount; + return this; + } + + public Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) { + this.totalPartitionFieldCount = totalPartitionFieldCount; + return this; + } + + public Builder setTotalOverFieldCount(long totalOverFieldCount) { + this.totalOverFieldCount = totalOverFieldCount; + return this; + } + + public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) { + this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; + return this; + } + + public Builder setMemoryStatus(MemoryStatus memoryStatus) { + Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null"); + this.memoryStatus = memoryStatus; + return this; + } + + public Builder setTimestamp(Date timestamp) { + this.timestamp = timestamp; + return this; + } + + public Builder setLogTime(Date logTime) { + this.logTime = logTime; + return this; + } + + public ModelSizeStats build() { + return new ModelSizeStats(jobId, modelBytes, totalByFieldCount, totalOverFieldCount, totalPartitionFieldCount, + bucketAllocationFailuresCount, memoryStatus, timestamp, logTime); + } + } +} diff --git 
a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java new file mode 100644 index 0000000000000..11cca459cf82d --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java @@ -0,0 +1,299 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; +import org.elasticsearch.protocol.xpack.ml.job.util.TimeUtil; + +import java.io.IOException; +import java.util.Date; +import java.util.Objects; + +/** + * ModelSnapshot Result POJO + */ +public class ModelSnapshot implements ToXContentObject { + /** + * Field Names + */ + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField DESCRIPTION = new ParseField("description"); + public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); + public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); + public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); + public static final ParseField QUANTILES = new ParseField("quantiles"); + public static final ParseField RETAIN = new ParseField("retain"); + public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); + + public static final ObjectParser PARSER = new ObjectParser<>("model_snapshot", true, Builder::new); + + static { + PARSER.declareString(Builder::setJobId, Job.ID); + PARSER.declareField(Builder::setTimestamp, + (p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()), + TIMESTAMP, + ValueType.VALUE); + PARSER.declareString(Builder::setDescription, DESCRIPTION); + PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); + PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); + PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, + ModelSizeStats.RESULT_TYPE_FIELD); + PARSER.declareField(Builder::setLatestRecordTimeStamp, + (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), + LATEST_RECORD_TIME, + ValueType.VALUE); + PARSER.declareField(Builder::setLatestResultTimeStamp, + (p) -> TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), + LATEST_RESULT_TIME, + ValueType.VALUE); + PARSER.declareObject(Builder::setQuantiles, 
Quantiles.PARSER, QUANTILES); + PARSER.declareBoolean(Builder::setRetain, RETAIN); + } + + + private final String jobId; + + private final Date timestamp; + private final String description; + private final String snapshotId; + private final int snapshotDocCount; + private final ModelSizeStats modelSizeStats; + private final Date latestRecordTimeStamp; + private final Date latestResultTimeStamp; + private final Quantiles quantiles; + private final boolean retain; + + + private ModelSnapshot(String jobId, Date timestamp, String description, String snapshotId, int snapshotDocCount, + ModelSizeStats modelSizeStats, Date latestRecordTimeStamp, Date latestResultTimeStamp, Quantiles quantiles, + boolean retain) { + this.jobId = jobId; + this.timestamp = timestamp; + this.description = description; + this.snapshotId = snapshotId; + this.snapshotDocCount = snapshotDocCount; + this.modelSizeStats = modelSizeStats; + this.latestRecordTimeStamp = latestRecordTimeStamp; + this.latestResultTimeStamp = latestResultTimeStamp; + this.quantiles = quantiles; + this.retain = retain; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (timestamp != null) { + builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); + } + if (description != null) { + builder.field(DESCRIPTION.getPreferredName(), description); + } + if (snapshotId != null) { + builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); + } + builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); + if (modelSizeStats != null) { + builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); + } + if (latestRecordTimeStamp != null) { + builder.timeField(LATEST_RECORD_TIME.getPreferredName(), LATEST_RECORD_TIME.getPreferredName() + "_string", + latestRecordTimeStamp.getTime()); + } + if (latestResultTimeStamp != null) { + builder.timeField(LATEST_RESULT_TIME.getPreferredName(), LATEST_RESULT_TIME.getPreferredName() + "_string", + latestResultTimeStamp.getTime()); + } + if (quantiles != null) { + builder.field(QUANTILES.getPreferredName(), quantiles); + } + builder.field(RETAIN.getPreferredName(), retain); + builder.endObject(); + return builder; + } + + public String getJobId() { + return jobId; + } + + public Date getTimestamp() { + return timestamp; + } + + public String getDescription() { + return description; + } + + public String getSnapshotId() { + return snapshotId; + } + + public int getSnapshotDocCount() { + return snapshotDocCount; + } + + public ModelSizeStats getModelSizeStats() { + return modelSizeStats; + } + + public Quantiles getQuantiles() { + return quantiles; + } + + public Date getLatestRecordTimeStamp() { + return latestRecordTimeStamp; + } + + public Date getLatestResultTimeStamp() { + return latestResultTimeStamp; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timestamp, description, snapshotId, quantiles, snapshotDocCount, modelSizeStats, + latestRecordTimeStamp, latestResultTimeStamp, retain); + } + + /** + * Compare all the fields. 
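ModelSnapshot, like the other result POJOs here, is built through the Builder that follows; a minimal sketch with illustrative values:

    ModelSnapshot snapshot = new ModelSnapshot.Builder("job-1")
            .setSnapshotId("snap-1")
            .setSnapshotDocCount(1)
            .setTimestamp(new Date())
            .setRetain(true)
            .build();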
+ */ + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + ModelSnapshot that = (ModelSnapshot) other; + + return Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.timestamp, that.timestamp) + && Objects.equals(this.description, that.description) + && Objects.equals(this.snapshotId, that.snapshotId) + && this.snapshotDocCount == that.snapshotDocCount + && Objects.equals(this.modelSizeStats, that.modelSizeStats) + && Objects.equals(this.quantiles, that.quantiles) + && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) + && Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp) + && this.retain == that.retain; + } + + public static class Builder { + private String jobId; + + private Date timestamp; + private String description; + private String snapshotId; + private int snapshotDocCount; + private ModelSizeStats modelSizeStats; + private Date latestRecordTimeStamp; + private Date latestResultTimeStamp; + private Quantiles quantiles; + private boolean retain; + + + public Builder() { + } + + public Builder(String jobId) { + this.jobId = jobId; + } + + public Builder(ModelSnapshot modelSnapshot) { + this.jobId = modelSnapshot.jobId; + this.timestamp = modelSnapshot.timestamp; + this.description = modelSnapshot.description; + this.snapshotId = modelSnapshot.snapshotId; + this.snapshotDocCount = modelSnapshot.snapshotDocCount; + this.modelSizeStats = modelSnapshot.modelSizeStats; + this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp; + this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp; + this.quantiles = modelSnapshot.quantiles; + this.retain = modelSnapshot.retain; + } + + public Builder setJobId(String jobId) { + this.jobId = jobId; + return this; + } + + public Builder setTimestamp(Date timestamp) { + this.timestamp = timestamp; + return this; + } + + public Builder setDescription(String description) { + this.description = description; + return this; + } + + public Builder setSnapshotId(String snapshotId) { + this.snapshotId = snapshotId; + return this; + } + + public Builder setSnapshotDocCount(int snapshotDocCount) { + this.snapshotDocCount = snapshotDocCount; + return this; + } + + public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) { + this.modelSizeStats = modelSizeStats.build(); + return this; + } + + public Builder setModelSizeStats(ModelSizeStats modelSizeStats) { + this.modelSizeStats = modelSizeStats; + return this; + } + + public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) { + this.latestRecordTimeStamp = latestRecordTimeStamp; + return this; + } + + public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) { + this.latestResultTimeStamp = latestResultTimeStamp; + return this; + } + + public Builder setQuantiles(Quantiles quantiles) { + this.quantiles = quantiles; + return this; + } + + public Builder setRetain(boolean value) { + this.retain = value; + return this; + } + + public ModelSnapshot build() { + return new ModelSnapshot(jobId, timestamp, description, snapshotId, snapshotDocCount, modelSizeStats, + latestRecordTimeStamp, latestResultTimeStamp, quantiles, retain); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java new file mode 100644 index 0000000000000..1c047d6c30284 --- 
/dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.process; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.ml.job.config.Job; + +import java.io.IOException; +import java.util.Date; +import java.util.Objects; + +/** + * Quantiles Result POJO + */ +public class Quantiles implements ToXContentObject { + + /** + * Field Names + */ + public static final ParseField TIMESTAMP = new ParseField("timestamp"); + public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("quantiles", true, a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG); + PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE); + } + + private final String jobId; + private final Date timestamp; + private final String quantileState; + + public Quantiles(String jobId, Date timestamp, String quantileState) { + this.jobId = jobId; + this.timestamp = Objects.requireNonNull(timestamp); + this.quantileState = Objects.requireNonNull(quantileState); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (timestamp != null) { + builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime()); + } + if (quantileState != null) { + builder.field(QUANTILE_STATE.getPreferredName(), quantileState); + } + builder.endObject(); + return builder; + } + + public String getJobId() { + return jobId; + } + + public Date getTimestamp() { + return timestamp; + } + + public String getQuantileState() { + return quantileState; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, timestamp, quantileState); + } + + /** + * Compare all the fields. 
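Quantiles is a small immutable POJO. Note that although the parser registers the timestamp as an optional constructor argument, the public constructor rejects null for both the timestamp and the quantile state:

    // The quantile state value is an opaque string; the one used here is illustrative.
    Quantiles quantiles = new Quantiles("job-1", new Date(0L), "opaque-quantile-state");
    assert "opaque-quantile-state".equals(quantiles.getQuantileState());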
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java
new file mode 100644
index 0000000000000..1c047d6c30284
--- /dev/null
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/Quantiles.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.process;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+
+import java.io.IOException;
+import java.util.Date;
+import java.util.Objects;
+
+/**
+ * Quantiles Result POJO
+ */
+public class Quantiles implements ToXContentObject {
+
+    /**
+     * Field Names
+     */
+    public static final ParseField TIMESTAMP = new ParseField("timestamp");
+    public static final ParseField QUANTILE_STATE = new ParseField("quantile_state");
+
+    public static final ConstructingObjectParser<Quantiles, Void> PARSER =
+            new ConstructingObjectParser<>("quantiles", true, a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]));
+
+    static {
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
+        PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE);
+    }
+
+    private final String jobId;
+    private final Date timestamp;
+    private final String quantileState;
+
+    public Quantiles(String jobId, Date timestamp, String quantileState) {
+        this.jobId = jobId;
+        this.timestamp = Objects.requireNonNull(timestamp);
+        this.quantileState = Objects.requireNonNull(quantileState);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(Job.ID.getPreferredName(), jobId);
+        if (timestamp != null) {
+            builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime());
+        }
+        if (quantileState != null) {
+            builder.field(QUANTILE_STATE.getPreferredName(), quantileState);
+        }
+        builder.endObject();
+        return builder;
+    }
+
+    public String getJobId() {
+        return jobId;
+    }
+
+    public Date getTimestamp() {
+        return timestamp;
+    }
+
+    public String getQuantileState() {
+        return quantileState;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId, timestamp, quantileState);
+    }
+
+    /**
+     * Compare all the fields.
+     */
+    @Override
+    public boolean equals(Object other) {
+        if (this == other) {
+            return true;
+        }
+
+        if (other == null || getClass() != other.getClass()) {
+            return false;
+        }
+
+        Quantiles that = (Quantiles) other;
+
+        return Objects.equals(this.jobId, that.jobId) && Objects.equals(this.timestamp, that.timestamp)
+                && Objects.equals(this.quantileState, that.quantileState);
+    }
+}
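Note: a usage sketch for the new class, parsing a Quantiles document with the lenient PARSER via the same createParser pattern the tests in this PR use. The JSON literal and the class name QuantilesParseExample are illustrative.

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.XContentType;
    import org.elasticsearch.protocol.xpack.ml.job.process.Quantiles;

    public class QuantilesParseExample {
        public static void main(String[] args) throws Exception {
            // Placeholder document; quantile_state is an opaque string produced by the ML process.
            String json = "{\"job_id\":\"my-job\",\"timestamp\":1517443200000,\"quantile_state\":\"state\"}";
            XContentParser parser = XContentFactory.xContent(XContentType.JSON)
                    .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json);
            Quantiles quantiles = Quantiles.PARSER.apply(parser, null);
            System.out.println(quantiles.getJobId() + " @ " + quantiles.getTimestamp());
        }
    }
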
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java
index 8289032634eb6..4747f3a48bdc8 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/AnomalyRecord.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.time.format.DateTimeFormatter;
@@ -88,7 +89,7 @@ public class AnomalyRecord implements ToXContentObject {
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
                 return new Date(p.longValue());
@@ -159,7 +160,7 @@ public class AnomalyRecord implements ToXContentObject {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
         builder.field(PROBABILITY.getPreferredName(), probability);
         builder.field(RECORD_SCORE.getPreferredName(), recordScore);
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java
index dc56c7bd26237..cbaf83abbad40 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Bucket.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.time.format.DateTimeFormatter;
@@ -61,7 +62,7 @@ public class Bucket implements ToXContentObject {
             new ConstructingObjectParser<>(RESULT_TYPE_VALUE, true, a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
                 return new Date(p.longValue());
@@ -104,7 +105,7 @@ public class Bucket implements ToXContentObject {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime());
         builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore);
         builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java
index c556737213ee1..29d8447cd6a37 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/BucketInfluencer.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.time.format.DateTimeFormatter;
@@ -54,7 +55,7 @@ public class BucketInfluencer implements ToXContentObject {
             a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]));
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
             if (p.currentToken() == Token.VALUE_NUMBER) {
                 return new Date(p.longValue());
@@ -93,7 +94,7 @@ public class BucketInfluencer implements ToXContentObject {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
         if (influenceField != null) {
             builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java
index 2b452eeb82802..59b59006b33a1 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/CategoryDefinition.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -49,7 +50,7 @@ public class CategoryDefinition implements ToXContentObject {
             new ConstructingObjectParser<>(TYPE.getPreferredName(), true, a -> new CategoryDefinition((String) a[0]));
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID);
         PARSER.declareString(CategoryDefinition::setTerms, TERMS);
         PARSER.declareString(CategoryDefinition::setRegex, REGEX);
@@ -130,7 +131,7 @@ void setGrokPattern(String grokPattern) {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(CATEGORY_ID.getPreferredName(), categoryId);
         builder.field(TERMS.getPreferredName(), terms);
         builder.field(REGEX.getPreferredName(), regex);
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java
index ce3a032e54beb..51c88883608b0 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Influencer.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.time.format.DateTimeFormatter;
@@ -57,7 +58,7 @@ public class Influencer implements ToXContentObject {
             a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]));
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME);
         PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE);
         PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> {
@@ -98,7 +99,7 @@ public class Influencer implements ToXContentObject {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
         builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField);
         builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue);
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java
index 217f0bf5e21bc..4f13b4b26646e 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/OverallBucket.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
 
 import java.io.IOException;
 import java.time.format.DateTimeFormatter;
@@ -158,7 +159,7 @@ public static class JobInfo implements ToXContentObject, Comparable<JobInfo> {
             new ConstructingObjectParser<>("job_info", true, a -> new JobInfo((String) a[0], (double) a[1]));
 
     static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Result.JOB_ID);
+        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
         PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE);
     }
@@ -181,7 +182,7 @@ public double getMaxAnomalyScore() {
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject();
-        builder.field(Result.JOB_ID.getPreferredName(), jobId);
+        builder.field(Job.ID.getPreferredName(), jobId);
         builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore);
         builder.endObject();
         return builder;
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java
index 0cd8a09da9521..cce5fa65ebb44 100644
--- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/results/Result.java
@@ -28,7 +28,6 @@ public final class Result {
     /**
      * Serialisation fields
      */
-    public static final ParseField JOB_ID = new ParseField("job_id");
     public static final ParseField TYPE = new ParseField("result");
     public static final ParseField RESULT_TYPE = new ParseField("result_type");
     public static final ParseField TIMESTAMP = new ParseField("timestamp");
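Note: across all of the result classes above, the Result.JOB_ID to Job.ID swap only changes which ParseField constant is referenced; the serialized field name stays "job_id", so the wire format is unchanged. A sketch of round-tripping a Bucket, assuming Bucket.PARSER and getJobId() are publicly accessible like the other result parsers shown here; the field values are placeholders.

    // Placeholder JSON document.
    String json = "{\"job_id\":\"my-job\",\"timestamp\":1517443200000,\"bucket_span\":3600}";
    XContentParser parser = XContentFactory.xContent(XContentType.JSON)
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json);
    Bucket bucket = Bucket.PARSER.apply(parser, null);
    assert "my-job".equals(bucket.getJobId());
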
diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java
new file mode 100644
index 0000000000000..549b196949145
--- /dev/null
+++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/util/TimeUtil.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.util;
+
+import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.time.format.DateTimeFormatter;
+import java.util.Date;
+
+public final class TimeUtil {
+
+    /**
+     * Parse out a Date object given the current parser and field name.
+     *
+     * @param parser    current XContentParser
+     * @param fieldName the field's preferred name (utilized in exception)
+     * @return parsed Date object
+     * @throws IOException from XContentParser
+     */
+    public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
+        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
+            return new Date(parser.longValue());
+        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
+            return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli());
+        }
+        throw new IllegalArgumentException(
+                "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
+    }
+
+}
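Note: a small sketch of the helper in use. It accepts either an epoch-millis number or an ISO-8601 string and throws on anything else; the literal below is illustrative.

    // Parse a bare ISO-8601 string value (epoch-millis numbers take the first branch).
    XContentParser parser = XContentFactory.xContent(XContentType.JSON)
            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    "\"2018-02-01T00:00:00Z\"");
    parser.nextToken(); // position the parser on the value token
    Date date = TimeUtil.parseTimeField(parser, "timestamp");
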
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponseTests.java
new file mode 100644
index 0000000000000..f4caa1f42421d
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseResponseTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.license;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+public class DeleteLicenseResponseTests extends AbstractStreamableXContentTestCase<DeleteLicenseResponse> {
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected DeleteLicenseResponse createTestInstance() {
+        return new DeleteLicenseResponse(randomBoolean());
+    }
+
+    @Override
+    protected DeleteLicenseResponse doParseInstance(XContentParser parser) {
+        return DeleteLicenseResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected DeleteLicenseResponse createBlankInstance() {
+        return new DeleteLicenseResponse();
+    }
+
+    @Override
+    protected DeleteLicenseResponse mutateInstance(DeleteLicenseResponse response) {
+        return new DeleteLicenseResponse(!response.isAcknowledged());
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
new file mode 100644
index 0000000000000..7d770eec08609
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.test.AbstractStreamableTestCase;
+
+public class IndexUpgradeInfoRequestTests extends AbstractStreamableTestCase<IndexUpgradeInfoRequest> {
+    @Override
+    protected IndexUpgradeInfoRequest createTestInstance() {
+        int indexCount = randomInt(4);
+        String[] indices = new String[indexCount];
+        for (int i = 0; i < indexCount; i++) {
+            indices[i] = randomAlphaOfLength(10);
+        }
+        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(indices);
+        if (randomBoolean()) {
+            request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
+        }
+        return request;
+    }
+
+    public void testNullIndices() {
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[]) null));
+        expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[]) null));
+    }
+
+    @Override
+    protected IndexUpgradeInfoRequest createBlankInstance() {
+        return new IndexUpgradeInfoRequest();
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
new file mode 100644
index 0000000000000..42de1ae60908a
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class IndexUpgradeInfoResponseTests extends AbstractStreamableXContentTestCase<IndexUpgradeInfoResponse> {
+    @Override
+    protected IndexUpgradeInfoResponse doParseInstance(XContentParser parser) {
+        return IndexUpgradeInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createBlankInstance() {
+        return new IndexUpgradeInfoResponse();
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createTestInstance() {
+        return randomIndexUpgradeInfoResponse(randomIntBetween(0, 10));
+    }
+
+    private static IndexUpgradeInfoResponse randomIndexUpgradeInfoResponse(int numIndices) {
+        Map<String, UpgradeActionRequired> actions = new HashMap<>();
+        for (int i = 0; i < numIndices; i++) {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse mutateInstance(IndexUpgradeInfoResponse instance) {
+        if (instance.getActions().size() == 0) {
+            return randomIndexUpgradeInfoResponse(1);
+        }
+        Map<String, UpgradeActionRequired> actions = new HashMap<>(instance.getActions());
+        if (randomBoolean()) {
+            Iterator<Map.Entry<String, UpgradeActionRequired>> iterator = actions.entrySet().iterator();
+            iterator.next();
+            iterator.remove();
+        } else {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobRequestTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobRequestTests.java
new file mode 100644
index 0000000000000..448c40a4d2fa1
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobRequestTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.ml.job.config.Job;
+import org.elasticsearch.protocol.xpack.ml.job.config.JobTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class PutJobRequestTests extends AbstractXContentTestCase<PutJobRequest> {
+
+    @Override
+    protected PutJobRequest createTestInstance() {
+        return new PutJobRequest(JobTests.createRandomizedJob());
+    }
+
+    @Override
+    protected PutJobRequest doParseInstance(XContentParser parser) throws IOException {
+        return new PutJobRequest(Job.PARSER.apply(parser, null).build());
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobResponseTests.java
new file mode 100644
index 0000000000000..ed91e33635b29
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/PutJobResponseTests.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.protocol.xpack.ml.job.config.JobTests;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class PutJobResponseTests extends AbstractXContentTestCase<PutJobResponse> {
+
+    @Override
+    protected PutJobResponse createTestInstance() {
+        return new PutJobResponse(JobTests.createRandomizedJob());
+    }
+
+    @Override
+    protected PutJobResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutJobResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfigTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfigTests.java
new file mode 100644
index 0000000000000..c835788bb1c98
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/ChunkingConfigTests.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.datafeed;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.util.Arrays;
+import java.util.concurrent.TimeUnit;
+
+public class ChunkingConfigTests extends AbstractXContentTestCase<ChunkingConfig> {
+
+    @Override
+    protected ChunkingConfig createTestInstance() {
+        return createRandomizedChunk();
+    }
+
+    @Override
+    protected ChunkingConfig doParseInstance(XContentParser parser) {
+        return ChunkingConfig.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    public static ChunkingConfig createRandomizedChunk() {
+        ChunkingConfig.Mode mode = randomFrom(ChunkingConfig.Mode.values());
+        TimeValue timeSpan = null;
+        if (mode == ChunkingConfig.Mode.MANUAL) {
+            // time span is required to be at least 1 millis, so we use a custom method to generate a time value here
+            timeSpan = randomPositiveSecondsMinutesHours();
+        }
+        return new ChunkingConfig(mode, timeSpan);
+    }
+
+    private static TimeValue randomPositiveSecondsMinutesHours() {
+        return new TimeValue(randomIntBetween(1, 1000), randomFrom(Arrays.asList(TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS)));
+    }
+
+}
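Note: for reference, the chunking modes this test exercises can be constructed directly, as the test itself does. The AUTO constant is an assumption inferred from Mode.values(); the time span value is illustrative.

    ChunkingConfig auto = new ChunkingConfig(ChunkingConfig.Mode.AUTO, null); // Mode.AUTO assumed from Mode.values()
    ChunkingConfig manual = new ChunkingConfig(ChunkingConfig.Mode.MANUAL, TimeValue.timeValueHours(1));
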
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfigTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfigTests.java
new file mode 100644
index 0000000000000..f45d88d318e01
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedConfigTests.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.datafeed;
+
+import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.DeprecationHandler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchModule;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.AggregatorFactories;
+import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
+import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class DatafeedConfigTests extends AbstractXContentTestCase<DatafeedConfig> {
+
+    @Override
+    protected DatafeedConfig createTestInstance() {
+        long bucketSpanMillis = 3600000;
+        DatafeedConfig.Builder builder = constructBuilder();
+        builder.setIndices(randomStringList(1, 10));
+        builder.setTypes(randomStringList(0, 10));
+        if (randomBoolean()) {
+            builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10)));
+        }
+        boolean addScriptFields = randomBoolean();
+        if (addScriptFields) {
+            int scriptsSize = randomInt(3);
+            List<ScriptField> scriptFields = new ArrayList<>(scriptsSize);
+            for (int scriptIndex = 0; scriptIndex < scriptsSize; scriptIndex++) {
+                scriptFields.add(new ScriptField(randomAlphaOfLength(10), mockScript(randomAlphaOfLength(10)),
+                        randomBoolean()));
+            }
+            builder.setScriptFields(scriptFields);
+        }
+        Long aggHistogramInterval = null;
+        if (randomBoolean()) {
+            // can only test with a single agg as the xcontent order gets randomized by test base class and then
+            // the actual xcontent isn't the same and test fail.
+            // Testing with a single agg is ok as we don't have special list xcontent logic
+            AggregatorFactories.Builder aggs = new AggregatorFactories.Builder();
+            aggHistogramInterval = randomNonNegativeLong();
+            aggHistogramInterval = aggHistogramInterval > bucketSpanMillis ? bucketSpanMillis : aggHistogramInterval;
+            aggHistogramInterval = aggHistogramInterval <= 0 ? 1 : aggHistogramInterval;
+            MaxAggregationBuilder maxTime = AggregationBuilders.max("time").field("time");
+            aggs.addAggregator(AggregationBuilders.dateHistogram("buckets")
+                    .interval(aggHistogramInterval).subAggregation(maxTime).field("time"));
+            builder.setAggregations(aggs);
+        }
+        if (randomBoolean()) {
+            builder.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
+        }
+        if (randomBoolean()) {
+            if (aggHistogramInterval == null) {
+                builder.setFrequency(TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000)));
+            } else {
+                builder.setFrequency(TimeValue.timeValueMillis(randomIntBetween(1, 5) * aggHistogramInterval));
+            }
+        }
+        if (randomBoolean()) {
+            builder.setQueryDelay(TimeValue.timeValueMillis(randomIntBetween(1, 1_000_000)));
+        }
+        if (randomBoolean()) {
+            builder.setChunkingConfig(ChunkingConfigTests.createRandomizedChunk());
+        }
+        return builder.build();
+    }
+
+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
+        return new NamedXContentRegistry(searchModule.getNamedXContents());
+    }
+
+    public static List<String> randomStringList(int min, int max) {
+        int size = scaledRandomIntBetween(min, max);
+        List<String> list = new ArrayList<>();
+        for (int i = 0; i < size; i++) {
+            list.add(randomAlphaOfLength(10));
+        }
+        return list;
+    }
+
+    @Override
+    protected DatafeedConfig doParseInstance(XContentParser parser) {
+        return DatafeedConfig.PARSER.apply(parser, null).build();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+
+    private static final String FUTURE_DATAFEED = "{\n" +
+            "    \"datafeed_id\": \"farequote-datafeed\",\n" +
+            "    \"job_id\": \"farequote\",\n" +
+            "    \"frequency\": \"1h\",\n" +
+            "    \"indices\": [\"farequote1\", \"farequote2\"],\n" +
+            "    \"tomorrows_technology_today\": \"amazing\",\n" +
+            "    \"scroll_size\": 1234\n" +
+            "}";
+
+    public void testFutureMetadataParse() throws IOException {
+        XContentParser parser = XContentFactory.xContent(XContentType.JSON)
+                .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED);
+        // Unlike the config version of this test, the metadata parser should tolerate the unknown future field
+        assertNotNull(DatafeedConfig.PARSER.apply(parser, null).build());
+    }
+
+    public void testCopyConstructor() {
+        for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) {
+            DatafeedConfig datafeedConfig = createTestInstance();
+            DatafeedConfig copy = new DatafeedConfig.Builder(datafeedConfig).build();
+            assertEquals(datafeedConfig, copy);
+        }
+    }
+
+    public void testCheckValid_GivenNullIdInConstruction() {
+        expectThrows(NullPointerException.class, () -> new DatafeedConfig.Builder(null, null));
+    }
+
+    public void testCheckValid_GivenNullJobId() {
+        expectThrows(NullPointerException.class, () -> new DatafeedConfig.Builder(randomValidDatafeedId(), null));
+    }
+
+    public void testCheckValid_GivenNullIndices() {
+        DatafeedConfig.Builder conf = constructBuilder();
+        expectThrows(NullPointerException.class, () -> conf.setIndices(null));
+    }
+
+    public void testCheckValid_GivenNullType() {
+        DatafeedConfig.Builder conf = constructBuilder();
+        expectThrows(NullPointerException.class, () -> conf.setTypes(null));
+    }
+
+    public void testCheckValid_GivenNullQuery() {
+        DatafeedConfig.Builder conf = constructBuilder();
+        expectThrows(NullPointerException.class, () -> conf.setQuery(null));
+    }
+
+    public static String randomValidDatafeedId() {
+        CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray());
+        return generator.ofCodePointsLength(random(), 10, 10);
+    }
+
+    private static DatafeedConfig.Builder constructBuilder() {
+        return new DatafeedConfig.Builder(randomValidDatafeedId(), randomAlphaOfLength(10));
+    }
+
+}
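Note: a non-random sketch of the builder this test exercises, mirroring the calls in createTestInstance() above; the datafeed id, job id, index, and query values are placeholders.

    DatafeedConfig.Builder builder = new DatafeedConfig.Builder("my-datafeed", "my-job");
    builder.setIndices(Collections.singletonList("my-index"));
    builder.setTypes(Collections.singletonList("doc"));
    builder.setQuery(QueryBuilders.termQuery("airline", "AAL"));
    builder.setScrollSize(1000);
    DatafeedConfig datafeed = builder.build();
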
CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); + return generator.ofCodePointsLength(random(), 10, 10); + } + + private static DatafeedConfig.Builder constructBuilder() { + return new DatafeedConfig.Builder(randomValidDatafeedId(), randomAlphaOfLength(10)); + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdateTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdateTests.java new file mode 100644 index 0000000000000..edbef8461e053 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/datafeed/DatafeedUpdateTests.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.datafeed; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +public class DatafeedUpdateTests extends AbstractXContentTestCase { + + @Override + protected DatafeedUpdate createTestInstance() { + DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(DatafeedConfigTests.randomValidDatafeedId()); + if (randomBoolean()) { + builder.setJobId(randomAlphaOfLength(10)); + } + if (randomBoolean()) { + builder.setQueryDelay(TimeValue.timeValueMillis(randomIntBetween(1, Integer.MAX_VALUE))); + } + if (randomBoolean()) { + builder.setFrequency(TimeValue.timeValueSeconds(randomIntBetween(1, Integer.MAX_VALUE))); + } + if (randomBoolean()) { + builder.setIndices(DatafeedConfigTests.randomStringList(1, 10)); + } + if (randomBoolean()) { + builder.setTypes(DatafeedConfigTests.randomStringList(1, 10)); + } + if (randomBoolean()) { + builder.setQuery(QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10))); + } + if (randomBoolean()) { + int scriptsSize = randomInt(3); + List scriptFields = new ArrayList<>(scriptsSize); + for (int scriptIndex = 0; scriptIndex < scriptsSize; scriptIndex++) { + scriptFields.add(new SearchSourceBuilder.ScriptField(randomAlphaOfLength(10), mockScript(randomAlphaOfLength(10)), + randomBoolean())); + } + builder.setScriptFields(scriptFields); + } + if (randomBoolean()) { + // can only test with a single agg as the xcontent order gets 
randomized by test base class and then + // the actual xcontent isn't the same and test fail. + // Testing with a single agg is ok as we don't have special list xcontent logic + AggregatorFactories.Builder aggs = new AggregatorFactories.Builder(); + aggs.addAggregator(AggregationBuilders.avg(randomAlphaOfLength(10)).field(randomAlphaOfLength(10))); + builder.setAggregations(aggs); + } + if (randomBoolean()) { + builder.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + builder.setChunkingConfig(ChunkingConfigTests.createRandomizedChunk()); + } + return builder.build(); + } + + @Override + protected DatafeedUpdate doParseInstance(XContentParser parser) { + return DatafeedUpdate.PARSER.apply(parser, null).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfigTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfigTests.java new file mode 100644 index 0000000000000..34f12fc067e75 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/AnalysisConfigTests.java @@ -0,0 +1,268 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.ml.job.config; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class AnalysisConfigTests extends AbstractXContentTestCase { + + public static AnalysisConfig.Builder createRandomized() { + boolean isCategorization = randomBoolean(); + List detectors = new ArrayList<>(); + int numDetectors = randomIntBetween(1, 10); + for (int i = 0; i < numDetectors; i++) { + Detector.Builder builder = new Detector.Builder("count", null); + builder.setPartitionFieldName(isCategorization ? 
"mlcategory" : "part"); + detectors.add(builder.build()); + } + AnalysisConfig.Builder builder = new AnalysisConfig.Builder(detectors); + if (randomBoolean()) { + TimeValue bucketSpan = TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000)); + builder.setBucketSpan(bucketSpan); + } + if (isCategorization) { + builder.setCategorizationFieldName(randomAlphaOfLength(10)); + if (randomBoolean()) { + builder.setCategorizationFilters(Arrays.asList(generateRandomStringArray(10, 10, false))); + } else { + CategorizationAnalyzerConfig.Builder analyzerBuilder = new CategorizationAnalyzerConfig.Builder(); + if (rarely()) { + analyzerBuilder.setAnalyzer(randomAlphaOfLength(10)); + } else { + if (randomBoolean()) { + for (String pattern : generateRandomStringArray(3, 40, false)) { + Map charFilter = new HashMap<>(); + charFilter.put("type", "pattern_replace"); + charFilter.put("pattern", pattern); + analyzerBuilder.addCharFilter(charFilter); + } + } + + Map tokenizer = new HashMap<>(); + tokenizer.put("type", "pattern"); + tokenizer.put("pattern", randomAlphaOfLength(10)); + analyzerBuilder.setTokenizer(tokenizer); + + if (randomBoolean()) { + for (String pattern : generateRandomStringArray(4, 40, false)) { + Map tokenFilter = new HashMap<>(); + tokenFilter.put("type", "pattern_replace"); + tokenFilter.put("pattern", pattern); + analyzerBuilder.addTokenFilter(tokenFilter); + } + } + } + builder.setCategorizationAnalyzerConfig(analyzerBuilder.build()); + } + } + if (randomBoolean()) { + builder.setLatency(TimeValue.timeValueSeconds(randomIntBetween(1, 1_000_000))); + } + if (randomBoolean()) { + builder.setMultivariateByFields(randomBoolean()); + } + if (randomBoolean()) { + builder.setOverlappingBuckets(randomBoolean()); + } + if (randomBoolean()) { + builder.setResultFinalizationWindow(randomNonNegativeLong()); + } + + builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false))); + return builder; + } + + @Override + protected AnalysisConfig createTestInstance() { + return createRandomized().build(); + } + + @Override + protected AnalysisConfig doParseInstance(XContentParser parser) { + return AnalysisConfig.PARSER.apply(parser, null).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testBuilder_WithNullDetectors() { + AnalysisConfig.Builder builder = new AnalysisConfig.Builder(new ArrayList<>()); + NullPointerException ex = expectThrows(NullPointerException.class, () -> builder.setDetectors(null)); + assertEquals("[detectors] must not be null", ex.getMessage()); + } + + public void testEquals_GivenSameReference() { + AnalysisConfig config = createRandomized().build(); + assertTrue(config.equals(config)); + } + + public void testEquals_GivenDifferentClass() { + assertFalse(createRandomized().build().equals("a string")); + } + + public void testEquals_GivenNull() { + assertFalse(createRandomized().build().equals(null)); + } + + public void testEquals_GivenEqualConfig() { + AnalysisConfig config1 = createValidCategorizationConfig().build(); + AnalysisConfig config2 = createValidCategorizationConfig().build(); + + assertTrue(config1.equals(config2)); + assertTrue(config2.equals(config1)); + assertEquals(config1.hashCode(), config2.hashCode()); + } + + public void testEquals_GivenDifferentBucketSpan() { + AnalysisConfig.Builder builder = createConfigBuilder(); + builder.setBucketSpan(TimeValue.timeValueSeconds(1800)); + AnalysisConfig config1 = builder.build(); + + builder = createConfigBuilder(); + 
builder.setBucketSpan(TimeValue.timeValueHours(1)); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenCategorizationField() { + AnalysisConfig.Builder builder = createValidCategorizationConfig(); + builder.setCategorizationFieldName("foo"); + AnalysisConfig config1 = builder.build(); + + builder = createValidCategorizationConfig(); + builder.setCategorizationFieldName("bar"); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenDifferentDetector() { + AnalysisConfig config1 = createConfigWithDetectors(Collections.singletonList(new Detector.Builder("min", "low_count").build())); + + AnalysisConfig config2 = createConfigWithDetectors(Collections.singletonList(new Detector.Builder("min", "high_count").build())); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenDifferentInfluencers() { + AnalysisConfig.Builder builder = createConfigBuilder(); + builder.setInfluencers(Collections.singletonList("foo")); + AnalysisConfig config1 = builder.build(); + + builder = createConfigBuilder(); + builder.setInfluencers(Collections.singletonList("bar")); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenDifferentLatency() { + AnalysisConfig.Builder builder = createConfigBuilder(); + builder.setLatency(TimeValue.timeValueSeconds(1800)); + AnalysisConfig config1 = builder.build(); + + builder = createConfigBuilder(); + builder.setLatency(TimeValue.timeValueSeconds(1801)); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenSummaryCountField() { + AnalysisConfig.Builder builder = createConfigBuilder(); + builder.setSummaryCountFieldName("foo"); + AnalysisConfig config1 = builder.build(); + + builder = createConfigBuilder(); + builder.setSummaryCountFieldName("bar"); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenMultivariateByField() { + AnalysisConfig.Builder builder = createConfigBuilder(); + builder.setMultivariateByFields(true); + AnalysisConfig config1 = builder.build(); + + builder = createConfigBuilder(); + builder.setMultivariateByFields(false); + AnalysisConfig config2 = builder.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + public void testEquals_GivenDifferentCategorizationFilters() { + AnalysisConfig.Builder configBuilder1 = createValidCategorizationConfig(); + AnalysisConfig.Builder configBuilder2 = createValidCategorizationConfig(); + configBuilder1.setCategorizationFilters(Arrays.asList("foo", "bar")); + configBuilder2.setCategorizationFilters(Arrays.asList("foo", "foobar")); + AnalysisConfig config1 = configBuilder1.build(); + AnalysisConfig config2 = configBuilder2.build(); + + assertFalse(config1.equals(config2)); + assertFalse(config2.equals(config1)); + } + + private static AnalysisConfig createConfigWithDetectors(List detectors) { + return new AnalysisConfig.Builder(detectors).build(); + } + + private static AnalysisConfig.Builder createConfigBuilder() { + return new AnalysisConfig.Builder(Collections.singletonList(new 
Detector.Builder("min", "count").build())); + } + + private static AnalysisConfig.Builder createValidCategorizationConfig() { + Detector.Builder detector = new Detector.Builder("count", null); + detector.setByFieldName("mlcategory"); + AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(detector.build())); + analysisConfig.setBucketSpan(TimeValue.timeValueHours(1)); + analysisConfig.setLatency(TimeValue.ZERO); + analysisConfig.setCategorizationFieldName("msg"); + return analysisConfig; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java new file mode 100644 index 0000000000000..7ba4946efa753 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/config/JobTests.java @@ -0,0 +1,276 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.protocol.xpack.ml.job.config; + +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class JobTests extends AbstractXContentTestCase { + + private static final String FUTURE_JOB = "{\n" + + " \"job_id\": \"farequote\",\n" + + " \"create_time\": 1234567890000,\n" + + " \"tomorrows_technology_today\": \"wow\",\n" + + " \"analysis_config\": {\n" + + " \"bucket_span\": \"1h\",\n" + + " \"something_new\": \"gasp\",\n" + + " \"detectors\": [{\"function\": \"metric\", \"field_name\": \"responsetime\", \"by_field_name\": \"airline\"}]\n" + + " },\n" + + " \"data_description\": {\n" + + " \"time_field\": \"time\",\n" + + " \"the_future\": 123\n" + + " }\n" + + "}"; + + @Override + protected Job createTestInstance() { + return createRandomizedJob(); + } + + @Override + protected Job doParseInstance(XContentParser parser) { + return Job.PARSER.apply(parser, null).build(); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testFutureMetadataParse() throws IOException { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_JOB); + // The parser should tolerate unknown fields + assertNotNull(Job.PARSER.apply(parser, null).build()); + } + + public void testEquals_GivenDifferentClass() { + Job job = buildJobBuilder("foo").build(); + assertFalse(job.equals("a string")); + } + + public void testEquals_GivenDifferentIds() { + Date createTime = new Date(); + Job.Builder builder = buildJobBuilder("foo"); + builder.setCreateTime(createTime); + Job job1 = builder.build(); + builder.setId("bar"); + Job job2 = builder.build(); + assertFalse(job1.equals(job2)); + } + + public void testEquals_GivenDifferentRenormalizationWindowDays() { + Date date = new Date(); + Job.Builder jobDetails1 = new Job.Builder("foo"); + jobDetails1.setDataDescription(new DataDescription.Builder()); + jobDetails1.setAnalysisConfig(createAnalysisConfig()); + jobDetails1.setRenormalizationWindowDays(3L); + jobDetails1.setCreateTime(date); + Job.Builder jobDetails2 = new Job.Builder("foo"); + jobDetails2.setDataDescription(new DataDescription.Builder()); + jobDetails2.setRenormalizationWindowDays(4L); + jobDetails2.setAnalysisConfig(createAnalysisConfig()); + jobDetails2.setCreateTime(date); + assertFalse(jobDetails1.build().equals(jobDetails2.build())); + } + + public void testEquals_GivenDifferentBackgroundPersistInterval() { + Date date = new Date(); + Job.Builder jobDetails1 = new Job.Builder("foo"); + jobDetails1.setDataDescription(new DataDescription.Builder()); + jobDetails1.setAnalysisConfig(createAnalysisConfig()); + jobDetails1.setBackgroundPersistInterval(TimeValue.timeValueSeconds(10000L)); + 
jobDetails1.setCreateTime(date); + Job.Builder jobDetails2 = new Job.Builder("foo"); + jobDetails2.setDataDescription(new DataDescription.Builder()); + jobDetails2.setBackgroundPersistInterval(TimeValue.timeValueSeconds(8000L)); + jobDetails2.setAnalysisConfig(createAnalysisConfig()); + jobDetails2.setCreateTime(date); + assertFalse(jobDetails1.build().equals(jobDetails2.build())); + } + + public void testEquals_GivenDifferentModelSnapshotRetentionDays() { + Date date = new Date(); + Job.Builder jobDetails1 = new Job.Builder("foo"); + jobDetails1.setDataDescription(new DataDescription.Builder()); + jobDetails1.setAnalysisConfig(createAnalysisConfig()); + jobDetails1.setModelSnapshotRetentionDays(10L); + jobDetails1.setCreateTime(date); + Job.Builder jobDetails2 = new Job.Builder("foo"); + jobDetails2.setDataDescription(new DataDescription.Builder()); + jobDetails2.setModelSnapshotRetentionDays(8L); + jobDetails2.setAnalysisConfig(createAnalysisConfig()); + jobDetails2.setCreateTime(date); + assertFalse(jobDetails1.build().equals(jobDetails2.build())); + } + + public void testEquals_GivenDifferentResultsRetentionDays() { + Date date = new Date(); + Job.Builder jobDetails1 = new Job.Builder("foo"); + jobDetails1.setDataDescription(new DataDescription.Builder()); + jobDetails1.setAnalysisConfig(createAnalysisConfig()); + jobDetails1.setCreateTime(date); + jobDetails1.setResultsRetentionDays(30L); + Job.Builder jobDetails2 = new Job.Builder("foo"); + jobDetails2.setDataDescription(new DataDescription.Builder()); + jobDetails2.setResultsRetentionDays(4L); + jobDetails2.setAnalysisConfig(createAnalysisConfig()); + jobDetails2.setCreateTime(date); + assertFalse(jobDetails1.build().equals(jobDetails2.build())); + } + + public void testEquals_GivenDifferentCustomSettings() { + Job.Builder jobDetails1 = buildJobBuilder("foo"); + Map customSettings1 = new HashMap<>(); + customSettings1.put("key1", "value1"); + jobDetails1.setCustomSettings(customSettings1); + Job.Builder jobDetails2 = buildJobBuilder("foo"); + Map customSettings2 = new HashMap<>(); + customSettings2.put("key2", "value2"); + jobDetails2.setCustomSettings(customSettings2); + assertFalse(jobDetails1.build().equals(jobDetails2.build())); + } + + public void testCopyConstructor() { + for (int i = 0; i < NUMBER_OF_TEST_RUNS; i++) { + Job job = createTestInstance(); + Job copy = new Job.Builder(job).build(); + assertEquals(job, copy); + } + } + + public void testBuilder_WithNullID() { + Job.Builder builder = new Job.Builder("anything").setId(null); + NullPointerException ex = expectThrows(NullPointerException.class, builder::build); + assertEquals("[job_id] must not be null", ex.getMessage()); + } + + public void testBuilder_WithNullJobType() { + Job.Builder builder = new Job.Builder("anything").setJobType(null); + NullPointerException ex = expectThrows(NullPointerException.class, builder::build); + assertEquals("[job_type] must not be null", ex.getMessage()); + } + + public static Job.Builder buildJobBuilder(String id, Date date) { + Job.Builder builder = new Job.Builder(id); + builder.setCreateTime(date); + AnalysisConfig.Builder ac = createAnalysisConfig(); + DataDescription.Builder dc = new DataDescription.Builder(); + builder.setAnalysisConfig(ac); + builder.setDataDescription(dc); + return builder; + } + + public static Job.Builder buildJobBuilder(String id) { + return buildJobBuilder(id, new Date()); + } + + public static String randomValidJobId() { + CodepointSetGenerator generator = new 
CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz".toCharArray()); + return generator.ofCodePointsLength(random(), 10, 10); + } + + public static AnalysisConfig.Builder createAnalysisConfig() { + Detector.Builder d1 = new Detector.Builder("info_content", "domain"); + d1.setOverFieldName("client"); + Detector.Builder d2 = new Detector.Builder("min", "field"); + return new AnalysisConfig.Builder(Arrays.asList(d1.build(), d2.build())); + } + + public static Job createRandomizedJob() { + String jobId = randomValidJobId(); + Job.Builder builder = new Job.Builder(jobId); + if (randomBoolean()) { + builder.setDescription(randomAlphaOfLength(10)); + } + if (randomBoolean()) { + int groupsNum = randomIntBetween(0, 10); + List groups = new ArrayList<>(groupsNum); + for (int i = 0; i < groupsNum; i++) { + groups.add(randomValidJobId()); + } + builder.setGroups(groups); + } + builder.setCreateTime(new Date(randomNonNegativeLong())); + if (randomBoolean()) { + builder.setFinishedTime(new Date(randomNonNegativeLong())); + } + if (randomBoolean()) { + builder.setLastDataTime(new Date(randomNonNegativeLong())); + } + if (randomBoolean()) { + builder.setEstablishedModelMemory(randomNonNegativeLong()); + } + builder.setAnalysisConfig(AnalysisConfigTests.createRandomized()); + builder.setAnalysisLimits(AnalysisLimitsTests.createRandomized()); + + DataDescription.Builder dataDescription = new DataDescription.Builder(); + dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values())); + builder.setDataDescription(dataDescription); + + if (randomBoolean()) { + builder.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10))); + } + if (randomBoolean()) { + builder.setRenormalizationWindowDays(randomNonNegativeLong()); + } + if (randomBoolean()) { + builder.setBackgroundPersistInterval(TimeValue.timeValueHours(randomIntBetween(1, 24))); + } + if (randomBoolean()) { + builder.setModelSnapshotRetentionDays(randomNonNegativeLong()); + } + if (randomBoolean()) { + builder.setResultsRetentionDays(randomNonNegativeLong()); + } + if (randomBoolean()) { + builder.setCustomSettings(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10))); + } + if (randomBoolean()) { + builder.setModelSnapshotId(randomAlphaOfLength(10)); + } + if (randomBoolean()) { + builder.setResultsIndexName(randomValidJobId()); + } + return builder.build(); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java new file mode 100644 index 0000000000000..2232e8c88d924 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/DataCountsTests.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.process;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+import org.joda.time.DateTime;
+
+import java.util.Date;
+
+import static org.hamcrest.Matchers.greaterThan;
+
+public class DataCountsTests extends AbstractXContentTestCase<DataCounts> {
+
+    public static DataCounts createTestInstance(String jobId) {
+        return new DataCounts(jobId, randomIntBetween(1, 1_000_000),
+                randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
+                randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
+                randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000),
+                new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(),
+                new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(),
+                new DateTime(randomDateTimeZone()).toDate());
+    }
+
+    @Override
+    public DataCounts createTestInstance() {
+        return createTestInstance(randomAlphaOfLength(10));
+    }
+
+    @Override
+    protected DataCounts doParseInstance(XContentParser parser) {
+        return DataCounts.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    public void testCountsEquals_GivenEqualCounts() {
+        DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+        DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+
+        assertTrue(counts1.equals(counts2));
+        assertTrue(counts2.equals(counts1));
+    }
+
+    public void testCountsHashCode_GivenEqualCounts() {
+        DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+        DataCounts counts2 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+        assertEquals(counts1.hashCode(), counts2.hashCode());
+    }
+
+    public void testCountsCopyConstructor() {
+        DataCounts counts1 = createCounts(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15);
+        DataCounts counts2 = new DataCounts(counts1);
+
+        assertEquals(counts1.hashCode(), counts2.hashCode());
+    }
+
+    public void testCountCreatedZero() throws Exception {
+        DataCounts counts = new DataCounts(randomAlphaOfLength(16));
+        assertAllFieldsEqualZero(counts);
+    }
+
+    public void testCountCopyCreatedFieldsNotZero() throws Exception {
+        DataCounts counts1 = createCounts(1, 200, 400, 3, 4, 5, 6, 7, 8, 9, 1479211200000L, 1479384000000L, 13, 14, 15);
+        assertAllFieldsGreaterThanZero(counts1);
+
+        DataCounts counts2 = new DataCounts(counts1);
+        assertAllFieldsGreaterThanZero(counts2);
+    }
+
+    private void assertAllFieldsEqualZero(DataCounts stats) throws Exception {
+        assertEquals(0L, stats.getProcessedRecordCount());
+        assertEquals(0L, stats.getProcessedFieldCount());
+        assertEquals(0L, stats.getInputBytes());
+        assertEquals(0L, stats.getInputFieldCount());
+        assertEquals(0L, stats.getInputRecordCount());
+        assertEquals(0L, stats.getInvalidDateCount());
+        assertEquals(0L, stats.getMissingFieldCount());
+        assertEquals(0L, stats.getOutOfOrderTimeStampCount());
+    }
+
+    private void assertAllFieldsGreaterThanZero(DataCounts stats) throws Exception {
+        assertThat(stats.getProcessedRecordCount(), greaterThan(0L));
+        assertThat(stats.getProcessedFieldCount(), greaterThan(0L));
+        assertThat(stats.getInputBytes(), greaterThan(0L));
+        assertThat(stats.getInputFieldCount(), greaterThan(0L));
+        assertThat(stats.getInputRecordCount(), greaterThan(0L));
+        assertThat(stats.getInvalidDateCount(), greaterThan(0L));
+        assertThat(stats.getMissingFieldCount(), greaterThan(0L));
+        assertThat(stats.getOutOfOrderTimeStampCount(), greaterThan(0L));
+        assertThat(stats.getLatestRecordTimeStamp().getTime(), greaterThan(0L));
+    }
+
+    private static DataCounts createCounts(
+            long processedRecordCount, long processedFieldCount, long inputBytes, long inputFieldCount,
+            long invalidDateCount, long missingFieldCount, long outOfOrderTimeStampCount,
+            long emptyBucketCount, long sparseBucketCount, long bucketCount,
+            long earliestRecordTime, long latestRecordTime, long lastDataTimeStamp, long latestEmptyBucketTimeStamp,
+            long latestSparseBucketTimeStamp) {
+
+        DataCounts counts = new DataCounts("foo", processedRecordCount, processedFieldCount, inputBytes,
+                inputFieldCount, invalidDateCount, missingFieldCount, outOfOrderTimeStampCount,
+                emptyBucketCount, sparseBucketCount, bucketCount,
+                new Date(earliestRecordTime), new Date(latestRecordTime),
+                new Date(lastDataTimeStamp), new Date(latestEmptyBucketTimeStamp), new Date(latestSparseBucketTimeStamp));
+
+        return counts;
+    }
+
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java
new file mode 100644
index 0000000000000..e3341123fb007
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSizeStatsTests.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.process;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+import org.elasticsearch.protocol.xpack.ml.job.process.ModelSizeStats.MemoryStatus;
+
+import java.util.Date;
+
+public class ModelSizeStatsTests extends AbstractXContentTestCase<ModelSizeStats> {
+
+    public void testDefaultConstructor() {
+        ModelSizeStats stats = new ModelSizeStats.Builder("foo").build();
+        assertEquals(0, stats.getModelBytes());
+        assertEquals(0, stats.getTotalByFieldCount());
+        assertEquals(0, stats.getTotalOverFieldCount());
+        assertEquals(0, stats.getTotalPartitionFieldCount());
+        assertEquals(0, stats.getBucketAllocationFailuresCount());
+        assertEquals(MemoryStatus.OK, stats.getMemoryStatus());
+    }
+
+    public void testSetMemoryStatus_GivenNull() {
+        ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
+
+        NullPointerException ex = expectThrows(NullPointerException.class, () -> stats.setMemoryStatus(null));
+
+        assertEquals("[memory_status] must not be null", ex.getMessage());
+    }
+
+    public void testSetMemoryStatus_GivenSoftLimit() {
+        ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
+
+        stats.setMemoryStatus(MemoryStatus.SOFT_LIMIT);
+
+        assertEquals(MemoryStatus.SOFT_LIMIT, stats.build().getMemoryStatus());
+    }
+
+    @Override
+    protected ModelSizeStats createTestInstance() {
+        return createRandomized();
+    }
+
+    public static ModelSizeStats createRandomized() {
+        ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
+        if (randomBoolean()) {
+            stats.setBucketAllocationFailuresCount(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            stats.setModelBytes(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            stats.setTotalByFieldCount(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            stats.setTotalOverFieldCount(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            stats.setTotalPartitionFieldCount(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            stats.setLogTime(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
+        }
+        if (randomBoolean()) {
+            stats.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
+        }
+        if (randomBoolean()) {
+            stats.setMemoryStatus(randomFrom(MemoryStatus.values()));
+        }
+        return stats.build();
+    }
+
+    @Override
+    protected ModelSizeStats doParseInstance(XContentParser parser) {
+        return ModelSizeStats.PARSER.apply(parser, null).build();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java
new file mode 100644
index 0000000000000..95cfbedeab484
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshotTests.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.process;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.util.Date;
+
+public class ModelSnapshotTests extends AbstractXContentTestCase<ModelSnapshot> {
+
+    private static final Date DEFAULT_TIMESTAMP = new Date();
+    private static final String DEFAULT_DESCRIPTION = "a snapshot";
+    private static final String DEFAULT_ID = "my_id";
+    private static final int DEFAULT_DOC_COUNT = 7;
+    private static final Date DEFAULT_LATEST_RESULT_TIMESTAMP = new Date(12345678901234L);
+    private static final Date DEFAULT_LATEST_RECORD_TIMESTAMP = new Date(12345678904321L);
+    private static final boolean DEFAULT_RETAIN = true;
+
+    public void testCopyBuilder() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = new ModelSnapshot.Builder(modelSnapshot1).build();
+        assertEquals(modelSnapshot1, modelSnapshot2);
+    }
+
+    public void testEquals_GivenSameObject() {
+        ModelSnapshot modelSnapshot = createFullyPopulated().build();
+        assertTrue(modelSnapshot.equals(modelSnapshot));
+    }
+
+    public void testEquals_GivenObjectOfDifferentClass() {
+        ModelSnapshot modelSnapshot = createFullyPopulated().build();
+        assertFalse(modelSnapshot.equals("a string"));
+    }
+
+    public void testEquals_GivenEqualModelSnapshots() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated().build();
+
+        assertEquals(modelSnapshot1, modelSnapshot2);
+        assertEquals(modelSnapshot2, modelSnapshot1);
+        assertEquals(modelSnapshot1.hashCode(), modelSnapshot2.hashCode());
+    }
+
+    public void testEquals_GivenDifferentTimestamp() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated().setTimestamp(
+                new Date(modelSnapshot1.getTimestamp().getTime() + 1)).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentDescription() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated()
+                .setDescription(modelSnapshot1.getDescription() + " blah").build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentId() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated()
+                .setSnapshotId(modelSnapshot1.getSnapshotId() + "_2").build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentDocCount() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated()
+                .setSnapshotDocCount(modelSnapshot1.getSnapshotDocCount() + 1).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
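+        // equals must be symmetric, so assert in both directions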
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentModelSizeStats() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSizeStats.Builder modelSizeStats = new ModelSizeStats.Builder("foo");
+        modelSizeStats.setModelBytes(42L);
+        ModelSnapshot modelSnapshot2 = createFullyPopulated().setModelSizeStats(modelSizeStats).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentQuantiles() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated()
+                .setQuantiles(new Quantiles("foo", modelSnapshot1.getQuantiles().getTimestamp(),
+                        "different state")).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentLatestResultTimestamp() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestResultTimeStamp(
+                new Date(modelSnapshot1.getLatestResultTimeStamp().getTime() + 1)).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    public void testEquals_GivenDifferentLatestRecordTimestamp() {
+        ModelSnapshot modelSnapshot1 = createFullyPopulated().build();
+        ModelSnapshot modelSnapshot2 = createFullyPopulated().setLatestRecordTimeStamp(
+                new Date(modelSnapshot1.getLatestRecordTimeStamp().getTime() + 1)).build();
+
+        assertFalse(modelSnapshot1.equals(modelSnapshot2));
+        assertFalse(modelSnapshot2.equals(modelSnapshot1));
+    }
+
+    private static ModelSnapshot.Builder createFullyPopulated() {
+        ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder();
+        modelSnapshot.setJobId("foo");
+        modelSnapshot.setTimestamp(DEFAULT_TIMESTAMP);
+        modelSnapshot.setDescription(DEFAULT_DESCRIPTION);
+        modelSnapshot.setSnapshotId(DEFAULT_ID);
+        modelSnapshot.setSnapshotDocCount(DEFAULT_DOC_COUNT);
+        ModelSizeStats.Builder modelSizeStatsBuilder = new ModelSizeStats.Builder("foo");
+        modelSizeStatsBuilder.setLogTime(null);
+        modelSnapshot.setModelSizeStats(modelSizeStatsBuilder);
+        modelSnapshot.setLatestResultTimeStamp(DEFAULT_LATEST_RESULT_TIMESTAMP);
+        modelSnapshot.setLatestRecordTimeStamp(DEFAULT_LATEST_RECORD_TIMESTAMP);
+        modelSnapshot.setQuantiles(new Quantiles("foo", DEFAULT_TIMESTAMP, "state"));
+        modelSnapshot.setRetain(DEFAULT_RETAIN);
+        return modelSnapshot;
+    }
+
+    @Override
+    protected ModelSnapshot createTestInstance() {
+        return createRandomized();
+    }
+
+    public static ModelSnapshot createRandomized() {
+        ModelSnapshot.Builder modelSnapshot = new ModelSnapshot.Builder(randomAlphaOfLengthBetween(1, 20));
+        modelSnapshot.setTimestamp(new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
+        modelSnapshot.setDescription(randomAlphaOfLengthBetween(1, 20));
+        modelSnapshot.setSnapshotId(randomAlphaOfLengthBetween(1, 20));
+        modelSnapshot.setSnapshotDocCount(randomInt());
+        modelSnapshot.setModelSizeStats(ModelSizeStatsTests.createRandomized());
+        modelSnapshot.setLatestResultTimeStamp(
+                new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
+        modelSnapshot.setLatestRecordTimeStamp(
+                new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()));
+        modelSnapshot.setQuantiles(QuantilesTests.createRandomized());
+        modelSnapshot.setRetain(randomBoolean());
+        return modelSnapshot.build();
+    }
+
+    @Override
+    protected ModelSnapshot doParseInstance(XContentParser parser) {
+        return ModelSnapshot.PARSER.apply(parser, null).build();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+}
diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java
new file mode 100644
index 0000000000000..77ae21bc6f89a
--- /dev/null
+++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/ml/job/process/QuantilesTests.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.protocol.xpack.ml.job.process;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.util.Date;
+
+public class QuantilesTests extends AbstractXContentTestCase<Quantiles> {
+
+    public void testEquals_GivenSameObject() {
+        Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo");
+        assertTrue(quantiles.equals(quantiles));
+    }
+
+    public void testEquals_GivenDifferentClassObject() {
+        Quantiles quantiles = new Quantiles("foo", new Date(0L), "foo");
+        assertFalse(quantiles.equals("not a quantiles object"));
+    }
+
+    public void testEquals_GivenEqualQuantilesObject() {
+        Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo");
+
+        Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo");
+
+        assertTrue(quantiles1.equals(quantiles2));
+        assertTrue(quantiles2.equals(quantiles1));
+    }
+
+    public void testEquals_GivenDifferentState() {
+        Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "bar1");
+
+        Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "bar2");
+
+        assertFalse(quantiles1.equals(quantiles2));
+        assertFalse(quantiles2.equals(quantiles1));
+    }
+
+    public void testHashCode_GivenEqualObject() {
+        Quantiles quantiles1 = new Quantiles("foo", new Date(0L), "foo");
+
+        Quantiles quantiles2 = new Quantiles("foo", new Date(0L), "foo");
+
+        assertEquals(quantiles1.hashCode(), quantiles2.hashCode());
+    }
+
+    @Override
+    protected Quantiles createTestInstance() {
+        return createRandomized();
+    }
+
+    public static Quantiles createRandomized() {
+        return new Quantiles(randomAlphaOfLengthBetween(1, 20),
+                new Date(TimeValue.parseTimeValue(randomTimeValue(), "test").millis()),
+                randomAlphaOfLengthBetween(0, 1000));
+    }
+
+    @Override
+    protected Quantiles doParseInstance(XContentParser parser) {
+        return Quantiles.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+}
diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
index ea22fdd87b557..71026c6ae836f 100644
--- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
+++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java
@@ -401,13 +401,29 @@ public void testRollupIDSchemeAfterRestart() throws Exception {
                 }
             });
 
+            // After we've confirmed the doc, wait until we move back to STARTED so that we know the
+            // state was saved at the end
+            waitForRollUpJob("rollup-id-test", equalTo("started"));
+
         } else {
 
             final Request indexRequest = new Request("POST", "/id-test-rollup/_doc/2");
             indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-02T00:00:01\",\"value\":345}");
             client().performRequest(indexRequest);
 
-            assertRollUpJob("rollup-id-test");
+            // stop the rollup job to force a state save, which will upgrade the ID
+            final Request stopRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-id-test/_stop");
+            Map<String, Object> stopRollupJobResponse = entityAsMap(client().performRequest(stopRollupJobRequest));
+            assertThat(stopRollupJobResponse.get("stopped"), equalTo(Boolean.TRUE));
+
+            waitForRollUpJob("rollup-id-test", equalTo("stopped"));
+
+            // start the rollup job again
+            final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-id-test/_start");
+            Map<String, Object> startRollupJobResponse = entityAsMap(client().performRequest(startRollupJobRequest));
+            assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));
+
+            waitForRollUpJob("rollup-id-test", anyOf(equalTo("indexing"), equalTo("started")));
 
             assertBusy(() -> {
                 client().performRequest(new Request("POST", "id-test-results-rollup/_refresh"));
diff --git a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle
index e69de29bb2d1d..928280b6584bd 100644
--- a/x-pack/qa/full-cluster-restart/with-system-key/build.gradle
+++ b/x-pack/qa/full-cluster-restart/with-system-key/build.gradle
@@ -0,0 +1,8 @@
+import org.elasticsearch.gradle.test.RestIntegTestTask
+
+// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737
+if (project.inFipsJvm) {
+  tasks.withType(RestIntegTestTask) {
+    enabled = false
+  }
+}
diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java
new file mode 100644
index 0000000000000..325b1370315ca
--- /dev/null
+++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobResetsFinishedTimeIT.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig;
+import org.elasticsearch.xpack.core.ml.job.config.DataDescription;
+import org.elasticsearch.xpack.core.ml.job.config.Detector;
+import org.elasticsearch.xpack.core.ml.job.config.Job;
+import org.junit.After;
+
+import java.util.Collections;
+
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.Matchers.is;
+
+public class ReopenJobResetsFinishedTimeIT extends MlNativeAutodetectIntegTestCase {
+
+    @After
+    public void cleanUpTest() {
+        cleanUp();
+    }
+
+    public void test() {
+        final String jobId = "reset-finished-time-test";
+        Job.Builder job = createJob(jobId);
+
+        registerJob(job);
+        putJob(job);
+        openJob(job.getId());
+
+        assertThat(getSingleJob(jobId).getFinishedTime(), is(nullValue()));
+
+        closeJob(jobId);
+        assertThat(getSingleJob(jobId).getFinishedTime(), is(notNullValue()));
+
+        openJob(jobId);
+        assertThat(getSingleJob(jobId).getFinishedTime(), is(nullValue()));
+    }
+
+    private Job getSingleJob(String jobId) {
+        return getJob(jobId).get(0);
+    }
+
+    private Job.Builder createJob(String id) {
+        DataDescription.Builder dataDescription = new DataDescription.Builder();
+        dataDescription.setFormat(DataDescription.DataFormat.XCONTENT);
+        dataDescription.setTimeFormat(DataDescription.EPOCH_MS);
+
+        Detector.Builder d = new Detector.Builder("count", null);
+        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
+
+        Job.Builder builder = new Job.Builder();
+        builder.setId(id);
+        builder.setAnalysisConfig(analysisConfig);
+        builder.setDataDescription(dataDescription);
+        return builder;
+    }
+}
diff --git a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle
index e69de29bb2d1d..928280b6584bd 100644
--- a/x-pack/qa/rolling-upgrade/with-system-key/build.gradle
+++ b/x-pack/qa/rolling-upgrade/with-system-key/build.gradle
@@ -0,0 +1,8 @@
+import org.elasticsearch.gradle.test.RestIntegTestTask
+
+// Skip test on FIPS FIXME https://github.com/elastic/elasticsearch/issues/32737
+if (project.inFipsJvm) {
+  tasks.withType(RestIntegTestTask) {
+    enabled = false
+  }
+}
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
index b782e1474ea85..d61b4b9a946bd 100644
--- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
+++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java
@@ -7,12 +7,14 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.junit.Assume;
 import org.junit.ClassRule;
 
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 /**
  * Tests comparing sql queries executed against our jdbc client
@@ -25,7 +27,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
     public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"));
 
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
-    public static List<Object[]> readScriptSpec() throws Exception {
+    public static List<Object[]> readScriptSpec() throws Exception {
         Parser parser = specParser();
         List<Object[]> tests = new ArrayList<>();
tests.addAll(readScriptSpec("/select.sql-spec", parser)); @@ -35,6 +37,8 @@ public static List readScriptSpec() throws Exception { tests.addAll(readScriptSpec("/agg.sql-spec", parser)); tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser)); tests.addAll(readScriptSpec("/string-functions.sql-spec", parser)); + // AwaitsFix: https://github.com/elastic/elasticsearch/issues/32589 + // tests.addAll(readScriptSpec("/case-functions.sql-spec", parser)); return tests; } @@ -56,6 +60,12 @@ public SqlSpecTestCase(String fileName, String groupName, String testName, Integ @Override protected final void doTest() throws Throwable { + boolean goodLocale = !(Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr").build()) + || Locale.getDefault().equals(new Locale.Builder().setLanguageTag("tr-TR").build())); + if (fileName.startsWith("case-functions")) { + Assume.assumeTrue(goodLocale); + } + try (Connection h2 = H2.get(); Connection es = esJdbc()) { diff --git a/x-pack/qa/sql/src/main/resources/case-functions.sql-spec b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec new file mode 100644 index 0000000000000..899d7cb0a6cb1 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec @@ -0,0 +1,13 @@ +// Next 4 SELECTs in this file are related to https://github.com/elastic/elasticsearch/issues/32589 +// H2 is Locale sensitive, while ES-SQL is not (so far) +selectInsertWithLcaseAndLengthWithOrderBy +SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstName +SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy +SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10; + +upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere +SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10; diff --git a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec index bad6a5d043214..15bb6dea935c8 100644 --- a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec +++ b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec @@ -22,7 +22,8 @@ SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp" ltrimFilter SELECT LTRIM(first_name) lt FROM "test_emp" WHERE 
 
-//Unsupported yet
+// Unsupported yet
+// Functions combined with 'LIKE' should perform the match inside a Painless script, whereas at the moment it's handled as a regular `match` query in ES.
 //ltrimFilterWithLike
 //SELECT LTRIM("first_name") lt FROM "test_emp" WHERE LTRIM("first_name") LIKE '%a%';
 
@@ -93,10 +94,6 @@ SELECT "first_name" orig, REPEAT("first_name",2) reps FROM "test_emp" WHERE ASCI
 selectInsertWithLcase
 SELECT "first_name" orig, INSERT("first_name",2,1000,LCASE("first_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC LIMIT 10;
 
-// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
-// selectInsertWithLcaseAndLengthWithOrderBy
-//SELECT "first_name" origFN, "last_name" origLN, INSERT(UCASE("first_name"),LENGTH("first_name")+1,123,LCASE("last_name")) modified FROM "test_emp" WHERE ASCII("first_name")=65 ORDER BY "first_name" ASC, "last_name" ASC LIMIT 10;
-
 selectInsertWithUcaseWithGroupByAndOrderBy
 SELECT INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) modified, COUNT(*) count FROM "test_emp" WHERE ASCII("first_name")=65 GROUP BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ORDER BY INSERT(UCASE("first_name"),2,123000,INSERT(UCASE("last_name"),2,500,' ')) ASC LIMIT 10;
 
@@ -141,14 +138,3 @@ SELECT RIGHT("first_name",2) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
 
 selectRightWithGroupByAndOrderBy
 SELECT RIGHT("first_name",2) f, COUNT(*) count FROM "test_emp" GROUP BY RIGHT("first_name",2) ORDER BY RIGHT("first_name",2) LIMIT 10;
-
-// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
-// upperCasingTheSecondLetterFromTheRightFromFirstName
-// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f FROM "test_emp" ORDER BY "first_name" LIMIT 10;
-
-// AWAITS FIX for https://github.com/elastic/elasticsearch/issues/32589
-// upperCasingTheSecondLetterFromTheRightFromFirstNameWithOrderByAndGroupBy
-// SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;
-
-upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere
-SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10;
diff --git a/x-pack/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/LicenseTribeTests.java b/x-pack/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/LicenseTribeTests.java
index 2ad39e68dad2d..ac3f127dfa757 100644
--- a/x-pack/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/LicenseTribeTests.java
+++ b/x-pack/qa/tribe-tests-with-license/src/test/java/org/elasticsearch/license/LicenseTribeTests.java
@@ -7,6 +7,8 @@
 
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
 
 import static org.elasticsearch.license.TestUtils.generateSignedLicense;