diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index fda4315926b6b..207a332ed6717 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -88,6 +88,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -113,6 +114,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 BWC_VERSION: $BWC_LIST agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7c5f683cf9692..7ba46f0f0951c 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -735,6 +735,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -760,6 +761,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index a6a46bb41f112..edd705b66caaa 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -59,3 +59,7 @@ unknown-field License # don't build them ourselves and the license precludes us modifying them # to fix this. library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so + +# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so +# which has no dependencies (not even libc) besides the symbols in the base executable. +shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so diff --git a/docs/build.gradle b/docs/build.gradle index 0eba980e8cc31..7ca4820eea1af 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1752,6 +1752,7 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' name: "my_admin_role" body: > { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} diff --git a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml new file mode 100644 index 0000000000000..a328bc2a2a208 --- /dev/null +++ b/docs/changelog/107886.yaml @@ -0,0 +1,5 @@ +pr: 107886 +summary: Cluster state role mapper file settings service +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/108517.yaml b/docs/changelog/108517.yaml new file mode 100644 index 0000000000000..359c8302fdf6c --- /dev/null +++ b/docs/changelog/108517.yaml @@ -0,0 +1,6 @@ +pr: 108517 +summary: Forward `indexServiceSafe` exception to listener +area: Transform +type: bug +issues: + - 108418 diff --git a/docs/changelog/108518.yaml b/docs/changelog/108518.yaml new file mode 100644 index 0000000000000..aad823ccc89f6 --- /dev/null +++ b/docs/changelog/108518.yaml @@ -0,0 +1,5 @@ +pr: 108518 +summary: Remove leading is_ prefix from Enterprise geoip docs +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/108521.yaml b/docs/changelog/108521.yaml new file mode 100644 index 0000000000000..adc7c11a4decd --- /dev/null +++ b/docs/changelog/108521.yaml @@ -0,0 +1,6 @@ +pr: 108521 +summary: Adding override for lintian false positive on `libvec.so` +area: "Packaging" +type: bug +issues: + - 108514 diff --git a/docs/changelog/108522.yaml 
b/docs/changelog/108522.yaml new file mode 100644 index 0000000000000..5bc064d7995e9 --- /dev/null +++ b/docs/changelog/108522.yaml @@ -0,0 +1,5 @@ +pr: 108522 +summary: Ensure we return non-negative scores when scoring scalar dot-products +area: Vector Search +type: bug +issues: [] diff --git a/docs/changelog/108562.yaml b/docs/changelog/108562.yaml new file mode 100644 index 0000000000000..2a0047fe807fd --- /dev/null +++ b/docs/changelog/108562.yaml @@ -0,0 +1,6 @@ +pr: 108562 +summary: Add `internalClusterTest` for and fix leak in `ExpandSearchPhase` +area: Search +type: bug +issues: + - 108369 diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 59305c6305737..732e2e7be46fa 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -1,6 +1,14 @@ -# Distributed Area Team Internals +# Distributed Area Internals -(Summary, brief discussion of our features) +The Distributed Area contains indexing and coordination systems. + +The index path stretches from the user REST command through shard routing down to each individual shard's translog and storage +engine. Reindexing is effectively reading from a source index and writing to a destination index (perhaps on different nodes). +The coordination side includes cluster coordination, shard allocation, cluster autoscaling stats, task management, and cross +cluster replication. Less obvious coordination systems include networking, the discovery plugin system, the snapshot/restore +logic, and shard recovery. + +A guide to the general Elasticsearch components can be found [here](https://github.com/elastic/elasticsearch/blob/main/docs/internal/GeneralArchitectureGuide.md). # Networking @@ -237,9 +245,101 @@ works in parallel with the storage engine.) # Autoscaling -(Reactive and proactive autoscaling. Explain that we surface recommendations, how control plane uses it.) - -(Sketch / list the different deciders that we have, and then also how we use information from each to make a recommendation.) +The Autoscaling API in ES (Elasticsearch) uses cluster and node level statistics to provide a recommendation +for a cluster size that supports the current cluster data and active workloads. ES Autoscaling is paired +with an ES Cloud service that periodically polls the ES elected master node for suggested cluster +changes. The cloud service will add more resources to the cluster based on Elasticsearch's recommendation. +Elasticsearch by itself cannot automatically scale. + +Autoscaling recommendations are tailored for the user [based on user defined policies][], composed of data +roles (hot, frozen, etc.) and [deciders][]. There's a public [webinar on autoscaling][], as well as the +public [Autoscaling APIs] docs. + +Autoscaling's current implementation is based primarily on storage requirements, as well as memory capacity +for ML and the frozen tier. It does not yet support scaling related to search load. Paired with ES Cloud, +autoscaling only scales upward, not downward, except for ML nodes, which do get scaled up _and_ down.
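
To make the polling flow above concrete, here is a minimal sketch of how an external orchestrator (the role ES Cloud plays) might poll the elected master for the current recommendation. It uses the low-level REST client and the public `GET _autoscaling/capacity` endpoint; the class name, polling interval, and response handling are illustrative assumptions, not the actual ES Cloud implementation.

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class AutoscalingPoller {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            while (true) {
                // Ask the elected master for the current capacity recommendation.
                Response response = client.performRequest(new Request("GET", "/_autoscaling/capacity"));
                // A real control plane would compare required vs. current capacity per policy
                // and provision hardware accordingly; here we just print the raw JSON.
                System.out.println(EntityUtils.toString(response.getEntity()));
                Thread.sleep(60_000); // assumed polling interval
            }
        }
    }
}
```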
+ +[based on user defined policies]: https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-autoscaling.html +[deciders]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-deciders.html +[webinar on autoscaling]: https://www.elastic.co/webinars/autoscaling-from-zero-to-production-seamlessly +[Autoscaling APIs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-apis.html + +### Plugin REST and TransportAction entrypoints + +Autoscaling is a [plugin][]. All the REST APIs can be found in [autoscaling/rest/][]. +`GetAutoscalingCapacityAction` is the REST endpoint for the capacity calculation, as opposed to the +other REST endpoints that get/set/delete the policies guiding the capacity calculation. The Transport +Actions can be found in [autoscaling/action/], where [TransportGetAutoscalingCapacityAction][] is the +entrypoint on the master node for calculating the optimal cluster resources based on the autoscaling +policies. + +[plugin]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java#L72 +[autoscaling/rest/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/rest +[autoscaling/action/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action +[TransportGetAutoscalingCapacityAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L82-L98 + +### How cluster capacity is determined + +[AutoscalingMetadata][] implements [Metadata.Custom][] in order to persist autoscaling policies. Each +Decider is an implementation of [AutoscalingDeciderService][]. The [AutoscalingCalculateCapacityService][] +is responsible for running the calculation. + +[TransportGetAutoscalingCapacityAction.computeCapacity] is the entry point to [AutoscalingCalculateCapacityService.calculate], +which creates an [AutoscalingDeciderResults][] for [each autoscaling policy][]. [AutoscalingDeciderResults.toXContent][] then +determines the [maximum required capacity][] to return to the caller. [AutoscalingCapacity][] is the base unit of a cluster +resources recommendation. + +The `TransportGetAutoscalingCapacityAction` response is cached to prevent concurrent callers +from overloading the system, since the operation is expensive. `TransportGetAutoscalingCapacityAction` contains +a [CapacityResponseCache][]. `TransportGetAutoscalingCapacityAction.masterOperation` +calls [through the CapacityResponseCache][] into the `AutoscalingCalculateCapacityService` to handle +concurrent callers.
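
A small standalone sketch of the reduction step described above: each decider reports the capacity it needs, and the policy-level requirement is the component-wise maximum across decider results. The `Capacity` record and `max` method below are illustrative stand-ins, not the actual `AutoscalingCapacity`/`AutoscalingDeciderResults` classes.

```java
import java.util.List;

// Illustrative stand-in for AutoscalingCapacity: the storage and memory a policy needs.
record Capacity(long storageBytes, long memoryBytes) {

    // The policy-level requirement is the component-wise maximum over all decider results,
    // mirroring how the maximum required capacity is derived before returning it to the caller.
    static Capacity max(List<Capacity> deciderResults) {
        long storage = 0;
        long memory = 0;
        for (Capacity c : deciderResults) {
            storage = Math.max(storage, c.storageBytes());
            memory = Math.max(memory, c.memoryBytes());
        }
        return new Capacity(storage, memory);
    }
}
```

For example, if the reactive storage decider requires 2 TiB of storage and the ML decider requires 64 GiB of memory, the combined recommendation carries both maxima.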
+ +[AutoscalingMetadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java#L38 +[Metadata.Custom]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L141-L145 +[AutoscalingDeciderService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderService.java#L16-L19 +[AutoscalingCalculateCapacityService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L43 + +[TransportGetAutoscalingCapacityAction.computeCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L102-L108 +[AutoscalingCalculateCapacityService.calculate]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L108-L139 +[AutoscalingDeciderResults]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L34-L38 +[each autoscaling policy]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L124-L131 +[AutoscalingDeciderResults.toXContent]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L78 +[maximum required capacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L105-L116 +[AutoscalingCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCapacity.java#L27-L35 + +[CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L44-L47 +[through the CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L97 + +### Where the data comes from + +The Deciders each pull data from different sources as needed to inform their decisions. The +[DiskThresholdMonitor][] is one such data source. The Monitor runs on the master node and maintains +lists of nodes that exceed various disk size thresholds. [DiskThresholdSettings][] contains the +threshold settings with which the `DiskThresholdMonitor` runs. 
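
As a rough illustration of the kind of check the monitor performs, the sketch below flags a node whose disk usage crosses a watermark ratio. The method is a simplification and assumes a purely ratio-based watermark; the real thresholds come from the `cluster.routing.allocation.disk.watermark.*` settings held in `DiskThresholdSettings` and may also be expressed as absolute byte values.

```java
// Simplified sketch of a watermark check; not the actual DiskThresholdMonitor code.
final class DiskWatermarkCheck {

    // Returns true when used disk space on a node crosses the given watermark ratio
    // (e.g. 0.90 for the default 90% high watermark).
    static boolean exceedsWatermark(long totalBytes, long freeBytes, double watermarkRatio) {
        double usedRatio = 1.0 - ((double) freeBytes / totalBytes);
        return usedRatio >= watermarkRatio;
    }
}
```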
+ +[DiskThresholdMonitor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java#L53-L58 +[DiskThresholdSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java#L24-L27 + +### Deciders + +The `ReactiveStorageDeciderService` tracks information that demonstrates storage limitations are causing +problems in the cluster. It uses [an algorithm defined here][]. Some examples are +- information from the `DiskThresholdMonitor` to find out whether nodes are exceeding their storage capacity +- number of unassigned shards that failed allocation because of insufficient storage +- the max shard size and minimum node size, and whether these can be satisfied with the existing infrastructure + +[an algorithm defined here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java#L158-L176 + +The `ProactiveStorageDeciderService` maintains a forecast window that [defaults to 30 minutes][]. It only +runs on data streams (ILM, rollover, etc), not regular indexes. It looks at past [index changes][] that +took place within the forecast window to [predict][] resources that will be needed shortly. + +[defaults to 30 minutes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L32 +[index changes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L79-L83 +[predict]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L85-L95 + +There are several more Decider Services, implementing the `AutoscalingDeciderService` interface. # Snapshot / Restore diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 410bec7ac38ac..6d06e7e6b9045 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -30,13 +30,13 @@ Returns information about all stored connector sync jobs ordered by their creati (Optional, integer) The offset from the first result to fetch. Defaults to `0`. `status`:: -(Optional, job status) The job status the fetched sync jobs need to have. +(Optional, job status) A comma-separated list of job statuses to filter the results. Available statuses include: `canceling`, `canceled`, `completed`, `error`, `in_progress`, `pending`, `suspended`. `connector_id`:: (Optional, string) The connector id the fetched sync jobs need to have. `job_type`:: -(Optional, job type) A comma-separated list of job types. +(Optional, job type) A comma-separated list of job types. Available job types are: `full`, `incremental` and `access_control`. 
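
As an illustration of combining these filters, the sketch below lists pending and in-progress full syncs with the low-level REST client. The query parameter names follow the `status` and `job_type` parameters documented above; the client setup is an assumption rather than an official example.

[source,java]
--------------------------------------------------
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ListSyncJobs {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("GET", "/_connector/_sync_job");
            // Comma-separated filters, as documented above.
            request.addParameter("status", "pending,in_progress");
            request.addParameter("job_type", "full");
            Response response = client.performRequest(request);
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
--------------------------------------------------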
[[list-connector-sync-jobs-api-example]] ==== {api-examples-title} diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 5d6ede6acd5ac..6272f4529c5f9 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -11,6 +11,8 @@ independently of each other. TIP: When ingesting key-value pairs with a large, arbitrary set of keys, you might consider modeling each key-value pair as its own nested document with `key` and `value` fields. Instead, consider using the <> data type, which maps an entire object as a single field and allows for simple searches over its contents. Nested documents and queries are typically expensive, so using the `flattened` data type for this use case is a better option. +WARNING: Nested fields have incomplete support in Kibana. While they are visible and searchable in Discover, they cannot be used to build visualizations in Lens. + [[nested-arrays-flattening-objects]] ==== How arrays of objects are flattened diff --git a/docs/reference/rest-api/security/create-roles.asciidoc b/docs/reference/rest-api/security/create-roles.asciidoc index 4f41c0b54bb1d..75f1d7c799187 100644 --- a/docs/reference/rest-api/security/create-roles.asciidoc +++ b/docs/reference/rest-api/security/create-roles.asciidoc @@ -50,6 +50,9 @@ privilege or action. `cluster`:: (list) A list of cluster privileges. These privileges define the cluster level actions that users with this role are able to execute. +`description`:: (string) A description of the role. +The maximum length is `1000` chars. + `global`:: (object) An object defining global privileges. A global privilege is a form of cluster privilege that is request-aware. Support for global privileges is currently limited to the management of application privileges. @@ -104,6 +107,7 @@ The following example adds a role called `my_admin_role`: -------------------------------------------------- POST /_security/role/my_admin_role { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ { diff --git a/docs/reference/rest-api/security/get-roles.asciidoc b/docs/reference/rest-api/security/get-roles.asciidoc index 80f0fd587aae8..3eb5a735194c6 100644 --- a/docs/reference/rest-api/security/get-roles.asciidoc +++ b/docs/reference/rest-api/security/get-roles.asciidoc @@ -61,6 +61,7 @@ GET /_security/role/my_admin_role -------------------------------------------------- { "my_admin_role": { + "description": "Grants full access to all management features within the cluster.", "cluster" : [ "all" ], "indices" : [ { diff --git a/docs/reference/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc index 9153b5fbdcab3..be30db4d100bd 100644 --- a/docs/reference/security/authorization/privileges.asciidoc +++ b/docs/reference/security/authorization/privileges.asciidoc @@ -85,6 +85,9 @@ All {Ilm} operations related to managing policies. `manage_index_templates`:: All operations on index templates. +`manage_inference`:: +All operations related to managing {infer}. + `manage_ingest_pipelines`:: All operations on ingest pipelines. @@ -192,6 +195,9 @@ node info, node and cluster stats, and pending cluster tasks. `monitor_enrich`:: All read-only operations related to managing and executing enrich policies. +`monitor_inference`:: +All read-only operations related to {infer}. 
+ `monitor_ml`:: All read-only {ml} operations, such as getting information about {dfeeds}, jobs, model snapshots, or results. diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java index 9b452219bd635..5231bb8e3c67f 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java @@ -47,10 +47,11 @@ public float score(int firstOrd, int secondOrd) throws IOException { if (firstSeg != null && secondSeg != null) { int dotProduct = dotProduct7u(firstSeg, secondSeg, length); + assert dotProduct >= 0; float adjustedDistance = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset; - return (1 + adjustedDistance) / 2; + return Math.max((1 + adjustedDistance) / 2, 0f); } else { - return fallbackScore(firstByteOffset, secondByteOffset); + return Math.max(fallbackScore(firstByteOffset, secondByteOffset), 0f); } } } diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 246ddaeb2ebcf..07d30a887c683 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -28,6 +28,7 @@ import static org.elasticsearch.vec.VectorSimilarityType.EUCLIDEAN; import static org.elasticsearch.vec.VectorSimilarityType.MAXIMUM_INNER_PRODUCT; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; // @com.carrotsearch.randomizedtesting.annotations.Repeat(iterations = 100) public class VectorScorerFactoryTests extends AbstractVectorTestCase { @@ -96,6 +97,51 @@ void testSimpleImpl(long maxChunkSize) throws IOException { } } + public void testNonNegativeDotProduct() throws IOException { + assumeTrue(notSupportedMsg(), supported()); + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()), MMapDirectory.DEFAULT_MAX_CHUNK_SIZE)) { + // keep vecs `0` so dot product is `0` + byte[] vec1 = new byte[32]; + byte[] vec2 = new byte[32]; + String fileName = getTestName() + "-32"; + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + var negativeOffset = floatToByteArray(-5f); + byte[] bytes = concat(vec1, negativeOffset, vec2, negativeOffset); + out.writeBytes(bytes, 0, bytes.length); + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + // dot product + float expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(DOT_PRODUCT, vec1, vec2, + // 1, -5, -5); + var scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, DOT_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // cosine + expected = 0f; // TODO fix in Lucene: 
https://github.com/apache/lucene/pull/13356 luceneScore(COSINE, vec1, vec2, 1, -5, + // -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, COSINE, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, EUCLIDEAN, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + } + } + } + public void testRandom() throws IOException { assumeTrue(notSupportedMsg(), supported()); testRandom(MMapDirectory.DEFAULT_MAX_CHUNK_SIZE, BYTE_ARRAY_RANDOM_INT7_FUNC); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 88e529ec5569b..f5fa0db839230 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; @@ -56,11 +57,11 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider { @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { if (dataStreamName != null) { @@ -70,13 +71,13 @@ public Settings getAdditionalIndexSettings( // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES boolean migrating = dataStream != null && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) - && timeSeries; + && isTimeSeries; IndexMode indexMode; if (migrating) { indexMode = IndexMode.TIME_SERIES; } else if (dataStream != null) { - indexMode = timeSeries ? dataStream.getIndexMode() : null; - } else if (timeSeries) { + indexMode = isTimeSeries ? 
dataStream.getIndexMode() : null; + } else if (isTimeSeries) { indexMode = IndexMode.TIME_SERIES; } else { indexMode = null; @@ -84,8 +85,8 @@ public Settings getAdditionalIndexSettings( if (indexMode != null) { if (indexMode == IndexMode.TIME_SERIES) { Settings.Builder builder = Settings.builder(); - TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(allSettings); - TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(allSettings); + TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(indexTemplateAndCreateRequestSettings); + TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(indexTemplateAndCreateRequestSettings); final Instant start; final Instant end; if (dataStream == null || migrating) { @@ -114,9 +115,13 @@ public Settings getAdditionalIndexSettings( builder.put(IndexSettings.TIME_SERIES_START_TIME.getKey(), FORMATTER.format(start)); builder.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), FORMATTER.format(end)); - if (allSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false + if (indexTemplateAndCreateRequestSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false && combinedTemplateMappings.isEmpty() == false) { - List routingPaths = findRoutingPaths(indexName, allSettings, combinedTemplateMappings); + List routingPaths = findRoutingPaths( + indexName, + indexTemplateAndCreateRequestSettings, + combinedTemplateMappings + ); if (routingPaths.isEmpty() == false) { builder.putList(INDEX_ROUTING_PATH.getKey(), routingPaths); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java index a6060923bd396..e3cdd6a8c14d9 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java @@ -64,7 +64,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public boolean dryRun() { return dryRun; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index 3fe9ae0758a91..3bd100a106dd6 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -48,6 +48,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java index 51eb9e7e7e944..5816823ed710a 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java @@ -47,7 +47,9 @@ private GetDataStreamGlobalRetentionAction() {/* no instances */} public static final class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java index a30af402a9186..cc61c7fe664be 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java @@ -43,7 +43,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java index 2aa5b4b4d3acd..65ca34a99da23 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java @@ -108,6 +108,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.globalRetention = new DataStreamGlobalRetention(defaultRetention, maxRetention); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 2e50cc0a97677..6898e44335793 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -483,22 +483,22 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas } } case HOSTING_PROVIDER -> { - geoData.put("is_hosting_provider", isHostingProvider); + geoData.put("hosting_provider", isHostingProvider); } case TOR_EXIT_NODE -> { - geoData.put("is_tor_exit_node", isTorExitNode); + geoData.put("tor_exit_node", isTorExitNode); } case ANONYMOUS_VPN -> { - geoData.put("is_anonymous_vpn", isAnonymousVpn); + geoData.put("anonymous_vpn", isAnonymousVpn); } case ANONYMOUS -> { - geoData.put("is_anonymous", isAnonymous); + geoData.put("anonymous", isAnonymous); } case PUBLIC_PROXY -> { - geoData.put("is_public_proxy", isPublicProxy); + geoData.put("public_proxy", isPublicProxy); } case RESIDENTIAL_PROXY -> { - geoData.put("is_residential_proxy", isResidentialProxy); + geoData.put("residential_proxy", isResidentialProxy); } } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f9f79d54522da..ec77cacbdb6b6 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -373,12 +373,12 @@ public void testEnterprise() throws Exception { location.put("lon", -1.25); assertThat(geoData.get("location"), equalTo(location)); assertThat(geoData.get("network"), equalTo("2.125.160.216/29")); - assertThat(geoData.get("is_hosting_provider"), equalTo(false)); - assertThat(geoData.get("is_tor_exit_node"), equalTo(false)); - assertThat(geoData.get("is_anonymous_vpn"), equalTo(false)); - assertThat(geoData.get("is_anonymous"), equalTo(false)); - assertThat(geoData.get("is_public_proxy"), equalTo(false)); - assertThat(geoData.get("is_residential_proxy"), equalTo(false)); + assertThat(geoData.get("hosting_provider"), equalTo(false)); + assertThat(geoData.get("tor_exit_node"), equalTo(false)); + assertThat(geoData.get("anonymous_vpn"), equalTo(false)); + assertThat(geoData.get("anonymous"), equalTo(false)); + assertThat(geoData.get("public_proxy"), equalTo(false)); + assertThat(geoData.get("residential_proxy"), equalTo(false)); } public void testAddressIsNotInTheDatabase() throws Exception { diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index d0cef178dc920..aee0d313e4e00 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ESNetty4IntegTestCase; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLogAppender; @@ -24,16 +23,14 @@ public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { private MockLogAppender appender; - private Releasable appenderRelease; public void setUp() throws Exception { super.setUp(); - appender = new MockLogAppender(); - appenderRelease = appender.capturing(ESLoggingHandler.class, TransportLogger.class, TcpTransport.class); + appender = MockLogAppender.capture(ESLoggingHandler.class, TransportLogger.class, TcpTransport.class); } public void tearDown() throws Exception { - appenderRelease.close(); + appender.close(); super.tearDown(); } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 7ce962ff56b67..3035213766584 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -40,6 +40,7 @@ import org.elasticsearch.transport.netty4.NettyAllocator; import java.io.Closeable; +import java.io.IOException; import java.net.SocketAddress; import java.net.SocketException; import java.nio.charset.StandardCharsets; @@ -203,7 +204,11 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) { @Override public void exceptionCaught(ChannelHandlerContext 
ctx, Throwable cause) { - if (cause instanceof PrematureChannelClosureException || cause instanceof SocketException) { + if (cause instanceof PrematureChannelClosureException + || cause instanceof SocketException + || (cause instanceof IOException + && cause.getMessage() != null + && cause.getMessage().contains("An established connection was aborted by the software in your host machine"))) { // no more requests coming, so fast-forward the latch fastForward(); } else { diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 08e3ac2cbce8c..99b2728ebfa3c 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -206,16 +206,16 @@ private void assertControllerSpawns(final Function pluginsDir String stdoutLoggerName = "test_plugin-controller-stdout"; String stderrLoggerName = "test_plugin-controller-stderr"; - MockLogAppender appender = new MockLogAppender(); Loggers.setLevel(LogManager.getLogger(stdoutLoggerName), Level.TRACE); Loggers.setLevel(LogManager.getLogger(stderrLoggerName), Level.TRACE); CountDownLatch messagesLoggedLatch = new CountDownLatch(2); - if (expectSpawn) { - appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); - appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); - } - try (var ignore = appender.capturing(stdoutLoggerName, stderrLoggerName)) { + try (var appender = MockLogAppender.capture(stdoutLoggerName, stderrLoggerName)) { + if (expectSpawn) { + appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); + appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); + } + Spawner spawner = new Spawner(); spawner.spawnNativeControllers(environment); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index dc4e24959a5c6..81ac8ab1200f6 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1211,7 +1211,6 @@ private List listPlugins() { /** * Check that readiness listener works */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108523") public void test500Readiness() throws Exception { assertFalse(readinessProbe(9399)); // Disabling security so we wait for green @@ -1220,6 +1219,7 @@ public void test500Readiness() throws Exception { builder().envVar("readiness.port", "9399").envVar("xpack.security.enabled", "false").envVar("discovery.type", "single-node") ); waitForElasticsearch(installation); + dumpDebug(); assertTrue(readinessProbe(9399)); } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index ae3eadded108b..86f02641d86f1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -78,24 
+78,6 @@ setup: - match: {test_index2.settings.index.refresh_interval: 10s} - is_false: foo.settings.index.refresh_interval ---- -"put settings in list of indices": - - skip: - awaits_fix: list of indices not implemented yet - - do: - indices.put_settings: - index: test_index1, test_index2 - body: - refresh_interval: 10s - - - do: - indices.get_settings: {} - - - match: {test_index1.settings.index.refresh_interval: 10s} - - match: {test_index2.settings.index.refresh_interval: 10s} - - is_false: foo.settings.index.refresh_interval - - --- "put settings in blank index": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml deleted file mode 100644 index 81be6f82d8a14..0000000000000 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- -"Metadata Fields": - - - skip: - awaits_fix: "Update doesn't return metadata fields, waiting for #3259" - - - do: - indices.create: - index: test_1 - - - do: - update: - index: test_1 - id: "1" - parent: 5 - fields: [ _routing ] - body: - doc: { foo: baz } - upsert: { foo: bar } - - - match: { get._routing: "5" } - - - do: - get: - index: test_1 - id: "1" - parent: 5 - stored_fields: [ _routing ] - - diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 3b9d3e133b63a..fcccc0051f0cd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -387,17 +387,16 @@ public void testMessageLogging() { ) .get(); - MockLogAppender dryRunMockLog = new MockLogAppender(); - dryRunMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no completed message logged on dry run", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" - ) - ); + try (var dryRunMockLog = MockLogAppender.capture(TransportClusterRerouteAction.class)) { + dryRunMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no completed message logged on dry run", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + ) + ); - try (var ignored = dryRunMockLog.capturing(TransportClusterRerouteAction.class)) { AllocationCommand dryRunAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); ClusterRerouteResponse dryRunResponse = clusterAdmin().prepareReroute() .setExplain(randomBoolean()) @@ -412,24 +411,23 @@ public void testMessageLogging() { dryRunMockLog.assertAllExpectationsMatched(); } - MockLogAppender allocateMockLog = new MockLogAppender(); - allocateMockLog.addExpectation( - new MockLogAppender.SeenEventExpectation( - "message for first allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName1 + "*" - ) - ); - allocateMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no message for second allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName2 + "*" - ) - ); - try (var ignored = allocateMockLog.capturing(TransportClusterRerouteAction.class)) { + try (var allocateMockLog = 
MockLogAppender.capture(TransportClusterRerouteAction.class)) { + allocateMockLog.addExpectation( + new MockLogAppender.SeenEventExpectation( + "message for first allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName1 + "*" + ) + ); + allocateMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no message for second allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName2 + "*" + ) + ); AllocationCommand yesDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); AllocationCommand noDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand("noexist", 1, nodeName2, true); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java new file mode 100644 index 0000000000000..a12a26d69c5ff --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; + +public class CollapseSearchResultsIT extends ESIntegTestCase { + + public void testCollapse() { + final String indexName = "test_collapse"; + createIndex(indexName); + final String collapseField = "collapse_field"; + assertAcked(indicesAdmin().preparePutMapping(indexName).setSource(collapseField, "type=keyword")); + index(indexName, "id_1", Map.of(collapseField, "value1")); + index(indexName, "id_2", Map.of(collapseField, "value2")); + refresh(indexName); + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(new MatchAllQueryBuilder()) + .setCollapse(new CollapseBuilder(collapseField).setInnerHits(new InnerHitBuilder("ih").setSize(2))), + searchResponse -> { + assertEquals(collapseField, searchResponse.getHits().getCollapseField()); + assertEquals(Set.of(new BytesRef("value1"), new BytesRef("value2")), Set.of(searchResponse.getHits().getCollapseValues())); + } + ); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java index e6de1faa1aff7..91561814fea1a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java @@ -49,6 +49,7 @@ public class ClusterAllocationExplainRequest extends 
MasterNodeRequest { - public DesiredBalanceRequest() {} + public DesiredBalanceRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public DesiredBalanceRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index 75434ff554b9c..f26921fd47260 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -103,6 +103,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) public static class Request extends MasterNodeReadRequest { public Request(TaskId parentTaskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setParentTask(parentTaskId); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java index 75877cf0630f4..82e4e4123e4fe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java @@ -57,6 +57,7 @@ public AddVotingConfigExclusionsRequest(String... nodeNames) { * @param timeout How long to wait for the added exclusions to take effect and be removed from the voting configuration. */ public AddVotingConfigExclusionsRequest(String[] nodeIds, String[] nodeNames, TimeValue timeout) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (timeout.compareTo(TimeValue.ZERO) < 0) { throw new IllegalArgumentException("timeout [" + timeout + "] must be non-negative"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java index f8f64edad2974..2ddd27261db0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java @@ -26,7 +26,9 @@ public class ClearVotingConfigExclusionsRequest extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java index e6e2616e67662..46e41d306cefe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java @@ -102,7 +102,9 @@ public ClusterState afterBatchExecution(ClusterState clusterState, boolean clust } public static class Request extends AcknowledgedRequest { - public Request() {} + public Request() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index c7c2b9a290a2e..3d8cdb4b405f8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -48,6 +48,7 @@ public class UpdateDesiredNodesRequest extends AcknowledgedRequest nodes, boolean dryRun) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); assert historyID != null; assert nodes != null; this.historyID = historyID; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index a94555f1dfd1c..2b60e2d4a5ffa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java @@ -37,9 +37,12 @@ public class ClusterHealthRequest extends MasterNodeReadRequest { public GetFeatureUpgradeStatusRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public GetFeatureUpgradeStatusRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java index ccc4a62a1138f..36a90ae9afe33 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java @@ -20,7 +20,7 @@ public class PostFeatureUpgradeRequest extends MasterNodeRequest { public PostFeatureUpgradeRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public PostFeatureUpgradeRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java index a88fb83b2300d..5bde01195e35c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java @@ -34,6 +34,7 @@ public class PrevalidateNodeRemovalRequest extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index 5b49a41ed9476..c4e40f1b208b4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -55,7 +55,9 @@ public ClusterUpdateSettingsRequest(StreamInput in) throws IOException { persistentSettings = readSettingsFromStream(in); } - public ClusterUpdateSettingsRequest() {} + public ClusterUpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 6f6253491c580..91c302c8aa7be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -31,9 +31,12 @@ public final class ClusterSearchShardsRequest extends MasterNodeReadRequest userMetadata; - public CreateSnapshotRequest() {} + public CreateSnapshotRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put repository request with the provided snapshot and repository names @@ -87,6 +89,7 @@ public CreateSnapshotRequest() {} * @param snapshot snapshot name */ public CreateSnapshotRequest(String repository, String snapshot) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.snapshot = snapshot; this.repository = repository; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index b16041da66bf7..67389ea3116d8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -38,6 +38,7 @@ public class DeleteSnapshotRequest extends MasterNodeRequest private boolean includeIndexNames = true; - public GetSnapshotsRequest() {} + public GetSnapshotsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new get snapshots request with given repository names and list of snapshots @@ -85,6 +87,7 @@ public GetSnapshotsRequest() {} * @param snapshots list of snapshots */ public GetSnapshotsRequest(String[] repositories, String[] snapshots) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; this.snapshots = snapshots; } @@ -95,6 +98,7 @@ public GetSnapshotsRequest(String[] repositories, String[] snapshots) { * @param repositories repository names */ public GetSnapshotsRequest(String... 
repositories) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java index d8fd55451cc63..7a7cc0c304556 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java @@ -29,6 +29,7 @@ public class GetShardSnapshotRequest extends MasterNodeRequest repositories, ShardId shardId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); assert repositories.isEmpty() == false; assert repositories.stream().noneMatch(Objects::isNull); assert repositories.size() == 1 || repositories.stream().noneMatch(repo -> repo.equals(ALL_REPOSITORIES)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 8d025653d47fe..73339cedb96e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -60,7 +60,9 @@ public class RestoreSnapshotRequest extends MasterNodeRequest { - public PendingClusterTasksRequest() {} + public PendingClusterTasksRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public PendingClusterTasksRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index fac2006b68814..f223d7fb2762f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -65,7 +65,9 @@ public IndicesAliasesRequest(StreamInput in) throws IOException { origin = in.readOptionalString(); } - public IndicesAliasesRequest() {} + public IndicesAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Request to take one or more actions on one or more indexes and alias combinations. diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java index 9d10065c9c3e9..09071f2e6ea3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java @@ -34,11 +34,14 @@ public class GetAliasesRequest extends MasterNodeReadRequest private String[] originalAliases = Strings.EMPTY_ARRAY; public GetAliasesRequest(String... 
aliases) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.aliases = aliases; this.originalAliases = aliases; } - public GetAliasesRequest() {} + public GetAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * NB prior to 8.12 get-aliases was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until we no diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java index 9427a5fa363ba..9a722f1bce2a9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java @@ -37,12 +37,15 @@ public CloseIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public CloseIndexRequest() {} + public CloseIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new close index request for the specified index. */ public CloseIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 21f187f052580..3a78738ae986a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -111,7 +111,9 @@ public CreateIndexRequest(StreamInput in) throws IOException { } } - public CreateIndexRequest() {} + public CreateIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a request to create an index. 
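The change repeated across these constructor hunks is mechanical: every MasterNodeRequest and AcknowledgedRequest subclass now passes its master-node timeout (and, for acknowledged requests, its ack timeout) to super(...) explicitly, with the deprecated TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT standing in until each call site picks a real value. A minimal sketch of what a fully migrated subclass could look like; the ExampleAcknowledgedRequest name is illustrative and not part of this change:

    import org.elasticsearch.action.ActionRequestValidationException;
    import org.elasticsearch.action.support.master.AcknowledgedRequest;
    import org.elasticsearch.core.TimeValue;

    // Illustrative subclass: both timeouts are chosen by the caller rather than
    // falling back to an implicit 30-second default.
    public class ExampleAcknowledgedRequest extends AcknowledgedRequest<ExampleAcknowledgedRequest> {

        public ExampleAcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) {
            super(masterNodeTimeout, ackTimeout); // the explicit two-argument constructor introduced in this change
        }

        @Override
        public ActionRequestValidationException validate() {
            return null; // no field validation in this sketch
        }
    }

Internally generated instances of such a request would typically pass TimeValue.MAX_VALUE (or TimeValue.MINUS_ONE, which means an infinite timeout from 8.15.0 onwards), as the javadoc added further down spells out.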
@@ -129,6 +131,7 @@ public CreateIndexRequest(String index) { * @param settings the settings to apply to the index */ public CreateIndexRequest(String index, Settings settings) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java index b8206cba8de2a..daceeece4f97b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java @@ -30,7 +30,7 @@ public DeleteDanglingIndexRequest(StreamInput in) throws IOException { } public DeleteDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java index 66378ab9907d8..be2fb10821662 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java @@ -32,7 +32,7 @@ public ImportDanglingIndexRequest(StreamInput in) throws IOException { } public ImportDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java index 87cfc303a289a..2cb431577242d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -48,7 +48,9 @@ public DeleteIndexRequest(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); } - public DeleteIndexRequest() {} + public DeleteIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new delete index request for the specified index. @@ -56,6 +58,7 @@ public DeleteIndexRequest() {} * @param index The index to delete. Use "_all" to delete all indices. */ public DeleteIndexRequest(String index) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = new String[] { index }; } @@ -65,6 +68,7 @@ public DeleteIndexRequest(String index) { * @param indices The indices to delete. Use "_all" to delete all indices. */ public DeleteIndexRequest(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index edc6381438635..707286801cf66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -108,13 +108,16 @@ public PutMappingRequest(StreamInput in) throws IOException { writeIndexOnly = in.readBoolean(); } - public PutMappingRequest() {} + public PutMappingRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new put mapping request against one or more indices. If nothing is set then * it will be executed against all indices. */ public PutMappingRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java index fb0745eb72d1f..4bb4578f24459 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java @@ -39,12 +39,15 @@ public OpenIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public OpenIndexRequest() {} + public OpenIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new open index request for the specified index. */ public OpenIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java index caf33a541e92a..9331d7010a6e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java @@ -43,6 +43,7 @@ public AddIndexBlockRequest(StreamInput in) throws IOException { * Constructs a new request for the specified block and indices */ public AddIndexBlockRequest(APIBlock block, String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.block = Objects.requireNonNull(block); this.indices = Objects.requireNonNull(indices); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 1f582f95aea91..09f9411d5a834 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -120,9 +120,12 @@ public RolloverRequest(StreamInput in) throws IOException { } } - RolloverRequest() {} + RolloverRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public RolloverRequest(String rolloverTarget, String newIndexName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.rolloverTarget = rolloverTarget; this.newIndexName = newIndexName; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index 96cbfc80c8d67..42ff256579984 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -51,7 +51,9 @@ public GetSettingsRequest includeDefaults(boolean includeDefaults) { return this; } - public GetSettingsRequest() {} + public GetSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetSettingsRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index 7fa2e11317a43..c3e87f2f54cf0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -61,12 +61,15 @@ public UpdateSettingsRequest(StreamInput in) throws IOException { } } - public UpdateSettingsRequest() {} + public UpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -74,6 +77,7 @@ public UpdateSettingsRequest(String... indices) { * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(Settings settings, String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java index 475c9c16f149e..8cf2427e91c15 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java @@ -40,10 +40,13 @@ public class IndicesShardStoresRequest extends MasterNodeReadRequestindices */ public IndicesShardStoresRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } - public IndicesShardStoresRequest() {} + public IndicesShardStoresRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public IndicesShardStoresRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index c39d2e1114618..ef709fc4457a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -73,9 +73,12 @@ public ResizeRequest(StreamInput in) throws IOException { } } - ResizeRequest() {} + ResizeRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public ResizeRequest(String targetIndex, String sourceIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.targetIndexRequest = new CreateIndexRequest(targetIndex); this.sourceIndex = sourceIndex; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index b3f3a0a203df5..3c2416200ce61 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -28,12 +28,15 @@ public DeleteIndexTemplateRequest(StreamInput in) throws IOException { name = in.readString(); } - public DeleteIndexTemplateRequest() {} + public DeleteIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new delete index request for the specified name. */ public DeleteIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java index 593162305f2d0..9ac10d782a605 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java @@ -109,6 +109,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete index request for the specified name. 
*/ public Request(String... names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "component templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java index f884c8404d0f2..fa40a901c705b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java @@ -108,6 +108,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete template request for the specified name. */ public Request(String... names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 3d5b4a73e0a57..5483097b140da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -48,9 +48,12 @@ public static class Request extends MasterNodeReadRequest { private String name; private boolean includeDefaults; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; this.includeDefaults = false; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index aebb9cef12f43..5cb35d23c8b7c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -49,6 +49,7 @@ public static class Request extends MasterNodeReadRequest { * @param name A template name or pattern, or {@code null} to retrieve all templates. 
*/ public Request(@Nullable String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (name != null && name.contains(",")) { throw new IllegalArgumentException("template name may not contain ','"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java index ec7ce037e651c..19c89b0186733 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java @@ -25,6 +25,7 @@ public class GetIndexTemplatesRequest extends MasterNodeReadRequest { private TransportPutComposableIndexTemplateAction.Request indexTemplateRequest; private boolean includeDefaults = false; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String templateName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (templateName == null) { throw new IllegalArgumentException("template name cannot be null"); } @@ -53,6 +56,7 @@ public Request(String templateName) { } public Request(TransportPutComposableIndexTemplateAction.Request indexTemplateRequest) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (indexTemplateRequest == null) { throw new IllegalArgumentException("index template body must be present"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java index 56e7079ec38ba..ebf1e9e74b793 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java @@ -56,6 +56,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put component template request with the provided name. */ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 10c9a5e7205b0..6ef887847c270 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -92,12 +92,15 @@ public PutIndexTemplateRequest(StreamInput in) throws IOException { version = in.readOptionalVInt(); } - public PutIndexTemplateRequest() {} + public PutIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put index template request with the provided name. 
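The other half of the picture is the REST layer, which is expected to stop relying on the implicit default and resolve the ?master_timeout parameter itself; the javadoc added below points handlers at RestUtils#getMasterNodeTimeout. A hedged sketch of that wiring, assuming the helper takes the RestRequest and returns the parsed TimeValue (the class and parameter handling here are illustrative, not part of this change):

    import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
    import org.elasticsearch.core.TimeValue;
    import org.elasticsearch.rest.RestRequest;
    import org.elasticsearch.rest.RestUtils;

    final class MasterTimeoutWiringSketch {
        // Builds the request with an explicitly resolved master-node timeout instead of
        // leaving the trappy 30-second default in place.
        static PutIndexTemplateRequest fromRest(RestRequest restRequest) {
            final TimeValue masterNodeTimeout = RestUtils.getMasterNodeTimeout(restRequest); // reads ?master_timeout (assumed signature)
            final PutIndexTemplateRequest request = new PutIndexTemplateRequest(restRequest.param("name"));
            request.masterNodeTimeout(masterNodeTimeout);
            return request;
        }
    }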
*/ public PutIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java index 8d259083a1352..86c6109469477 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java @@ -156,6 +156,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put index template request with the provided name. */ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index f9e559fa16ec7..40060d5e5d927 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -36,11 +36,13 @@ public static class Request extends AcknowledgedRequest implements Indi private final long startTime; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = System.currentTimeMillis(); } public Request(String name, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = startTime; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java index b68a7d3fcd159..5b79eae0cebfd 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java @@ -47,6 +47,7 @@ public static class Request extends MasterNodeRequest implements Indice private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); public Request(String... 
names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names); this.wildcardExpressionsOriginallySpecified = Arrays.stream(names).anyMatch(Regex::isSimpleMatchPattern); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 1517b368e21ea..812da87eab103 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -58,10 +58,12 @@ public static class Request extends MasterNodeReadRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java index 3a834273e84cf..226b8d44f636c 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest actions) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.actions = Collections.unmodifiableList(actions); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java index 3b3e644272cbc..0853d30d22de4 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends MasterNodeRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index 8156e03b0cdd1..7bb63ae27b526 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -95,6 +95,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention) { } public Request(String[] names, DataStreamLifecycle lifecycle) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = lifecycle; } @@ -104,6 +105,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Bool } public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Boolean enabled, @Nullable Downsampling downsampling) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, 
DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = DataStreamLifecycle.newBuilder() .dataRetention(dataRetention) diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index e8e299c58d2eb..7d2b1be79731e 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -50,13 +50,16 @@ public Request( final TimeValue waitTimeout, final DownsampleConfig downsampleConfig ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.sourceIndex = sourceIndex; this.targetIndex = targetIndex; this.waitTimeout = waitTimeout == null ? DEFAULT_WAIT_TIMEOUT : waitTimeout; this.downsampleConfig = downsampleConfig; } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 3810d95872417..4ac4d63ba5de0 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -20,6 +20,7 @@ public class DeletePipelineRequest extends AcknowledgedRequest * Create a new pipeline request with the id and source along with the content type of the source */ public PutPipelineRequest(String id, BytesReference source, XContentType xContentType, Integer version) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = Objects.requireNonNull(id); this.source = Objects.requireNonNull(source); this.xContentType = Objects.requireNonNull(xContentType); diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 48c2f1890ba08..e8470ba77632f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -101,6 +101,7 @@ private void doRun() { hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); } hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + assert innerHits.isPooled() == false || hit.isPooled() : "pooled inner hits can only be added to a pooled hit"; innerHits.mustIncRef(); } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 7e271536be9fe..2bbe3d36f031f 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -34,18 +34,27 @@ public abstract class AcknowledgedRequest + * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + * + * @param ackTimeout specifies how long to wait for all relevant nodes to apply a cluster state update and acknowledge this to + * the elected master. */ - protected AcknowledgedRequest() { - this(DEFAULT_ACK_TIMEOUT); + protected AcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) { + super(masterNodeTimeout); + this.ackTimeout = Objects.requireNonNull(ackTimeout); } - /** - * @param ackTimeout specifies how long to wait for all relevant nodes to apply a cluster state update and acknowledge this to the - * elected master. - */ + @Deprecated(forRemoval = true) // just a temporary compatibility shim protected AcknowledgedRequest(TimeValue ackTimeout) { - this.ackTimeout = Objects.requireNonNull(ackTimeout); + this(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ackTimeout); } protected AcknowledgedRequest(StreamInput in) throws IOException { @@ -94,6 +103,8 @@ public Plain(StreamInput in) throws IOException { super(in); } - public Plain() {} + public Plain() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java index 7f4100473c42c..92788f53279d5 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import java.io.IOException; @@ -20,7 +21,20 @@ public abstract class MasterNodeReadRequest + * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + */ + protected MasterNodeReadRequest(TimeValue masterNodeTimeout) { + super(masterNodeTimeout); + } protected MasterNodeReadRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index 063dbb0397de8..1b3dca31689e2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -21,15 +21,36 @@ */ public abstract class MasterNodeRequest> extends ActionRequest { - public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - - private TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT; + /** + * The default timeout for master-node requests. It's super-trappy to have such a default, because it makes it all too easy to forget + * to add a mechanism by which clients can change it. Without such a mechanism things will work fine until we encounter a large cluster + * that is struggling to process cluster state updates fast enough, and it's a disaster if we cannot extend the master-node timeout in + * those cases. We shouldn't use this any more and should work towards removing it. + *
<p>
+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. + * + * @deprecated all requests should specify a timeout, see #107984. + */ + @Deprecated(forRemoval = true) + public static final TimeValue TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - protected MasterNodeRequest() {} + private TimeValue masterNodeTimeout; /** * @param masterNodeTimeout Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy - * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever. + * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *
<p>
+ * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. */ protected MasterNodeRequest(TimeValue masterNodeTimeout) { this.masterNodeTimeout = Objects.requireNonNull(masterNodeTimeout); @@ -49,7 +70,14 @@ public void writeTo(StreamOutput out) throws IOException { /** * Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy processing other tasks. The - * value {@link TimeValue#MINUS_ONE} means to wait forever. + * value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *
<p>
+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *
<p>
+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. */ @SuppressWarnings("unchecked") public final Request masterNodeTimeout(TimeValue timeout) { diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 00384852d1472..94ba504c8b175 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -26,10 +26,13 @@ public abstract class ClusterInfoRequest buildNumberOfShardsSetting() { - /* This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that - * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes - * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards - * per cluster. this also prevents creating stuff like a new index with millions of shards by accident which essentially - * kills the entire cluster with OOM on the spot.*/ - final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024")); + final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", PER_INDEX_MAX_NUMBER_OF_SHARDS)); if (maxNumShards < 1) { throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index 631845dc33288..f5bb97af7625f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -1864,7 +1864,7 @@ public static class PutRequest { CompressedXContent mappings = null; List aliases = new ArrayList<>(); - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; @@ -1914,7 +1914,7 @@ public PutRequest version(Integer version) { public static class RemoveRequest { final String name; - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java index ebdf6e4b3d8ee..3b1257a510747 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import 
org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; @@ -223,14 +224,14 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { - Set settings = allSettings.keySet(); + Set settings = indexTemplateAndCreateRequestSettings.keySet(); if (settings.contains(TIER_PREFERENCE)) { // just a marker -- this null value will be removed or overridden by the template/request settings return NULL_TIER_PREFERENCE_SETTINGS; diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index e67196c9090c9..bbf7cc3e0e1e9 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; @@ -31,20 +32,20 @@ public interface IndexSettingProvider { * @param indexName The name of the new index being created * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise * null - * @param timeSeries Whether the template is in time series mode. + * @param isTimeSeries Whether the template is in time series mode. * @param metadata The current metadata instance that doesn't yet contain the index to be created * @param resolvedAt The time the request to create this new index was accepted. 
- * @param allSettings All the setting resolved from the template that matches and any setting defined on the create index - * request + * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings + * defined on the create index request * @param combinedTemplateMappings All the mappings resolved from the template that matches */ Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 9476c3e719e0b..08421af332fe4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -838,7 +838,15 @@ protected String contentType() { private static class NoOpObjectMapper extends ObjectMapper { NoOpObjectMapper(String name, String fullPath) { - super(name, fullPath, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, Dynamic.RUNTIME, Collections.emptyMap()); + super( + name, + fullPath, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + Dynamic.RUNTIME, + Collections.emptyMap() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index b5de3971fa091..acfe0fcfbf5bd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -21,6 +21,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.Map; +import java.util.stream.Stream; /** * Wrapper around everything that defines a mapping, without references to @@ -125,7 +126,8 @@ private boolean isSourceSynthetic() { } public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return root.syntheticFieldLoader(Arrays.stream(metadataMappers)); + var stream = Stream.concat(Stream.of(metadataMappers), root.mappers.values().stream()); + return root.syntheticFieldLoader(stream); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 5c2880a4bf760..a8955e46f0ad4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -149,7 +149,7 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { String nestedTypePath, Query nestedTypeFilter ) { - super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.nestedTypePath = nestedTypePath; this.nestedTypeFilter = nestedTypeFilter; this.includeInParent = includeInParent; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 2d432670936cc..6336e6ca0b764 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -37,10 
+37,12 @@ public class ObjectMapper extends Mapper { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final String CONTENT_TYPE = "object"; + static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; public static class Defaults { public static final boolean ENABLED = true; public static final Explicit SUBOBJECTS = Explicit.IMPLICIT_TRUE; + public static final Explicit TRACK_ARRAY_SOURCE = Explicit.IMPLICIT_FALSE; public static final Dynamic DYNAMIC = Dynamic.TRUE; } @@ -78,6 +80,7 @@ static Dynamic getRootDynamic(MappingLookup mappingLookup) { public static class Builder extends Mapper.Builder { protected final Explicit subobjects; protected Explicit enabled = Explicit.IMPLICIT_TRUE; + protected Explicit trackArraySource = Defaults.TRACK_ARRAY_SOURCE; protected Dynamic dynamic; protected final List mappersBuilders = new ArrayList<>(); @@ -91,6 +94,11 @@ public Builder enabled(boolean enabled) { return this; } + public Builder trackArraySource(boolean value) { + this.trackArraySource = Explicit.explicitBoolean(value); + return this; + } + public Builder dynamic(Dynamic dynamic) { this.dynamic = dynamic; return this; @@ -182,6 +190,7 @@ public ObjectMapper build(MapperBuilderContext context) { context.buildFullName(name()), enabled, subobjects, + trackArraySource, dynamic, buildMappers(context.createChildContext(name(), dynamic)) ); @@ -242,6 +251,9 @@ protected static boolean parseObjectOrDocumentTypeProperties( } else if (fieldName.equals("enabled")) { builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".enabled")); return true; + } else if (fieldName.equals(STORE_ARRAY_SOURCE_PARAM)) { + builder.trackArraySource(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".track_array_source")); + return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { // nothing to do here, empty (to support "properties: []" case) @@ -369,6 +381,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate protected final Explicit enabled; protected final Explicit subobjects; + protected final Explicit trackArraySource; protected final Dynamic dynamic; protected final Map mappers; @@ -378,6 +391,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate String fullPath, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers ) { @@ -387,6 +401,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate this.fullPath = internFieldName(fullPath); this.enabled = enabled; this.subobjects = subobjects; + this.trackArraySource = trackArraySource; this.dynamic = dynamic; if (mappers == null) { this.mappers = Map.of(); @@ -412,7 +427,7 @@ public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { * This is typically used in the context of a mapper merge when there's not enough budget to add the entire object. 
*/ ObjectMapper withoutMappers() { - return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, dynamic, Map.of()); + return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, trackArraySource, dynamic, Map.of()); } @Override @@ -454,6 +469,10 @@ public final boolean subobjects() { return subobjects.value(); } + public final boolean trackArraySource() { + return trackArraySource.value(); + } + @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -480,6 +499,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex fullPath, mergeResult.enabled, mergeResult.subObjects, + mergeResult.trackArraySource, mergeResult.dynamic, mergeResult.mappers ); @@ -488,6 +508,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex protected record MergeResult( Explicit enabled, Explicit subObjects, + Explicit trackArraySource, ObjectMapper.Dynamic dynamic, Map mappers ) { @@ -519,11 +540,26 @@ static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, Ma } else { subObjects = existing.subobjects; } + final Explicit trackArraySource; + if (mergeWithObject.trackArraySource.explicit()) { + if (reason == MergeReason.INDEX_TEMPLATE) { + trackArraySource = mergeWithObject.trackArraySource; + } else if (existing.trackArraySource != mergeWithObject.trackArraySource) { + throw new MapperException( + "the [track_array_source] parameter can't be updated for the object mapping [" + existing.name() + "]" + ); + } else { + trackArraySource = existing.trackArraySource; + } + } else { + trackArraySource = existing.trackArraySource; + } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, + trackArraySource, mergeWithObject.dynamic != null ? 
mergeWithObject.dynamic : existing.dynamic, mergedMappers ); @@ -680,6 +716,9 @@ void toXContent(XContentBuilder builder, Params params, ToXContent custom) throw if (subobjects != Defaults.SUBOBJECTS) { builder.field("subobjects", subobjects.value()); } + if (trackArraySource != Defaults.TRACK_ARRAY_SOURCE) { + builder.field(STORE_ARRAY_SOURCE_PARAM, trackArraySource.value()); + } if (custom != null) { custom.toXContent(builder, params); } @@ -712,19 +751,17 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep } - public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream extra) { - return new SyntheticSourceFieldLoader( - Stream.concat(extra, mappers.values().stream()) - .sorted(Comparator.comparing(Mapper::name)) - .map(Mapper::syntheticFieldLoader) - .filter(l -> l != null) - .toList() - ); + public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream mappers) { + var fields = mappers.sorted(Comparator.comparing(Mapper::name)) + .map(Mapper::syntheticFieldLoader) + .filter(l -> l != SourceLoader.SyntheticFieldLoader.NOTHING) + .toList(); + return new SyntheticSourceFieldLoader(fields); } @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return syntheticFieldLoader(Stream.empty()); + return syntheticFieldLoader(mappers.values().stream()); } private class SyntheticSourceFieldLoader implements SourceLoader.SyntheticFieldLoader { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index d44f03d72e211..668237571984a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -77,7 +77,7 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { Explicit timeSeriesDimensionSubFields ) { // Subobjects are not currently supported. 
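The new store_array_source object parameter (held internally as trackArraySource) follows the same Explicit boolean merge conventions as the subobjects handling just above: an explicit value on the incoming mapping wins for index-template merges, a conflicting explicit value in an ordinary mapping update is rejected, and an implicit value leaves the existing setting untouched. A standalone restatement of that rule in plain Java, not the mapper code itself; incomingExplicit is null when the merged-in mapping does not set the flag:

    final class TrackArraySourceMergeSketch {
        static boolean merge(boolean existing, Boolean incomingExplicit, boolean indexTemplateMerge) {
            if (incomingExplicit == null) {
                return existing;             // incoming side is implicit: keep the current value
            }
            if (indexTemplateMerge) {
                return incomingExplicit;     // MergeReason.INDEX_TEMPLATE may override freely
            }
            if (incomingExplicit != existing) {
                // mirrors the MapperException thrown above for a conflicting update
                throw new IllegalArgumentException("the [track_array_source] parameter can't be updated");
            }
            return existing;                 // explicit but identical: no change
        }
    }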
- super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 9e0680e6e6e6a..c19809760ec43 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -117,6 +117,7 @@ public RootObjectMapper build(MapperBuilderContext context) { name(), enabled, subobjects, + trackArraySource, dynamic, mappers, new HashMap<>(runtimeFields), @@ -262,6 +263,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { String name, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers, Map runtimeFields, @@ -270,7 +272,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { Explicit dateDetection, Explicit numericDetection ) { - super(name, name, enabled, subobjects, dynamic, mappers); + super(name, name, enabled, subobjects, trackArraySource, dynamic, mappers); this.runtimeFields = runtimeFields; this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; @@ -292,6 +294,7 @@ RootObjectMapper withoutMappers() { simpleName(), enabled, subobjects, + trackArraySource, dynamic, Map.of(), Map.of(), @@ -407,6 +410,7 @@ public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeCo simpleName(), mergeResult.enabled(), mergeResult.subObjects(), + mergeResult.trackArraySource(), mergeResult.dynamic(), mergeResult.mappers(), Map.copyOf(runtimeFields), diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index 44e86e056ef3b..7ab682d3143e7 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -53,7 +53,9 @@ public static class Request extends MasterNodeRequest { private String localAbortReason; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -64,6 +66,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, Exception exception, String localAbortReason) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.exception = exception; this.allocationId = allocationId; diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 1fbdd03dcc268..26cf0658f60b9 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeRequest { private String taskId; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -49,6 +51,7 @@ public Request(StreamInput in) 
throws IOException { } public Request(String taskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; } diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 299891c64711a..ce0e46e7b0425 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -51,7 +51,9 @@ public static class Request extends MasterNodeRequest { private PersistentTaskParams params; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -61,6 +63,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, String taskName, PersistentTaskParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.taskName = taskName; this.params = params; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index dcf86f85eb709..6ecefa1bbf847 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -45,7 +45,9 @@ public static class Request extends MasterNodeRequest { private long allocationId = -1L; private PersistentTaskState state; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -55,6 +57,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, PersistentTaskState state) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.allocationId = allocationId; this.state = state; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 4b5c647da0c9a..0c54e8ff89589 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -167,23 +167,35 @@ protected SearchHit nextDoc(int doc) throws IOException { leafSourceLoader, leafIdLoader ); - sourceProvider.source = hit.source(); - fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); - for (FetchSubPhaseProcessor processor : processors) { - processor.process(hit); + boolean success = false; + try { + sourceProvider.source = hit.source(); + fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); + for (FetchSubPhaseProcessor processor : processors) { + processor.process(hit); + } + success = true; + return hit.hit(); + } finally { + if (success == false) { + hit.hit().decRef(); + } } - return hit.hit(); } }; SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad); if (context.isCancelled()) { + for (SearchHit hit : hits) { + // release all hits that would otherwise become owned and eventually released by SearchHits below + hit.decRef(); + } throw new TaskCancelledException("cancelled"); } TotalHits totalHits = context.getTotalHits(); - return 
SearchHits.unpooled(hits, totalHits, context.getMaxScore()); + return new SearchHits(hits, totalHits, context.getMaxScore()); } List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { @@ -257,12 +269,12 @@ private static HitContext prepareNonNestedHitContext( String id = idLoader.getId(subDocId); if (id == null) { - // TODO: can we use pooled buffers here as well? - SearchHit hit = SearchHit.unpooled(docId, null); + SearchHit hit = new SearchHit(docId); + // TODO: can we use real pooled buffers here as well? Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); return new HitContext(hit, subReaderContext, subDocId, Map.of(), source); } else { - SearchHit hit = SearchHit.unpooled(docId, id); + SearchHit hit = new SearchHit(docId, id); Source source; if (requiresSource) { Timer timer = profiler.startLoadingSource(); @@ -339,7 +351,7 @@ private static HitContext prepareNestedHitContext( assert nestedIdentity != null; Source nestedSource = nestedIdentity.extractSource(rootSource); - SearchHit hit = SearchHit.unpooled(topDocId, rootId, nestedIdentity); + SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity); return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index cc39113f2009f..81b3e7465feee 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -67,6 +67,7 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde setNextReader(ctx, docsInLeaf); } currentDoc = docs[i].docId; + assert searchHits[docs[i].index] == null; searchHits[docs[i].index] = nextDoc(docs[i].docId); } } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 4c3d3948ff889..4170f7e2f8b4b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -61,8 +61,13 @@ public FetchSearchResult fetchResult() { public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); + assert hasReferences(); + var existing = this.hits; + if (existing != null) { + existing.decRef(); + } this.hits = hits; - hits.incRef(); + hits.mustIncRef(); assert this.profileResult == null; this.profileResult = profileResult; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index ccb54801472a6..a4ba982e1dd73 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -104,6 +104,7 @@ private void hitExecute(Map innerHi } } var h = fetchResult.hits(); + assert hit.isPooled() || h.isPooled() == false; results.put(entry.getKey(), h); h.mustIncRef(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java 
index 6098ea777d38a..7ccdb5da6d736 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.reroute; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; @@ -22,6 +21,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.admin.cluster.RestClusterRerouteAction; import org.elasticsearch.test.ESTestCase; @@ -202,7 +202,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep if (original.isRetryFailed() || randomBoolean()) { params.put("retry_failed", Boolean.toString(original.isRetryFailed())); } - if (false == original.masterNodeTimeout().equals(MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT) || randomBoolean()) { + if (false == original.masterNodeTimeout().equals(TimeValue.THIRTY_SECONDS) || randomBoolean()) { params.put(REST_MASTER_TIMEOUT_PARAM, original.masterNodeTimeout().toString()); } if (original.getCommands() != null) { diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 94e0ce1ccaf17..6d24f8d2fe9e0 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -153,7 +153,9 @@ public static class Request extends MasterNodeRequest implements Indice private String[] indices = Strings.EMPTY_ARRAY; private final RefCounted refCounted = AbstractRefCounted.of(() -> {}); - Request() {} + Request() { + super(TimeValue.THIRTY_SECONDS); + } Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 6df9fd1f35f52..c02df8336a66d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -164,7 +164,15 @@ private static FieldMapper createFieldMapper(String parent, String name) { } private static ObjectMapper createObjectMapper(String name) { - return new ObjectMapper(name, name, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, ObjectMapper.Dynamic.FALSE, emptyMap()); + return new ObjectMapper( + name, + name, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + ObjectMapper.Dynamic.FALSE, + emptyMap() + ); } private static NestedObjectMapper createNestedObjectMapper(String name) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index 0308dac5fa216..65fa4e236bafc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -82,6 +82,7 @@ public void testSubfieldOverride() { "object", Explicit.EXPLICIT_TRUE, Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, ObjectMapper.Dynamic.TRUE, Collections.singletonMap("object.subfield", fieldMapper) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 154132c772927..69848e3b93f90 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -165,6 +165,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertFalse(objectMapper.isEnabled()); assertTrue(objectMapper.subobjects()); + assertFalse(objectMapper.trackArraySource()); // Setting 'enabled' to true is allowed, and updates the mapping. update = Strings.toString( @@ -175,6 +176,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { .field("type", "object") .field("enabled", true) .field("subobjects", false) + .field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true) .endObject() .endObject() .endObject() @@ -185,6 +187,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertTrue(objectMapper.isEnabled()); assertFalse(objectMapper.subobjects()); + assertTrue(objectMapper.trackArraySource()); } public void testFieldReplacementForIndexTemplates() throws IOException { @@ -573,6 +576,7 @@ private ObjectMapper createObjectMapperWithAllParametersSet(CheckedConsumer { private String key; private String value; - Request() {} - Request(StreamInput in) throws IOException { super(in); index = in.readString(); @@ -79,6 +78,7 @@ public static class Request extends MasterNodeRequest { } public Request(final String index, final String key, final String value) { + super(TimeValue.THIRTY_SECONDS); this.index = index; this.key = key; this.value = value; diff --git a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java index a0ad31c65c8b8..c92b0b0bf15d2 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.settings.InternalOrPrivateSettingsPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -43,6 +44,10 @@ public ValidRequest fromXContent(XContentParser parser) throws IOException { } static class ValidRequest extends MasterNodeRequest { + ValidRequest() { + super(TimeValue.THIRTY_SECONDS); + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java 
b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 5c034a81fc9cd..e693f9a1562fd 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,6 +269,7 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-serverless/issues/1874") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java index 2b4e7fd4c7517..63b7dd88cb44e 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java @@ -74,36 +74,45 @@ protected byte[] randomReadAndSlice(IndexInput indexInput, int length) throws IO switch (readStrategy) { case 0, 1, 2, 3: if (length - readPos >= Long.BYTES && readStrategy <= 0) { - long read = indexInput.readLong(); - ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(read); + ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(indexInput.readLong()); readPos += Long.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readLong(indexInput.getFilePointer() - Long.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Integer.BYTES && readStrategy <= 1) { - int read = indexInput.readInt(); - ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(read); + ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(indexInput.readInt()); readPos += Integer.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readInt(indexInput.getFilePointer() - Integer.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Short.BYTES && readStrategy <= 2) { - short read = indexInput.readShort(); - ByteBuffer.wrap(output, readPos, Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(read); + ByteBuffer.wrap(output, readPos, Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(indexInput.readShort()); readPos += Short.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readShort(indexInput.getFilePointer() - Short.BYTES)); - indexInput.seek(readPos); - } } else { - byte read = indexInput.readByte(); - output[readPos++] = read; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readByte(indexInput.getFilePointer() - 1)); + output[readPos++] = indexInput.readByte(); + } + if (indexInput instanceof RandomAccessInput randomAccessInput && randomBoolean()) { + final var randomAccessReadStart = between(0, length - 1); + final int randomAccessReadEnd; + if (length - randomAccessReadStart >= Long.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Long.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + 
.putLong(randomAccessInput.readLong(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Long.BYTES; + } else if (length - randomAccessReadStart >= Integer.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Integer.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putInt(randomAccessInput.readInt(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Integer.BYTES; + } else if (length - randomAccessReadStart >= Short.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Short.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putShort(randomAccessInput.readShort(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Short.BYTES; + } else { + output[randomAccessReadStart] = randomAccessInput.readByte(randomAccessReadStart); + randomAccessReadEnd = randomAccessReadStart + 1; + } + if (randomAccessReadStart <= readPos && readPos <= randomAccessReadEnd && randomBoolean()) { + readPos = between(readPos, randomAccessReadEnd); indexInput.seek(readPos); } + + indexInput.seek(readPos); // BUG these random-access reads shouldn't affect the current position } break; case 4: diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 83f7fdfe386c7..80f9f2abea184 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2101,9 +2101,24 @@ protected static SecureRandom secureRandomFips(final byte[] seed) throws NoSuchA return secureRandomFips; } + /** + * The timeout used for the various "safe" wait methods such as {@link #safeAwait} and {@link #safeAcquire}. In tests we generally want + * these things to complete almost immediately, but sometimes the CI runner executes things rather slowly so we use {@code 10s} as a + * fairly relaxed definition of "immediately". + *
<p>
+ * A well-designed test should not need to wait for anything close to this duration when run in isolation. If you think you need to do + * so, instead seek a better way to write the test such that it does not need to wait for so long. Tests that take multiple seconds to + * complete are a big drag on CI times which slows everyone down. + *
<p>
+ * For instance, tests which verify things that require the passage of time ought to simulate this (e.g. using a {@link + * org.elasticsearch.common.util.concurrent.DeterministicTaskQueue}). Excessive busy-waits ought to be replaced by blocking waits (e.g. + * using a {@link CountDownLatch}) which release as soon as the condition is satisfied. + */ + public static final TimeValue SAFE_AWAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + public static void safeAwait(CyclicBarrier barrier) { try { - barrier.await(10, TimeUnit.SECONDS); + barrier.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CyclicBarrier release"); @@ -2114,7 +2129,10 @@ public static void safeAwait(CyclicBarrier barrier) { public static void safeAwait(CountDownLatch countDownLatch) { try { - assertTrue("safeAwait: CountDownLatch did not reach zero within the timeout", countDownLatch.await(10, TimeUnit.SECONDS)); + assertTrue( + "safeAwait: CountDownLatch did not reach zero within the timeout", + countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CountDownLatch to reach zero"); @@ -2123,7 +2141,10 @@ public static void safeAwait(CountDownLatch countDownLatch) { public static void safeAcquire(Semaphore semaphore) { try { - assertTrue("safeAcquire: Semaphore did not acquire permit within the timeout", semaphore.tryAcquire(10, TimeUnit.SECONDS)); + assertTrue( + "safeAcquire: Semaphore did not acquire permit within the timeout", + semaphore.tryAcquire(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAcquire: interrupted waiting for Semaphore to acquire permit"); @@ -2134,7 +2155,7 @@ public static T safeAwait(SubscribableListener listener) { final var future = new PlainActionFuture(); listener.addListener(future); try { - return future.get(10, TimeUnit.SECONDS); + return future.get(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new AssertionError("safeAwait: interrupted waiting for SubscribableListener", e); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index bc3723119afa9..dd7987642c58a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -31,13 +31,35 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender { +public class MockLogAppender implements Releasable { private static final Map> mockAppenders = new ConcurrentHashMap<>(); private static final RealMockAppender parent = new RealMockAppender(); + // TODO: this can become final once the ctor is made private + private List loggers = List.of(); private final List expectations; private volatile boolean isAlive = true; + @Override + public void close() { + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? 
null : v; + }); + } + // check that all expectations have been evaluated before this is released + for (WrappedLoggingExpectation expectation : expectations) { + assertThat( + "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, + expectation.assertMatchedCalled, + is(true) + ); + } + } + private static class RealMockAppender extends AbstractAppender { RealMockAppender() { @@ -71,6 +93,11 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } + private MockLogAppender(List loggers) { + this(); + this.loggers = loggers; + } + /** * Initialize the mock log appender with the log4j system. */ @@ -267,58 +294,57 @@ public String toString() { } } + public Releasable capturing(Class... classes) { + this.loggers = Arrays.stream(classes).map(Class::getCanonicalName).toList(); + addToMockAppenders(this, loggers); + return this; + } + + public Releasable capturing(String... names) { + this.loggers = Arrays.asList(names); + addToMockAppenders(this, loggers); + return this; + } + /** * Adds the list of class loggers to this {@link MockLogAppender}. * * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ - public Releasable capturing(Class... classes) { - return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); + public static MockLogAppender capture(Class... classes) { + return create(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ - public Releasable capturing(String... names) { - return appendToLoggers(Arrays.asList(names)); + public static MockLogAppender capture(String... names) { + return create(Arrays.asList(names)); + } + + private static MockLogAppender create(List loggers) { + MockLogAppender appender = new MockLogAppender(loggers); + addToMockAppenders(appender, loggers); + return appender; } - private Releasable appendToLoggers(List loggers) { + private static void addToMockAppenders(MockLogAppender appender, List loggers) { for (String logger : loggers) { mockAppenders.compute(logger, (k, v) -> { if (v == null) { v = new CopyOnWriteArrayList<>(); } - v.add(this); + v.add(appender); return v; }); } - return () -> { - isAlive = false; - for (String logger : loggers) { - mockAppenders.compute(logger, (k, v) -> { - assert v != null; - v.remove(this); - return v.isEmpty() ? 
null : v; - }); - } - // check that all expectations have been evaluated before this is released - for (WrappedLoggingExpectation expectation : expectations) { - assertThat( - "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled, - is(true) - ); - } - }; } /** * Executes an action and verifies expectations against the provided logger */ public static void assertThatLogger(Runnable action, Class loggerOwner, MockLogAppender.LoggingExpectation expectation) { - MockLogAppender mockAppender = new MockLogAppender(); - try (var ignored = mockAppender.capturing(loggerOwner)) { + try (var mockAppender = MockLogAppender.capture(loggerOwner)) { mockAppender.addExpectation(expectation); action.run(); mockAppender.assertAllExpectationsMatched(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index ee7687398cf7b..89d10acb6ec45 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -1319,8 +1319,7 @@ public void handleException(TransportException exp) {} .build() ); - MockLogAppender appender = new MockLogAppender(); - try (var ignored = appender.capturing("org.elasticsearch.transport.TransportService.tracer")) { + try (var appender = MockLogAppender.capture("org.elasticsearch.transport.TransportService.tracer")) { //////////////////////////////////////////////////////////////////////// // tests for included action type "internal:test" diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 49fb38b518dce..d555337f467ae 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,8 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); + FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), + SEMANTIC_TEXT_ENABLED("es.semantic_text_feature_flag_enabled=true", Version.fromString("8.15.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java index d3be1816924fb..9b44daf6dd427 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java @@ -34,6 +34,7 @@ public String name() { } public Request(final String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = Objects.requireNonNull(name); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java 
b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java index 4a356f74e03f8..90c2d664b421d 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java @@ -39,6 +39,7 @@ public static class Request extends AcknowledgedRequest roles, final SortedMap deciders) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.roles = roles; this.deciders = deciders; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java index 5883c36c9e2c5..9e8e707db6b86 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java @@ -14,7 +14,9 @@ public class GetBasicStatusRequest extends MasterNodeReadRequest { - public GetBasicStatusRequest() {} + public GetBasicStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetBasicStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java index 93a0206ac70c3..cae967058fb73 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java @@ -14,7 +14,9 @@ public class GetTrialStatusRequest extends MasterNodeReadRequest { - public GetTrialStatusRequest() {} + public GetTrialStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetTrialStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java index 602e521fe10e3..7e9b0ebf44bee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java @@ -16,7 +16,9 @@ public class PostStartBasicRequest extends AcknowledgedRequest) () -> "unexpected failure during [" + TASK_SOURCE + "]", e); + var state = clusterService.lifecycleState(); + if (state == Lifecycle.State.STOPPED || state == Lifecycle.State.CLOSED) { + logger.debug("node shutdown during [" + TASK_SOURCE + "]", e); + } else { + logger.error("unexpected failure during [" + TASK_SOURCE + "]", e); + } } private ClusterState extendBasic(ClusterState currentState, LicensesMetadata currentLicenseMetadata) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java index 398b39b12aa19..e5fbc9e07955c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -18,7 +18,9 @@ public class XPackUsageRequest extends MasterNodeRequest { - public XPackUsageRequest() {} + public 
XPackUsageRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public XPackUsageRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java index f32fd515e7817..d1d04088dcdd7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java @@ -27,6 +27,7 @@ public class FreezeRequest extends AcknowledgedRequest implements private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT; public FreezeRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java index e96c6a7632ec1..ea4e53aced5fe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -14,7 +14,9 @@ public class GetLicenseRequest extends MasterNodeReadRequest { - public GetLicenseRequest() {} + public GetLicenseRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetLicenseRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java index e6b087c97cdb5..6584dcc279e85 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java @@ -53,6 +53,7 @@ public static MigrateToDataTiersRequest parse(XContentParser parser) throws IOEx } public MigrateToDataTiersRequest(@Nullable String legacyTemplateToDelete, @Nullable String nodeAttributeName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.legacyTemplateToDelete = legacyTemplateToDelete; this.nodeAttributeName = nodeAttributeName; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java index 3d46b2dd5070f..6270c27ac463f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java @@ -44,6 +44,7 @@ public static SetResetModeActionRequest disabled(boolean deleteMetadata) { } SetResetModeActionRequest(boolean enabled, Boolean deleteMetadata) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; this.deleteMetadata = deleteMetadata != null && deleteMetadata; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java index 
300d2844b7a2a..df917b4e97b7d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean active; public Request(final String name, final boolean active) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.active = active; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index b12f7bf2dc06a..b187e5e39dd33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -45,7 +45,9 @@ public Request(StreamInput in) throws IOException { } } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index 8e7e9f8605245..e38a1cfd4a2cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public static class Request extends AcknowledgedRequest { private final String name; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java index c405e4e81ff19..d979a4cf44b9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeReadRequest { private String[] followerIndices; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String[] getFollowerIndices() { return followerIndices; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java index 70f4f256c87e2..bd6ab5bb5af44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -34,7 +34,9 @@ public static class Request extends MasterNodeReadRequest { private String name; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index 7ad8e5881e443..c6905b2d06a34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -31,6 +31,7 @@ public static class Request extends MasterNodeRequest { private final String followIndex; public Request(String followIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.followIndex = Objects.requireNonNull(followIndex, "followIndex"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 92902aa9962ab..333171d864c4f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(XContentParser parser, String name) throws IO private FollowParameters parameters = new FollowParameters(); private List leaderIndexExclusionPatterns = Collections.emptyList(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 6570fb66a2755..db1e84aca9cda 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(final XContentParser parser) throws IOExcepti private FollowParameters parameters = new FollowParameters(); private ActiveShardCount waitForActiveShards = ActiveShardCount.NONE; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 4cd84733b19e0..12ddea8d99578 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -54,7 +54,9 @@ public static Request fromXContent(final XContentParser parser, final String fol private String followerIndex; private FollowParameters parameters = new FollowParameters(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index 808df5f8bccb0..9a5f011f39a1b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest implements Indi private final String followerIndex; public Request(String followerIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.followerIndex = followerIndex; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java index e444232291101..82f98176838ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java index 779ea535f74d9..5d629365a8096 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java @@ -34,6 +34,7 @@ public static class Request extends MasterNodeRequest { private boolean waitForCompletion; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = Objects.requireNonNull(name, "name cannot be null"); this.waitForCompletion = true; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index ef8229b407b56..37851a3641ebd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -39,10 +39,12 @@ public static class Request extends MasterNodeReadRequest { private final List names; public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = new ArrayList<>(); } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Arrays.asList(names); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index 4ebbb75239879..d1031828e0522 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -37,6 +37,7 @@ public static class Request extends MasterNodeRequest { private String policyName; public Request(String policyName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.policyName = policyName; } @@ -42,7 +43,9 @@ public Request(StreamInput in) throws IOException { policyName = in.readString(); } - public 
Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getPolicyName() { return policyName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java index 41b29365b8866..d359498f33621 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -104,6 +104,7 @@ public static class Request extends AcknowledgedRequest { private final String[] policyNames; public Request(String... policyNames) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (policyNames == null) { throw new IllegalArgumentException("ids cannot be null"); } @@ -116,6 +117,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); policyNames = Strings.EMPTY_ARRAY; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java index fe6754b735ef7..ebaaf42246251 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java @@ -38,6 +38,7 @@ public class PutLifecycleRequest extends AcknowledgedRequest { private final XContentType contentType; public Request(TaskType taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; this.content = content; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java index 23fed34d6889e..9b383b2652af4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java @@ -36,6 +36,7 @@ public static class Request extends MasterNodeRequest { private final StartTrainedModelDeploymentAction.TaskParams taskParams; public Request(StartTrainedModelDeploymentAction.TaskParams taskParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskParams = ExceptionsHelper.requireNonNull(taskParams, "taskParams"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index 5c5e02559b1d5..40560f11b5039 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -36,6 +36,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = 
ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 7d37dc8716387..efd35a3ba87f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId, String eventId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); this.eventId = ExceptionsHelper.requireNonNull(eventId, ScheduledEvent.EVENT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 48323692b7915..82d6c36273539 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); ackTimeout(DEFAULT_TIMEOUT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 2681fadf8fc59..f25be9cd164a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -37,6 +37,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private boolean force; public Request(String datafeedId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 50cec50b2e255..782c7fa4a4db1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String filterId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java index f3e888ef9599c..5bf6a8e38e18d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java @@ -40,6 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String forecastId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.forecastId = ExceptionsHelper.requireNonNull(forecastId, ForecastRequestStats.FORECAST_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 58b67e57acf26..99b045d19bdd0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -44,6 +44,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java index 9cd19eab449a3..d76c4e2db064a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = ExceptionsHelper.requireNonNull(id, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java index 507060b1e51a4..27e895df5d415 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest { private final String modelId; public Request(String modelAlias, String modelId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java index 04f1b3ddb2e26..9254d9ecc1425 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { private final String modelId; public Request(String modelId) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, "model_id"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java index 64b042b61c2b6..305ed8c4fc607 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java @@ -60,6 +60,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index b270c4506ba4a..8fb1f3a91ab8b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -29,6 +29,7 @@ public static class Request extends MasterNodeRequest { private String[] jobIds; public Request(String[] jobIds) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobIds = jobIds; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java index bdba626676b2d..c24fc159769e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java @@ -27,11 +27,11 @@ private FlushTrainedModelCacheAction() { public static class Request extends AcknowledgedRequest { public Request() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } Request(TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 1bd266c68a65a..e509b84b06ae1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -50,6 +50,7 @@ public Request(String datafeedId) { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); local(true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 1a63eda0d687d..fafb9afa99f85 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -70,6 +70,7 @@ public static class Request extends 
MasterNodeReadRequest { private boolean allowNoMatch = true; public Request(String datafeedId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java index e5542593df4e4..ec49603c89cb8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java @@ -61,6 +61,7 @@ public static class Request extends MasterNodeReadRequest { + public static class Request extends MasterNodeRequest { + private final TimeValue requestTimeout; + + public Request(TimeValue masterNodeTimeout, TimeValue requestTimeout) { + super(masterNodeTimeout); + this.requestTimeout = Objects.requireNonNull(requestTimeout); + } + + @Deprecated(forRemoval = true) // temporary compatibility shim public Request(TimeValue timeout) { - super(timeout); + this(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { super(in); + this.requestTimeout = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeTimeValue(this.requestTimeout); + } + + public TimeValue requestTimeout() { + return requestTimeout; } @Override @@ -50,9 +70,14 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "get_ml_autoscaling_resources", parentTaskId, headers); } + @Override + public ActionRequestValidationException validate() { + return null; + } + @Override public int hashCode() { - return Objects.hash(ackTimeout()); + return Objects.hash(requestTimeout); } @Override @@ -64,7 +89,7 @@ public boolean equals(Object obj) { return false; } GetMlAutoscalingStats.Request other = (GetMlAutoscalingStats.Request) obj; - return Objects.equals(ackTimeout(), other.ackTimeout()); + return Objects.equals(requestTimeout, other.requestTimeout); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java index e8b345b3c3ff6..4664dbe8f7bc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java @@ -68,6 +68,7 @@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "nodeId"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index b6f852605db9f..cf17a828930c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -55,10 +55,12 @@ public static Request parseRequest(String jobId,
XContentParser parser) { private JobParams jobParams; public Request(JobParams jobParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = Objects.requireNonNull(jobParams); } public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = new JobParams(jobId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index fe26cdb0377fd..82db002e42043 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -62,6 +62,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 12e9b4f2967d0..f79d2af49f536 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, IndicesOptions indicesOpti private final DatafeedConfig datafeed; public Request(DatafeedConfig datafeed) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeed = datafeed; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 9d8fca699df2d..60d7f0008c0de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -51,6 +51,7 @@ public static Request parseRequest(String jobId, XContentParser parser, IndicesO public Request(Job.Builder jobBuilder) { // Validate the jobBuilder immediately so that errors can be detected prior to transportation. + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); jobBuilder.validateInputFields(); // Validate that detector configs are unique. 
// This validation logically belongs to validateInputFields call but we perform it only for PUT action to avoid BWC issues which diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java index 2e5a475369510..25d32d19aef8d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java @@ -75,6 +75,7 @@ public Request(TrainedModelConfig config, boolean deferDefinitionDecompression) } public Request(TrainedModelConfig config, boolean deferDefinitionDecompression, boolean waitForCompletion) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; this.deferDefinitionDecompression = deferDefinitionDecompression; this.waitForCompletion = waitForCompletion; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java index 9f0b5880f5c51..3ba91390f10d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java @@ -48,6 +48,7 @@ public static class Request extends AcknowledgedRequest { private final boolean reassign; public Request(String modelAlias, String modelId, boolean reassign) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.reassign = reassign; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index b7fcb98426cc0..a588f74426993 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -76,6 +76,7 @@ public Request( int totalParts, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.definition = ExceptionsHelper.requireNonNull(definition, DEFINITION); this.part = part; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index 1abae7be95011..106f37a378897 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -70,6 +70,7 @@ public Request( @Nullable List scores, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); 
this.vocabulary = ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY); this.merges = Optional.ofNullable(merges).orElse(List.of()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java index bc74f16eea0e5..548fd80da73de 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java @@ -57,6 +57,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index eb975133e71eb..0dd6fd8b59669 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -63,7 +63,9 @@ public static Request parseRequest(String jobId, String snapshotId, XContentPars private boolean deleteInterveningResults; private boolean force; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -74,6 +76,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String snapshotId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java index 9a1574bd2b036..821caf001f3e0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java @@ -43,6 +43,7 @@ public static class Request extends AcknowledgedRequest implements ToXC } public Request(boolean enabled) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java index 67abda2b3eb64..00e6a546be5a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java @@ -72,6 +72,7 @@ public static Request parseRequest(String id, XContentParser parser) { private TimeValue timeout = DEFAULT_TIMEOUT; public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setId(id); } @@ -81,7 +82,9 @@ public Request(StreamInput in) throws IOException { timeout = 
in.readTimeValue(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public final void setId(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 18763a78fa456..deeed6df87064 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -66,14 +66,17 @@ public static Request parseRequest(String datafeedId, XContentParser parser) { private DatafeedParams params; public Request(String datafeedId, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(String datafeedId, String startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(DatafeedParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = params; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 8d9da97538e11..b3cf9f16c3c82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -140,9 +140,12 @@ public static Request parseRequest(String modelId, String deploymentId, XContent private int queueCapacity = 1024; private Priority priority = Priority.NORMAL; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String modelId, String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setModelId(modelId); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java index d23f222b9687b..513a4d7b2ea8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java @@ -57,6 +57,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfigUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 694ca39d9cd49..0757f1f1dc7e7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, @Nullable IndicesOptions i private DatafeedUpdate update; 
public Request(DatafeedUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 15cd272d12b8b..33856bfcefbb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -46,6 +46,7 @@ public Request(String jobId, JobUpdate update) { } private Request(String jobId, JobUpdate update, boolean isInternal) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = jobId; this.update = update; this.isInternal = isInternal; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java index 5cd55a201c45d..fd1b179da8919 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java @@ -33,6 +33,7 @@ public static class Request extends MasterNodeRequest { private final RoutingInfoUpdate update; public Request(String nodeId, String deploymentId, RoutingInfoUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "node_id"); this.deploymentId = ExceptionsHelper.requireNonNull(deploymentId, "deployment_id"); this.update = ExceptionsHelper.requireNonNull(update, "update"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index bb113a9b3e1e8..62a7d84c60a62 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -64,9 +64,12 @@ public static Request parseRequest(String deploymentId, XContentParser parser) { private String deploymentId; private int numberOfAllocations; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java index 7fbcffa476159..abe481c926fdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java @@ -71,6 +71,7 @@ public static UpgradeJobModelSnapshotAction.Request parseRequest(XContentParser } public Request(String jobId, String snapshotId, TimeValue timeValue, boolean waitForCompletion) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID); this.timeout = timeValue == null ? DEFAULT_TIMEOUT : timeValue; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java index 8fcc977e3faeb..ea67dfdfb1857 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java @@ -37,10 +37,12 @@ public static class Request extends MasterNodeRequest { - public MonitoringMigrateAlertsRequest() {} + public MonitoringMigrateAlertsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public MonitoringMigrateAlertsRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 06a6b4c2a072c..7f1e81164a513 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -38,6 +38,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); public Request(RollupJobConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } @@ -48,6 +49,7 @@ public Request(StreamInput in) throws IOException { public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } public static Request fromXContent(final XContentParser parser, final String id) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java index 3cb7b5b07fc1b..fba742e288032 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java @@ -101,6 +101,7 @@ public MountSearchableSnapshotRequest( boolean waitForCompletion, Storage storage ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.mountedIndexName = Objects.requireNonNull(mountedIndexName); this.repositoryName = Objects.requireNonNull(repositoryName); this.snapshotName = Objects.requireNonNull(snapshotName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index 039ed8aa5fb64..f85ca260c3fff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,16 +166,4 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } - - public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { - var request = new PutRoleMappingRequest(); - request.setName(mapping.getName()); - request.setEnabled(mapping.isEnabled()); - request.setRoles(mapping.getRoles()); - request.setRoleTemplates(mapping.getRoleTemplates()); - request.setRules(mapping.getExpression()); - request.setMetadata(mapping.getMetadata()); - - return request; - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index 88a930063190b..d46c21f080308 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,8 +9,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -35,8 +34,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); + public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index bc8d81cd268ad..7623a7f65af34 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -34,9 +34,13 @@ public GetSecuritySettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void 
writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 20feb0faf5033..3cce133749e44 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -72,12 +72,14 @@ public Request( Map tokensIndexSettings, Map profilesIndexSettings ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = Objects.requireNonNullElse(mainIndexSettings, Collections.emptyMap()); this.tokensIndexSettings = Objects.requireNonNullElse(tokensIndexSettings, Collections.emptyMap()); this.profilesIndexSettings = Objects.requireNonNullElse(profilesIndexSettings, Collections.emptyMap()); } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = in.readGenericMap(); this.tokensIndexSettings = in.readGenericMap(); this.profilesIndexSettings = in.readGenericMap(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 49be4c5d466b2..eb4b7efdb88b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -49,7 +49,11 @@ static RoleDescriptor kibanaAdminUser(String name, Map metadata) null, null, metadata, - null + null, + null, + null, + null, + "Grants access to all features in Kibana." ); } @@ -408,7 +412,13 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm-*") }, null, null, - null + "Grants access necessary for the Kibana system user to read from and write to the Kibana indices, " + + "manage index templates and tokens, and check the availability of the Elasticsearch cluster. " + + "It also permits activating, searching, and retrieving user profiles, " + + "as well as updating user profile data for the kibana-* namespace. " + + "Additionally, this role grants read access to the .monitoring-* indices " + + "and read and write access to the .reporting-* indices. " + + "Note: This role should not be assigned to users as the granted permissions may change between releases." ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index dd8f34a60fa1f..2e7a5271103f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -103,7 +103,11 @@ public class ReservedRolesStore implements BiConsumer, ActionListene ) ), null, - null + "Grants full access to cluster management and data indices. 
" + + "This role also grants direct read-only access to restricted indices like .security. " + + "A user with this role can impersonate any other user in the system, " + + "manage security and create roles with unlimited privileges. " + + "Take extra care when assigning it to a user." ); private static final Map ALL_RESERVED_ROLES = initializeReservedRoles(); @@ -203,7 +207,12 @@ private static Map initializeReservedRoles() { getRemoteIndicesReadPrivileges("metricbeat-*") }, null, null, - null + "Grants the minimum privileges required for any user of X-Pack monitoring other than those required to use Kibana. " + + "This role grants access to the monitoring indices and grants privileges necessary " + + "for reading basic cluster information. " + + "This role also includes all Kibana privileges for the Elastic Stack monitoring features. " + + "Monitoring users should also be assigned the kibana_admin role, " + + "or another role with access to the Kibana instance." ) ), entry( @@ -232,7 +241,16 @@ private static Map initializeReservedRoles() { ) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants the minimum privileges required to write data into the monitoring indices (.monitoring-*). " + + "This role also has the privileges necessary to create Metricbeat indices (metricbeat-*) " + + "and write data into them." ) ), entry( @@ -251,7 +269,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to collect monitoring data for the Elastic Stack." ) ), entry( @@ -261,7 +283,14 @@ private static Map initializeReservedRoles() { new String[] { "manage_index_templates", "manage_pipeline" }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all index templates and all ingest pipeline configurations." ) ), // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role @@ -275,7 +304,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use Kibana feature privileges instead"), - null + null, + null, + null, + null, + "Grants the specific privileges required for users of X-Pack reporting other than those required to use Kibana. " + + "This role grants access to the reporting indices; each user has access to only their own reports. " + + "Reporting users should also be assigned additional roles that grant access to Kibana as well as read access " + + "to the indices that will be used to generate reports." ) ), entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)), @@ -286,7 +322,15 @@ private static Map initializeReservedRoles() { new String[] { "monitor", MonitoringBulkAction.NAME }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Logstash system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." 
) ), entry( @@ -297,7 +341,14 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".management-beats").privileges("all").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .management-beats index, which contains configuration information for the Beats." ) ), entry( @@ -311,7 +362,15 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Beats system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." ) ), entry( @@ -325,7 +384,14 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create_doc") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n" ) ), entry( @@ -381,7 +447,12 @@ private static Map initializeReservedRoles() { MetadataUtils.getDeprecatedReservedMetadata( "This role will be removed in a future major release. Please use editor and viewer roles instead" ), - null + null, + null, + null, + null, + "Grants the privileges required for APM users (such as read and view_index_metadata privileges " + + "on the apm-* and .ml-anomalies* indices)." ) ), entry( @@ -394,7 +465,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to manage inference models and performing inference." ) ), entry( @@ -407,7 +482,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to perform inference." ) ), entry( @@ -440,7 +519,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to view machine learning configuration, status, and work with results. " + + "This role grants monitor_ml cluster privileges, read access to the .ml-notifications and .ml-anomalies* indices " + + "(which store machine learning results), and write access to .ml-annotations* indices. " + + "Machine learning users also need index privileges for source and destination indices " + + "and roles that grant access to Kibana. " ) ), entry( @@ -474,7 +561,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Provides all of the privileges of the machine_learning_user role plus the full use of the machine learning APIs. " + + "Grants manage_ml cluster privileges, read access to .ml-anomalies*, .ml-notifications*, .ml-state*, " + + ".ml-meta* indices and write access to .ml-annotations* indices. " + + "Machine learning administrators also need index privileges for source and destination indices " + + "and roles that grant access to Kibana." 
) ), // DEPRECATED: to be removed in 9.0.0 @@ -501,7 +596,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_admin] role instead"), - null + null, + null, + null, + null, + "Grants manage_data_frame_transforms cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." ) ), // DEPRECATED: to be removed in 9.0.0 @@ -528,7 +628,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_user] role instead"), - null + null, + null, + null, + null, + "Grants monitor_data_frame_transforms cluster privileges, which enable you to use transforms. " + + "This role also includes all Kibana privileges for the machine learning features. " ) ), entry( @@ -549,7 +654,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants manage_transform cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -570,7 +680,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants monitor_transform cluster privileges, which enable you to perform read-only operations related to " + + "transforms. This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -585,7 +700,16 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Allows users to create and execute all Watcher actions. " + + "Grants read access to the .watches index. Also grants read access " + + "to the watch history and the triggered watches index." ) ), entry( @@ -604,7 +728,14 @@ private static Map initializeReservedRoles() { .privileges("read") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants read access to the .watches index, the get watch action and the watcher stats." ) ), entry( @@ -619,16 +750,50 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .logstash* indices for managing configurations, " + + "and grants necessary access for logstash-specific APIs exposed by the logstash x-pack plugin." ) ), entry( "rollup_user", - new RoleDescriptor("rollup_user", new String[] { "monitor_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_user", + new String[] { "monitor_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants monitor_rollup cluster privileges, which enable you to perform read-only operations related to rollups." 
+ ) ), entry( "rollup_admin", - new RoleDescriptor("rollup_admin", new String[] { "manage_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_admin", + new String[] { "manage_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants manage_rollup cluster privileges, which enable you to manage and execute all rollup actions." + ) ), entry( "snapshot_user", @@ -645,7 +810,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the necessary privileges to create snapshots of all the indices and to view their metadata. " + + "This role enables users to view the configuration of existing snapshot repositories and snapshot details. " + + "It does not grant authority to remove or add repositories or to restore snapshots. " + + "It also does not enable to change index settings or to read or update data stream or index data." ) ), entry( @@ -661,7 +833,14 @@ private static Map initializeReservedRoles() { .build(), RoleDescriptor.IndicesPrivileges.builder().indices(".enrich-*").privileges("manage", "write").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all enrich indices (.enrich-*) and all operations on ingest pipelines." ) ), entry("viewer", buildViewerRoleDescriptor()), @@ -703,7 +882,11 @@ private static RoleDescriptor buildViewerRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants read-only access to all features in Kibana (including Solutions) and to data indices." ); } @@ -750,7 +933,11 @@ private static RoleDescriptor buildEditorRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants full access to all features in Kibana (including Solutions) and read-only access to data indices." 
); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java index 17a23f6b66b5b..6e083295b0863 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java @@ -33,9 +33,12 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = Objects.requireNonNull(lifecycleId, "id may not be null"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java index 8a8ecf3a747a8..442ff6b2bfb66 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java @@ -36,6 +36,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private String lifecycleId; public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; } @@ -44,7 +45,9 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java index 9574ba7fff685..e4d698f48d252 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java @@ -26,7 +26,9 @@ protected ExecuteSnapshotRetentionAction() { public static class Request extends AcknowledgedRequest implements ToXContentObject { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java index d556c0fda5e7f..ad62b155da41c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private SnapshotLifecyclePolicy lifecycle; public Request(String lifecycleId, SnapshotLifecyclePolicy lifecycle) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; this.lifecycle = lifecycle; } @@ -46,7 +47,9 @@ public Request(StreamInput in) throws IOException { lifecycle = new SnapshotLifecyclePolicy(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java index d6deb7bda384f..666701ac1f885 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java index 60be1b99cde8d..4aae048b5e5b6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java index 3623c659216d2..79ae38745934d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deleteDestIndex; public Request(String id, boolean force, boolean deleteDestIndex, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; this.deleteDestIndex = deleteDestIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index f06ba16d9da78..adebbba651f16 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -58,7 +58,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private final TransformConfig config; public Request(TransformConfig config, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java index 9d335b2ccdb34..496e826651572 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java @@ -57,7 +57,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java index 609dd33cbfa9e..5840e107c1d17 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean force; public Request(String id, boolean force, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java index 3ecadd1b708cc..838a0650c8afa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java @@ -39,7 +39,7 @@ public static class Request extends AcknowledgedRequest { private final Instant from; public Request(String id, Instant from, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.from = from; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java index 3a36d9163e0c0..cdc0a53b6f0a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java @@ -40,7 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(boolean dryRun, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.dryRun = dryRun; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java index de6435ad31dbc..55c21b91b11d8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java @@ -36,7 +36,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java index 576bd220853ce..902c6db07dc89 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java @@ -30,9 +30,13 @@ public GetWatcherSettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 29f4db51e146e..b6d999ebbf380 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -39,10 +39,12 @@ public static class Request extends AcknowledgedRequest { private final Map settings; public Request(Map settings) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = settings; } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = in.readGenericMap(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java index 93cc7a18594d6..449179e4f18f6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java @@ -29,7 +29,9 @@ public WatcherServiceRequest(StreamInput in) throws IOException { command = Command.valueOf(in.readString().toUpperCase(Locale.ROOT)); } - public WatcherServiceRequest() {} + public WatcherServiceRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Starts the watcher service if not already started. 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java index ee265538829d3..eb0b8420625ac 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java @@ -23,11 +23,14 @@ protected Writeable.Reader instanceReader() { @Override protected Request createTestInstance() { - return new Request(randomTimeValue(0, 10_000)); + return new Request(TimeValue.THIRTY_SECONDS, randomTimeValue(0, 10_000)); } @Override protected Request mutateInstance(Request instance) throws IOException { - return new Request(TimeValue.timeValueMillis(instance.ackTimeout().millis() + randomIntBetween(1, 1000))); + return new Request( + TimeValue.THIRTY_SECONDS, + TimeValue.timeValueMillis(instance.requestTimeout().millis() + randomIntBetween(1, 1000)) + ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index 6ba7dc6ac24cd..9d3c4d684e194 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -21,6 +20,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -297,7 +297,7 @@ public void testAddDocMappingIfMissing() { {"_doc":{"properties":{"some-field":{"type":"long"}}}}""", client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, ActionTestUtils.assertNoFailureListener(Assert::assertTrue), 1 ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index f9fdc0c8362e5..f72ca14c37e14 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.MasterNodeRequest; import 
org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ClusterAdminClient; @@ -371,7 +370,7 @@ private void createIndexAndAliasIfNecessary(ClusterState clusterState) { TestIndexNameExpressionResolver.newInstance(), TEST_INDEX_PREFIX, TEST_INDEX_ALIAS, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, listener ); } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 13ef198863284..3376073bded02 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -331,6 +331,7 @@ public static class Request extends MasterNodeReadRequest implements In private String[] indices; public Request(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index 43601ab1b2943..ac5c5761efe13 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -44,6 +44,7 @@ public Request(StreamInput in) throws IOException { } public Request(String collectionName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.collectionName = collectionName; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index f9eeb2cca6d2e..d54c119e083ed 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -41,6 +41,7 @@ public static class Request extends MasterNodeReadRequest implements To public static ParseField NAMES_FIELD = new ParseField("names"); public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "Collection names cannot be null"); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 659c58d2bd1b8..108cebae155be 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -43,6 +43,7 @@ public Request(StreamInput in) throws IOException { } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); 
this.name = name; } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 072dc5265fe60..7c57212d0f574 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -118,15 +118,16 @@ public void testDoNotLogWithInfo() throws IOException { Map colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - Streams.readAllLines(log, line -> { assertThat(line, not(containsString("DO_NOT_LOG_ME"))); }); + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> assertThat(line, not(containsString("DO_NOT_LOG_ME")))); + } } } finally { setLoggingLevel(null); } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws IOException { try { setLoggingLevel("DEBUG"); @@ -136,15 +137,17 @@ public void testDoLogWithDebug() throws IOException { Map colA = Map.of("name", "DO_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - boolean[] found = new boolean[] { false }; - Streams.readAllLines(log, line -> { - if (line.contains("DO_LOG_ME")) { - found[0] = true; - } - }); - assertThat(found[0], equalTo(true)); + boolean[] found = new boolean[] { false }; + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> { + if (line.contains("DO_LOG_ME")) { + found[0] = true; + } + }); + } } + assertThat(found[0], equalTo(true)); } finally { setLoggingLevel(null); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 64c4641b2ca01..3f6ef72d84bc3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,7 +1,7 @@ # Examples that were published in a blog post 2023-08-08.full-blown-query -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 809f4e9ba2c74..c0572e7bbcd49 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,7 @@ avg(salary):double | always_false:boolean in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true @@ -236,7 +236,7 @@ emp_no:integer |languages:integer |byte2bool:boolean |short2bool:boolean ; mvSort -required_feature: 
esql.mv_sort +required_capability: mv_sort row a = [true, false, true, false] | eval sa = mv_sort(a), sb = mv_sort(a, "DESC"); @@ -245,7 +245,7 @@ a:boolean | sa:boolean | sb:boolean ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(is_rehired, "DESC"), sa = mv_sort(is_rehired) @@ -263,7 +263,7 @@ emp_no:integer | is_rehired:boolean | sa:boolean | sd:boolea ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); @@ -273,7 +273,7 @@ a:boolean | a1:boolean | a2:boolean ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(is_rehired, 0) @@ -290,7 +290,7 @@ emp_no:integer | is_rehired:boolean | a1:boolean ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -302,7 +302,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -323,7 +323,7 @@ still_hired:boolean | first_letter:keyword ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -347,7 +347,7 @@ still_hired:boolean | job_positions:keyword ; implicitCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired == "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -355,7 +355,7 @@ emp_no:integer ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired != "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -363,7 +363,7 @@ emp_no:integer ; implicitCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired in ("true", "false") | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -371,7 +371,7 @@ emp_no:integer ; implicitCastingInField -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where false in ("true", still_hired) | sort emp_no | keep emp_no | limit 1; emp_no:integer diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index aa6529c2d4319..508cccc20b86c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -6,7 +6,7 @@ # Test against a polygon similar in size to the Bottom Left polygon whereIntersectsSinglePolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -25,7 +25,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.001 0.001, 0.999 0.001, 0.999 0.999, 0.001 0.999, 0.001 0.001))")) @@ -38,7 +38,7 @@ id:l | 
name:keyword | shape:cartesian_shape ; whereWithinSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -53,7 +53,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSinglePolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -79,7 +79,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon smaller in size to the Bottom Left polygon whereIntersectsSmallerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -98,7 +98,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -111,7 +111,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -123,7 +123,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSmallerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -149,7 +149,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon similar in size to the entire test data whereIntersectsLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -180,7 +180,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -191,7 +191,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -222,7 +222,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -236,7 +236,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon larger than all test data whereIntersectsEvenLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -267,7 +267,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM 
cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -278,7 +278,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -309,7 +309,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointEvenLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64a8c1d9da316..d4b45ca37fc2d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -130,7 +130,7 @@ error_rate:double | hour:date nullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. @@ -142,7 +142,7 @@ a:integer | b:integer | sum:integer notNullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NOT NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. @@ -153,7 +153,7 @@ a:integer | b:integer | sum:integer nullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -166,7 +166,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -177,7 +177,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperationWithPartialMatch -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec index 43e683e165e29..94dfd9f3267f7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec @@ -1,7 +1,7 @@ // Conversion-specific tests convertToBoolean -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero=0::boolean, one=1::bool ; @@ -10,7 +10,7 @@ false |true ; convertToInteger -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::integer, one="1"::int ; @@ -19,7 +19,7 @@ ROW zero="0"::integer, one="1"::int ; convertToIP -required_feature: esql.casting_operator +required_capability: casting_operator ROW ip="1.1.1.1"::ip ; @@ -28,7 +28,7 @@ ROW ip="1.1.1.1"::ip ; convertToLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="-1"::long ; @@ -37,7 +37,7 @@ long:long ; convertToLongWithWarning -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="1.1.1.1"::long ; warning:Line 1:10: evaluation of [\"1.1.1.1\"::long] failed, treating result as null. Only first 20 failures recorded. @@ -48,7 +48,7 @@ null ; convertToDouble -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::double ; @@ -57,7 +57,7 @@ ROW zero="0"::double ; convertToString -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::keyword, two=2::text, three=3::string ; @@ -66,7 +66,7 @@ ROW one=1::keyword, two=2::text, three=3::string ; convertToDatetime -required_feature: esql.casting_operator +required_capability: casting_operator ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; @@ -75,7 +75,7 @@ ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; convertToVersion -required_feature: esql.casting_operator +required_capability: casting_operator ROW ver="1.2.3"::version ; @@ -84,7 +84,7 @@ ROW ver="1.2.3"::version ; convertToUnsignedLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; @@ -93,7 +93,7 @@ ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; convertToGeoPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW gp="POINT(0 0)"::geo_point ; @@ -102,7 +102,7 @@ POINT (0.0 0.0) ; convertToGeoShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW gs="POINT(0 0)"::geo_shape ; @@ -111,7 +111,7 @@ POINT (0.0 0.0) ; convertToCartesianPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW cp="POINT(0 0)"::cartesian_point ; @@ -120,7 +120,7 @@ POINT (0.0 0.0) ; convertToCartesianShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW cs="POINT(0 0)"::cartesian_shape ; @@ -129,7 +129,7 @@ POINT (0.0 0.0) ; convertChained -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::STRING::LONG::BOOL ; @@ -138,7 +138,7 @@ true ; convertWithIndexMultipleConversionsInSameExpressionAndConversionInFiltering -required_feature: esql.casting_operator +required_capability: casting_operator FROM employees | EVAL en_str=emp_no::STRING, bd=ABS(birth_date::LONG)::STRING | KEEP en_str, emp_no, bd, birth_date @@ -153,7 +153,7 @@ required_feature: esql.casting_operator ; 
convertWithBoolExpressionAndQualifiedName -required_feature: esql.casting_operator +required_capability: casting_operator FROM employees | EVAL neg = (NOT still_hired)::string, sf = ROUND(height.scaled_float::double, 2) | KEEP emp_no, still_hired, neg, sf @@ -169,7 +169,7 @@ required_feature: esql.casting_operator ; docsCastOperator -required_feature: esql.casting_operator +required_capability: casting_operator //tag::docsCastOperator[] ROW ver = CONCAT(("0"::INT + 1)::STRING, ".2.3")::VERSION //end::docsCastOperator[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8d54288de552d..22e9231939d02 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,7 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. @@ -357,7 +357,7 @@ date1:date | date2:date | dd_ms:integer ; evalDateDiffString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date1 = TO_DATETIME("2023-12-02T11:00:00.000Z") | EVAL dd_ms = DATE_DIFF("microseconds", date1, "2023-12-02T11:00:00.001Z") @@ -623,7 +623,7 @@ dt:datetime |plus_post:datetime |plus_pre:datetime datePlusQuarter # "quarter" introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T01:01:01.000Z") | eval plusQuarter = dt + 2 quarters ; @@ -634,7 +634,7 @@ dt:datetime | plusQuarter:datetime datePlusAbbreviatedDurations # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms ; @@ -645,7 +645,7 @@ dt:datetime | plusDurations:datetime datePlusAbbreviatedPeriods # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d ; @@ -855,7 +855,7 @@ date:date | year:long ; dateExtractString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") | EVAL year = DATE_EXTRACT("year", "2022-05-06") @@ -896,7 +896,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-06-02 ; evalDateFormatString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | EVAL df = DATE_FORMAT("YYYY-MM-dd", "1989-06-02T00:00:00.000Z") @@ -925,7 +925,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-01-01T00:00:00.000 ; evalDateTruncString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | EVAL year_hired = DATE_TRUNC(1 year, "1991-06-26T00:00:00.000Z") @@ -990,7 +990,7 @@ FROM sample_data ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["1985-01-01T00:00:00.000Z", "1986-01-01T00:00:00.000Z", "1987-01-01T00:00:00.000Z"] | eval datetime = TO_DATETIME(a) @@ -1019,7 +1019,7 @@ count:long | age:long ; values 
-required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10003 @@ -1031,7 +1031,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1052,7 +1052,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1077,7 +1077,7 @@ required_feature: esql.agg_values ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date != "1957-05-23T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1087,7 +1087,7 @@ emp_no:integer | birth_date:datetime ; implicitCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date <= "1957-05-20T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1097,7 +1097,7 @@ emp_no:integer | birth_date:datetime ; implicitCastingGreaterThan -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date > "1957-05-24T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f044989ec9cce..bd384886f0dd7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -32,7 +32,7 @@ median_duration:double | env:keyword ; simple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich[] ROW language_code = "1" @@ -47,7 +47,7 @@ language_code:keyword | language_name:keyword ; enrichOnSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_on[] ROW a = "1" @@ -63,7 +63,7 @@ a:keyword | language_name:keyword enrichOn -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; @@ -73,7 +73,7 @@ emp_no:integer | language_name:keyword enrichOn2 -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; @@ -83,7 +83,7 @@ emp_no:integer | language_name:keyword simpleSortLimit -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; @@ -92,7 +92,7 @@ emp_no:integer | language_name:keyword ; with -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 | enrich languages_policy on x with language_name; @@ -103,7 +103,7 @@ emp_no:integer | x:keyword | language_name:keyword withSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_with[] ROW a = "1" @@ -119,7 +119,7 @@ a:keyword | language_name:keyword withAlias -required_feature: 
esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name; @@ -131,7 +131,7 @@ emp_no:integer | x:keyword | lang:keyword ; withAliasSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_rename[] ROW a = "1" @@ -147,7 +147,7 @@ a:keyword | name:keyword withAliasSort -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 | enrich languages_policy on x with lang = language_name; @@ -160,7 +160,7 @@ emp_no:integer | x:keyword | lang:keyword withAliasOverwriteName#[skip:-8.13.0] -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name @@ -172,7 +172,7 @@ French ; withAliasAndPlain -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name, language_name; @@ -185,7 +185,7 @@ emp_no:integer | x:keyword | lang:keyword | language_name:keyword withTwoAliasesSameProp -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name, lang2 = language_name; @@ -196,7 +196,7 @@ emp_no:integer | x:keyword | lang:keyword | lang2:keyword redundantWith -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with language_name, language_name; @@ -207,7 +207,7 @@ emp_no:integer | x:keyword | language_name:keyword nullInput -required_feature: esql.enrich_load +required_capability: enrich_load from employees | where emp_no == 10017 | keep emp_no, gender | enrich languages_policy on gender with language_name, language_name; @@ -218,7 +218,7 @@ emp_no:integer | gender:keyword | language_name:keyword constantNullInput -required_feature: esql.enrich_load +required_capability: enrich_load from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with language_name, language_name; @@ -229,7 +229,7 @@ emp_no:integer | x:keyword | language_name:keyword multipleEnrich -required_feature: esql.enrich_load +required_capability: enrich_load row a = "1", b = "2", c = "10" | enrich languages_policy on a with a_lang = language_name @@ -242,7 +242,7 @@ a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:key enrichEval -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x with lang = language_name @@ -258,8 +258,8 @@ emp_no:integer | x:keyword | lang:keyword | language:keyword multivalue -required_feature: esql.enrich_load -required_feature: esql.mv_sort +required_capability: enrich_load +required_capability: mv_sort row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); @@ -269,7 +269,7 @@ a:keyword | a_lang:keyword enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM 
sample_data | ENRICH client_cidr_policy ON client_ip WITH env @@ -290,7 +290,7 @@ client_ip:ip | count_env:i | max_env:keyword enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -required_feature: esql.enrich_load +required_capability: enrich_load FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env, client_cidr @@ -310,7 +310,7 @@ client_ip:ip | env:keyword | client_cidr:ip_range enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | WHERE birth_date > "1960-01-01" @@ -333,7 +333,7 @@ birth_year:long | age_group:keyword | count:long enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | WHERE birth_date IS NOT NULL @@ -350,7 +350,7 @@ count:long | age_group:keyword enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | ENRICH heights_policy ON height WITH height_group = description @@ -369,7 +369,7 @@ Very Tall | 2.0 | 2.1 | 20 enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description @@ -390,7 +390,7 @@ null | 1980 | null | Radical Eighties | 4 spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "CPH" @@ -405,7 +405,7 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "CPH" @@ -420,8 +420,8 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load -required_feature: esql.mv_warn +required_capability: enrich_load +required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary @@ -437,7 +437,7 @@ POINT(1.396561 24.127649) | 872 | 88 | 1044 spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary @@ -455,7 +455,7 @@ count:long | airport_in_city:boolean spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary @@ -473,7 +473,7 @@ count:long | centroid:geo_point | airport_in_city:boolean spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "IDR" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 85b665d717449..571d7835451c3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -201,7 
+201,7 @@ Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; roundArrays -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 8af770c521243..1f2bcb6b51209 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -92,7 +92,7 @@ int:integer |dbl:double ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded. @@ -108,7 +108,7 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded. @@ -124,7 +124,7 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -136,7 +136,7 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +149,7 @@ emp_no:integer |salary_change:double ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -162,7 +162,7 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.] @@ -178,7 +178,7 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change > 1] failed, treating result as null. 
Only first 20 failures recorded.] @@ -194,7 +194,7 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change == 1.19)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded.] @@ -241,7 +241,7 @@ row a = [1.1, 2.1, 2.1] | eval da = mv_dedupe(a); ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change, 0, 1) @@ -436,7 +436,7 @@ ROW deg = [90.0, 180.0, 270.0] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = [4.0, 2.0, -3.0, 2.0] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -445,7 +445,7 @@ a:double | sa:double | sd:double ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change, "DESC"), sa = mv_sort(salary_change) @@ -467,7 +467,7 @@ emp_no:integer | salary_change:double | sa:double | sd:double ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -479,7 +479,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -500,7 +500,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index c2c0b82f1a664..00a8c0da8f14c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -130,7 +130,7 @@ c:l | name:k ; convertFromDatetimeWithOptions -required_feature: esql.from_options +required_capability: from_options // tag::convertFromDatetimeWithOptions[] FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 69ae951e4290d..e247d6c3a04ef 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,7 @@ // Integral types-specific tests inLongAndInt -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warning:Line 1:24: evaluation of [avg_worked_seconds in (372957040, salary_change.long, 236703986)] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +68,7 @@ long:long |ul:ul ; convertDoubleToUL -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -127,7 +127,7 @@ int:integer |long:long ; convertULToLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +170,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +190,7 @@ int:integer |ii:integer ; convertLongToInt -required_feature:esql.convert_warn +required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +207,7 @@ long:long |int:integer ; convertULToInt -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +239,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +254,7 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +265,7 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +281,7 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +297,7 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int == 0] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +312,7 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int] failed, treating result as null. 
Only first 20 failures recorded. @@ -325,7 +325,7 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int in (1, 7)] failed, treating result as null. Only first 20 failures recorded. @@ -338,7 +338,7 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded.] @@ -354,7 +354,7 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded.] @@ -370,7 +370,7 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int == 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int == 1] failed, treating result as null. Only first 20 failures recorded.] 
@@ -417,7 +417,7 @@ row a = [1, 2, 2, 3] | eval da = mv_dedupe(a); ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_sort[] ROW a = [4, 2, -3, 2] @@ -432,7 +432,7 @@ a:integer | sa:integer | sd:integer ; mvSortEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.int, "DESC"), sa = mv_sort(salary_change.int) @@ -454,7 +454,7 @@ emp_no:integer | salary_change.int:integer | sa:integer | sd:integer ; mvSortEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.long, "DESC"), sa = mv_sort(salary_change.long) @@ -476,7 +476,7 @@ emp_no:integer | salary_change.long:long | sa:long | sd:long ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_positive[] row a = [1, 2, 2, 3] @@ -490,7 +490,7 @@ a:integer | a1:integer | a2:integer ; mvSliceNegativeOffset -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_negative[] row a = [1, 2, 2, 3] @@ -504,7 +504,7 @@ a:integer | a1:integer | a2:integer ; mvSliceSingle -required_feature: esql.mv_sort +required_capability: mv_sort row a = 1 | eval a1 = mv_slice(a, 0); @@ -514,7 +514,7 @@ a:integer | a1:integer ; mvSliceOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 4), a2 = mv_slice(a, 2, 6), a3 = mv_slice(a, 4, 6); @@ -524,7 +524,7 @@ a:integer | a1:integer | a2:integer | a3:integer ; mvSliceEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 0, 1) @@ -541,7 +541,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntSingle -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1) @@ -558,7 +558,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntEndOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1, 4) @@ -575,7 +575,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 2, 4) @@ -592,7 +592,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntStartOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -2) @@ -609,7 +609,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -3) @@ -626,7 +626,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.long, 0, 1) @@ -750,7 +750,7 @@ x:long ; valuesLong -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -762,7 +762,7 @@ required_feature: esql.agg_values ; valuesLongGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -783,7 +783,7 @@ required_feature: esql.agg_values ; valuesLongGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ 
-807,7 +807,7 @@ required_feature: esql.agg_values ; valuesInt -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -819,7 +819,7 @@ required_feature: esql.agg_values ; valuesIntGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -840,7 +840,7 @@ l:integer | first_letter:keyword ; valuesIntGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -864,7 +864,7 @@ required_feature: esql.agg_values ; valuesShort -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -876,7 +876,7 @@ required_feature: esql.agg_values ; valuesShortGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -897,7 +897,7 @@ l:integer | first_letter:keyword ; valuesShortGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 8d3c0c9186c6c..ae683acbb2c3a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warning:Line 1:38: evaluation of [ip0 == ip1] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +60,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +73,7 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 != ip1] failed, treating result as null. Only first 20 failures recorded. 
@@ -125,7 +125,7 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +146,7 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +168,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +188,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. @@ -199,7 +199,7 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +213,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_feature: esql.mv_warn +required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +233,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +246,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -294,7 +294,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -308,7 +308,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true @@ -324,7 +324,7 @@ eth0 |fe80::cae2:65ff:fece:fec1 ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort FROM hosts | eval sd = mv_sort(ip1, "DESC"), sa = mv_sort(ip1) @@ -342,7 +342,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | where host == "epsilon" @@ -358,7 +358,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | where host == "epsilon" @@ -374,7 +374,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | eval zip = mv_zip(to_string(description), to_string(ip0), "@@") @@ -392,7 +392,7 @@ epsilon | null | 
null ; values -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) @@ -403,7 +403,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | EVAL host=SUBSTRING(host, 0, 1) @@ -419,7 +419,7 @@ fe80::cae2:65ff:fece:feb9 | g ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) BY host @@ -434,7 +434,7 @@ fe80::cae2:65ff:fece:feb9 | gamma ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) == "127.0.0.1" | keep host, ip0 | sort host; host:keyword | ip0:ip @@ -445,7 +445,7 @@ beta | 127.0.0.1 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) != "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -455,7 +455,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) > "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -465,7 +465,7 @@ gamma | fe80::cae2:65ff:fece:feb9 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) <= "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -475,7 +475,7 @@ beta | 127.0.0.1 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) in ( "127.0.0.1", "::1") | keep host, ip0 | sort host, ip0; host:keyword | ip0:ip diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index e0604acbcce1d..4e080bac0ed2e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -201,7 +201,7 @@ height:double | s:double ; powSalarySquared -required_feature: esql.pow_double +required_capability: pow_double from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; @@ -618,7 +618,7 @@ base:double | exponent:integer | result:double ; powIntInt -required_feature: esql.pow_double +required_capability: pow_double ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) @@ -629,7 +629,7 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_feature: esql.pow_double +required_capability: pow_double row s = 1 + pow(2, 2); @@ -645,7 +645,7 @@ s:double ; powIntUL -required_feature: esql.pow_double +required_capability: pow_double row x = pow(1, 9223372036854775808); @@ -654,7 +654,7 @@ x:double ; powLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(1) | eval x = pow(x, 9223372036854775808); @@ -663,7 +663,7 @@ x:double ; powUnsignedLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_ul(1) | eval x = pow(x, 9223372036854775808); @@ -688,7 +688,7 @@ null ; powULInt 
-required_feature: esql.pow_double +required_capability: pow_double row x = pow(to_unsigned_long(9223372036854775807), 1); @@ -697,7 +697,7 @@ x:double ; powULIntOverrun -required_feature: esql.pow_double +required_capability: pow_double ROW x = POW(9223372036854775808, 2) ; @@ -719,7 +719,7 @@ x:double ; powULLong -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); @@ -728,7 +728,7 @@ x:double ; powULLongOverrun -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); @@ -1414,7 +1414,7 @@ Anneke |Preusig |1.56 |1.56 ; evalAbsString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW number = -1.0 | EVAL abs_number = ABS("10.0") @@ -1425,7 +1425,7 @@ number:double | abs_number:double ; functionUnderArithmeticOperationAggString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days @@ -1437,7 +1437,7 @@ count():long | y:date ; functionUnderArithmeticOperationString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index bcb9718048085..b4cd18f728858 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -1,5 +1,5 @@ simpleKeep -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -8,7 +8,7 @@ emp_no:integer |_index:keyword |_version:long ; aliasWithSameName -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -17,7 +17,7 @@ emp_no:integer |_index:keyword |_version:long ; inComparison -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where _index == "employees" | where _version == 1 | keep emp_no | limit 2; emp_no:integer @@ -26,7 +26,7 @@ emp_no:integer ; metaIndexInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::metaIndexInAggs[] FROM employees METADATA _index, _id | STATS max = MAX(emp_no) BY _index @@ -40,7 +40,7 @@ max:integer |_index:keyword ; metaIndexAliasedInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; @@ -49,7 +49,7 @@ max:integer |_i:keyword ; metaVersionInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | stats min = min(emp_no) by _version; min:integer |_version:long @@ -57,7 +57,7 @@ min:integer |_version:long ; metaVersionAliasedInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval _v = 
_version | stats min = min(emp_no) by _v; min:integer |_v:long @@ -65,7 +65,7 @@ min:integer |_v:long ; inAggsAndAsGroups -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | stats max = max(_version) by _index; max:long |_index:keyword @@ -73,7 +73,7 @@ max:long |_index:keyword ; inAggsAndAsGroupsAliased -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval _i = _index, _v = _version | stats max = max(_v) by _i; max:long |_i:keyword @@ -81,7 +81,7 @@ max:long |_i:keyword ; inFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where length(_index) == length("employees") | where abs(_version) == 1 | keep emp_no | limit 2; emp_no:integer @@ -90,7 +90,7 @@ emp_no:integer ; inArithmetics -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval i = _version + 2 | stats min = min(emp_no) by i; min:integer |i:long @@ -98,7 +98,7 @@ min:integer |i:long ; inSort -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort _version, _index, emp_no | keep emp_no, _version, _index | limit 2; emp_no:integer |_version:long |_index:keyword @@ -107,7 +107,7 @@ emp_no:integer |_version:long |_index:keyword ; withMvFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval i = mv_avg(_version) + 2 | stats min = min(emp_no) by i; min:integer |i:double @@ -115,7 +115,7 @@ min:integer |i:double ; overwritten -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | eval _index = 3, _version = "version" | keep emp_no, _index, _version | limit 3; emp_no:integer |_index:integer |_version:keyword @@ -125,7 +125,7 @@ emp_no:integer |_index:integer |_version:keyword ; multipleIndices -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::multipleIndices[] FROM ul_logs, apps METADATA _index, _version | WHERE id IN (13, 14) AND _version == 1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 26fcca423d28d..6d6b3b0782a98 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,7 @@ ############################################### convertFromStringQuantize -required_feature: esql.spatial_points +required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); @@ -13,7 +13,7 @@ POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006 ; convertFromString -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str[] ROW wkt = "POINT(42.97109630194 14.7552534413725)" @@ -28,7 +28,7 @@ wkt:keyword |pt:geo_point ; convertFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geopoint(wkt); @@ -38,7 +38,7 @@ wkt:keyword ; centroidFromStringNested -required_feature: 
esql.st_centroid_agg +required_capability: st_centroid_agg row wkt = "POINT(42.97109629958868 14.7552534006536)" | STATS c = ST_CENTROID_AGG(TO_GEOPOINT(wkt)); @@ -48,7 +48,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt @@ -60,7 +60,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt @@ -72,7 +72,7 @@ POINT(59.390193899162114 18.741501288022846) ; centroidFromString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -84,7 +84,7 @@ POINT(39.58327988510707 20.619513023697994) ; centroidFromString4 -required_feature: esql.st_x_y +required_capability: st_x_y ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -97,7 +97,7 @@ POINT(39.58327988510707 20.619513023697994) | 39.58327988510707 | 20.61951302369 ; stXFromString -required_feature: esql.st_x_y +required_capability: st_x_y // tag::st_x_y[] ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") @@ -112,7 +112,7 @@ POINT(42.97109629958868 14.7552534006536) | 42.97109629958868 | 14.755253400653 ; simpleLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -132,7 +132,7 @@ ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran ; stXFromAirportsSupportsNull -required_feature: esql.st_x_y +required_capability: st_x_y FROM airports | EVAL x = FLOOR(ABS(ST_X(city_location))/200), y = FLOOR(ABS(ST_Y(city_location))/100) @@ -149,7 +149,7 @@ c:long | x:double | y:double # Tests for ST_CENTROID on GEO_POINT type centroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg // tag::st_centroid_agg-airports[] FROM airports @@ -164,7 +164,7 @@ POINT(-0.030548143003023033 24.37553649504829) ; centroidFromAirportsNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(TO_GEOPOINT(location)) @@ -175,7 +175,7 @@ POINT (-0.03054810272375508 24.37553651570554) ; centroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() @@ -186,7 +186,7 @@ POINT(-0.030548143003023033 24.37553649504829) | 891 ; centroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -205,7 +205,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -217,7 +217,7 @@ POINT(83.27726172452623 28.99289782286029) | 33 ; centroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS 
centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -229,7 +229,7 @@ POINT (7.572387259169772 26.836561792945492) | 891 ; centroidFromAirportsCountCityLocations -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() @@ -240,7 +240,7 @@ POINT (1.3965610809060276 24.127649406297987) | 891 ; centroidFromAirportsCountGroupedCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -269,7 +269,7 @@ POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; centroidFromAirportsFilteredCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United States" @@ -281,7 +281,7 @@ POINT (-97.3333946136801 38.07953176370194) | 129 ; centroidFromAirportsCountGroupedCountryCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -293,7 +293,7 @@ POINT (17.55538044598613 18.185558743854063) | 891 ; centroidFromAirportsCountryCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS airports=ST_CENTROID_AGG(location), cities=ST_CENTROID_AGG(city_location), count=COUNT() @@ -304,7 +304,7 @@ POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.12 ; centroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -318,7 +318,7 @@ POINT(78.73736493755132 26.761841227998957) | 12 ; centroidFromAirportsAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -330,7 +330,7 @@ POINT(2.121611400672094 24.559172889205755) | 933 ; centroidFromAirportsGroupedAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -350,7 +350,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsGroupedAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -363,7 +363,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; centroidFromAirportsAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -376,7 +376,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 ; centroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United Kingdom" @@ -388,7 +388,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterIntersectsPredicateCountryUK -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -400,7 +400,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterContainsPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: 
st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) @@ -412,7 +412,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterWithinPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -424,7 +424,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE country == "United Kingdom" @@ -443,7 +443,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 | true ; centroidFromAirportsAfterIntersectsEvalExpression -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -461,7 +461,7 @@ POINT (0.04453958108176276 23.74658354606057) | 873 | false ; centroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -473,7 +473,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 ; centroidFromAirportsAfterIntersectsCompoundPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" @@ -488,7 +488,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 # Tests for ST_INTERSECTS on GEO_POINT type pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects // tag::st_intersects-airports[] FROM airports @@ -503,7 +503,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -514,7 +514,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -528,7 +528,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -542,7 +542,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = 
ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -552,7 +552,7 @@ true ; cityInCityBoundary -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | EVAL in_city = ST_INTERSECTS(city_location, city_boundary) @@ -568,7 +568,7 @@ cardinality:k | in_city:boolean ; cityNotInCityBoundaryBiggest -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE NOT ST_INTERSECTS(city_location, city_boundary) @@ -583,7 +583,7 @@ SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.253 ; airportCityLocationPointIntersection -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -594,7 +594,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointIntersectionCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -609,7 +609,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_DISJOINT on GEO_POINT type literalPolygonDisjointLiteralPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -623,7 +623,7 @@ wkt:keyword | pt:geo_point ; literalPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -637,7 +637,7 @@ wkt:keyword | pt:geo_point ; literalPolygonDisjointLiteralPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -647,7 +647,7 @@ false ; literalPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -657,7 +657,7 @@ true ; pointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports | WHERE ST_DISJOINT(location, TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) @@ -679,7 +679,7 @@ x:double | y:double | count:long ; airportCityLocationPointDisjointCentroid -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_mp | WHERE ST_DISJOINT(location, city_location) @@ -694,7 +694,7 @@ POINT (67.8581917192787 24.02956652920693) | POINT (67.81638333333332 24.0489999 # Tests for ST_CONTAINS on GEO_POINT type literalPolygonContainsLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -708,7 +708,7 @@ wkt:keyword | pt:geo_point ; literalPointDoesNotContainLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -720,7 +720,7 @@ wkt:keyword | pt:geo_point ; literalPolygonContainsLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -730,7 +730,7 @@ true ; literalPointDoesNotContainLiteralPolygonOneRow -required_feature: 
esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -740,7 +740,7 @@ false ; pointContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -750,7 +750,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword ; pointContainedInLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -761,7 +761,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointContains -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -772,7 +772,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointContainsCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -787,7 +787,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_WITHIN on GEO_POINT type literalPolygonNotWithinLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -799,7 +799,7 @@ wkt:keyword | pt:geo_point ; literalPointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -813,7 +813,7 @@ wkt:keyword | pt:geo_point ; literalPolygonNotWithinLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -823,7 +823,7 @@ false ; literalPointWithinLiteralPolygonOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -833,7 +833,7 @@ true ; pointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airports[] FROM airports @@ -848,7 +848,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointWithin -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) @@ -859,7 +859,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointWithinCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) @@ -874,7 +874,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for Equality and casting with GEO_POINT geoPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -891,7 +891,7 @@ wkt:keyword |pt:geo_point ; geoPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-not-equals[] ROW wkt = 
["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -908,7 +908,7 @@ wkt:keyword |pt:geo_point ; convertFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str-parse-error[] row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] @@ -936,7 +936,7 @@ wkt:keyword |pt:geo_point ############################################### convertCartesianFromString -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -953,7 +953,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianpoint(wkt); @@ -963,7 +963,7 @@ wkt:keyword |pt:cartesian_point ; centroidCartesianFromStringNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg row wkt = "POINT(4297.10986328125 -1475.530029296875)" | STATS c = ST_CENTROID_AGG(TO_CARTESIANPOINT(wkt)); @@ -973,7 +973,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt @@ -985,7 +985,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt @@ -997,7 +997,7 @@ POINT(5939.02001953125 398.6199951171875) ; centroidFromCartesianString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt @@ -1009,7 +1009,7 @@ POINT(3949.163965353159 1078.2645465797348) ; stXFromCartesianString -required_feature: esql.st_x_y +required_capability: st_x_y ROW point = TO_CARTESIANPOINT("POINT(4297.10986328125 -1475.530029296875)") | EVAL x = ST_X(point), y = ST_Y(point) @@ -1020,7 +1020,7 @@ POINT(4297.10986328125 -1475.530029296875) | 4297.10986328125 | -1475.530029296 ; simpleCartesianLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -1039,7 +1039,7 @@ ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l # Tests for ST_CENTROID on CARTESIAN_POINT type cartesianCentroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location); @@ -1049,7 +1049,7 @@ POINT(-266681.67563861894 3053301.5120195406) ; cartesianCentroidFromAirportsNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(TO_CARTESIANPOINT(location)); @@ -1059,7 +1059,7 @@ POINT (-266681.66530554957 3053301.506061676) ; cartesianCentroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), 
count=COUNT() @@ -1070,7 +1070,7 @@ POINT(-266681.67563861894 3053301.5120195406) | 849 ; cartesianCentroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1089,7 +1089,7 @@ POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; cartesianCentroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1101,7 +1101,7 @@ POINT(9289013.153846154 3615537.0533353365) | 26 ; cartesianCentroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1115,7 +1115,7 @@ POINT(9003597.4375 3429344.0078125) | 8 ; cartesianCentroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1130,7 +1130,7 @@ POINT (726480.0130685265 3359566.331716279) | 849 # Tests for ST_INTERSECTS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1142,7 +1142,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1153,7 +1153,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1167,7 +1167,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointIntersectsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1178,7 +1178,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1189,7 +1189,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1202,7 +1202,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1215,7 +1215,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsMultiLineString -required_feature: 
esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1229,7 +1229,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointIntersectsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1241,7 +1241,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1253,7 +1253,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsLiteralPolygonCount -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1268,7 +1268,7 @@ count:long # Tests for ST_DISJOINT on CARTESIAN_POINT type literalPolygonDisjointLiteralCartesianPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1282,7 +1282,7 @@ wkt:keyword | pt:cartesian_point ; literalCartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1296,7 +1296,7 @@ wkt:keyword | pt:cartesian_point ; literalPolygonDisjointLiteralCartesianPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_CARTESIANPOINT("POINT(0 0)")) ; @@ -1306,7 +1306,7 @@ false ; literalCartesianPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -1316,7 +1316,7 @@ true ; cartesianPointDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1328,7 +1328,7 @@ count:long ; cartesianPointIntersectsDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1344,7 +1344,7 @@ false | true | 405 ; cartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1365,7 +1365,7 @@ x:double | y:double | count:long ; cartesianPointDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("LINESTRING()")) @@ -1380,7 
+1380,7 @@ count:long ; cartesianPointDisjointInvalidGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) @@ -1398,7 +1398,7 @@ count:long # Tests for ST_CONTAINS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1410,7 +1410,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1421,7 +1421,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1435,7 +1435,7 @@ wkt:keyword | pt:cartesian_point ; cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1447,7 +1447,7 @@ POINT (NaN NaN) | 0 ; cartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1457,7 +1457,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; literalCartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1469,7 +1469,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointContainsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1480,7 +1480,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1491,7 +1491,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1502,7 +1502,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) 
@@ -1513,7 +1513,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1524,7 +1524,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1536,7 +1536,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointContainsPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1551,7 +1551,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for ST_WITHIN on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterWithinPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1563,7 +1563,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1574,7 +1574,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1588,7 +1588,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointWithinPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1599,7 +1599,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1610,7 +1610,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1623,7 +1623,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1636,7 +1636,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web 
| WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1650,7 +1650,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointWithinPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1662,7 +1662,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1677,7 +1677,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for Equality and casting with GEO_POINT cartesianPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1694,7 +1694,7 @@ wkt:keyword |pt:cartesian_point ; cartesianPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-not-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1711,7 +1711,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str-parse-error[] row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 6d0d15c398986..dd092130c3406 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -3,7 +3,7 @@ # convertFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_geoshape-str[] ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" @@ -18,7 +18,7 @@ wkt:keyword | geom:geo_shape ; convertFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geoshape(wkt); @@ -28,7 +28,7 @@ wkt:keyword ; convertFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (30 10)" | EVAL point = TO_GEOPOINT(wkt) @@ -41,7 +41,7 @@ wkt:keyword | point:geo_point | shape:geo_shape # need to work out how to upload WKT simpleLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox | WHERE id == "ISL"; @@ -50,7 +50,7 @@ ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) ; simpleLoadPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM airports | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -80,7 +80,7 @@ CPH | Københavns Kommune | POINT(12.5683 55.6761) | Copenhagen # Tests for ST_INTERSECTS with GEO_SHAPE pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -93,7 +93,7 @@ 
HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; polygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) @@ -106,7 +106,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -119,7 +119,7 @@ HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -133,7 +133,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -147,7 +147,7 @@ wkt:keyword | pt:geo_point ; literalPointAsShapeIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -161,7 +161,7 @@ wkt:keyword | pt:geo_shape ; literalPointAsShapeIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -175,7 +175,7 @@ wkt:keyword | pt:geo_shape ; shapeIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM countries_bbox | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) @@ -189,7 +189,7 @@ LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) ; literalPolygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] | EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") @@ -204,7 +204,7 @@ wkt:keyword | shape:geo_shape ; literalPolygonIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) ; @@ -217,7 +217,7 @@ true # Tests for ST_DISJOINT with GEO_SHAPE polygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint // tag::st_disjoint-airport_city_boundaries[] FROM airport_city_boundaries @@ -238,7 +238,7 @@ ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco d # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE polygonContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_contains-airport_city_boundaries[] FROM airport_city_boundaries @@ -255,7 +255,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; polygonWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airport_city_boundaries[] FROM airport_city_boundaries @@ -275,7 +275,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 # Tests for Equality and casting 
with GEO_SHAPE geo_shapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -288,7 +288,7 @@ wkt:keyword |pt:geo_shape ; geo_shapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -301,7 +301,7 @@ wkt:keyword |pt:geo_shape ; convertFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt @@ -323,7 +323,7 @@ wkt:keyword |pt:geo_shape # convertCartesianShapeFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_cartesianshape-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))"] @@ -340,7 +340,7 @@ wkt:keyword |geom:cartesian_shape ; convertCartesianFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianshape(wkt); @@ -350,7 +350,7 @@ wkt:keyword ; convertCartesianFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (3010 -1010)" | EVAL point = TO_CARTESIANPOINT(wkt) @@ -363,7 +363,7 @@ wkt:keyword | point:cartesian_point | shape:cartesian_shape # need to work out how to upload WKT simpleCartesianShapeLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox_web | WHERE id == "ISL"; @@ -372,7 +372,7 @@ ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9 ; simpleLoadCartesianPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM airports_web | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -389,7 +389,7 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart # Tests for ST_INTERSECTS with CARTESIAN_SHAPE cartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | EVAL location = TO_CARTESIANSHAPE(location) @@ -402,7 +402,7 @@ HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -416,7 +416,7 @@ wkt:keyword | pt:cartesian_shape ; cartesianShapeIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM countries_bbox_web | WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -430,7 +430,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. 
; literalCartesianPolygonIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] | MV_EXPAND wkt @@ -447,7 +447,7 @@ wkt:keyword | shape:ca # Tests for ST_DISJOINT with CARTESIAN_SHAPE cartesianPolygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -460,7 +460,7 @@ ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -176035 ; cartesianPolygonDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) @@ -478,7 +478,7 @@ count:long # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE cartesianShapeContainsPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_CONTAINS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -490,7 +490,7 @@ ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908 ; cartesianShapeWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_WITHIN(shape, TO_CARTESIANSHAPE("POLYGON((1800000 -2500000, 4300000 -2500000, 4300000 -6000000, 1800000 -6000000, 1800000 -2500000))")) @@ -507,7 +507,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. 
# Tests for Equality and casting with CARTESIAN_SHAPE cartesianshapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -520,7 +520,7 @@ wkt:keyword |pt:cartesian_shape ; cartesianShapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -533,7 +533,7 @@ wkt:keyword |pt:cartesian_shape ; convertCartesianShapeFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5bdf0bd963fee..6322746318230 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -71,7 +71,7 @@ emp_no:integer | last_name:keyword | gender:keyword | f_l:boolean ; stringCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval ss = substring("abcd", "2"), l = left("abcd", "2"), r = right("abcd", "2"); @@ -80,7 +80,7 @@ a:integer | ss:keyword | l:keyword | r:keyword ; stringCastEmp -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval ss = substring(first_name, "2") @@ -330,7 +330,7 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -522,7 +522,7 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -535,7 +535,7 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -552,7 +552,7 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded. @@ -564,7 +564,7 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions] failed, treating result as null. Only first 20 failures recorded. 
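(Aside: every csv-spec hunk in this diff makes the same one-line header change, renaming the test-gating line from `required_feature: esql.<name>` to `required_capability: <name>`. As a rough sketch of what that header means to the test runner — mirroring the CsvSpecReader change further down in this diff, but written as a standalone hypothetical helper rather than the real parser, with made-up class and method names:)

import java.util.Locale;

// Illustrative only: a standalone sketch of the renamed gating header, not the real CsvSpecReader.
class RequiredCapabilityHeaderSketch {
    // Old style:  "required_feature: esql.mv_warn"  -> capability "mv_warn" (the runner stripped the "esql." prefix)
    // New style:  "required_capability: mv_warn"    -> capability "mv_warn" (used verbatim)
    static String requiredCapability(String line) {
        String prefix = "required_capability:";
        if (line.toLowerCase(Locale.ROOT).startsWith(prefix)) {
            return line.substring(prefix.length()).trim();
        }
        return null; // not a capability-gating header
    }

    public static void main(String[] args) {
        System.out.println(requiredCapability("required_capability: mv_warn")); // prints: mv_warn
    }
}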
@@ -577,7 +577,7 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions in (\"Accountant\", \"Tech Lead\")] failed, treating result as null. Only first 20 failures recorded. @@ -590,7 +590,7 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions < \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -607,7 +607,7 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions > \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -620,7 +620,7 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions == \"Accountant\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded.] 
@@ -745,7 +745,7 @@ ROW a=[10, 9, 8] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["Mon", "Tues", "Wed", "Thu", "Fri"] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -754,7 +754,7 @@ a:keyword | sa:keyword | sd:keyword ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(job_positions, "DESC"), sa = mv_sort(job_positions) @@ -772,7 +772,7 @@ emp_no:integer | job_positions:keyword ; mvSliceCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = ["1", "2", "3", "4"] | eval a1 = mv_slice(a, "0", "1"); @@ -782,7 +782,7 @@ a:keyword | a1:keyword ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.keyword, 0, 1) @@ -799,7 +799,7 @@ emp_no:integer | salary_change.keyword:keyword | a1:keyword ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_zip[] ROW a = ["x", "y", "z"], b = ["1", "2"] @@ -815,7 +815,7 @@ a:keyword | b:keyword | c:keyword ; mvZipEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval full_name = mv_zip(first_name, last_name, " "), full_name_2 = mv_zip(last_name, first_name), jobs = mv_zip(job_positions, salary_change.keyword, "#") @@ -842,7 +842,7 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -856,7 +856,7 @@ null | 19 ; startsWithText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -870,7 +870,7 @@ false | null ; substringOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -884,7 +884,7 @@ Gatew | null ; concatOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. 
@@ -1150,7 +1150,7 @@ a:keyword | upper:keyword | lower:keyword ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1162,7 +1162,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values // tag::values-grouped[] FROM employees @@ -1314,7 +1314,7 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true @@ -1328,7 +1328,7 @@ null | 0 ; base64Encode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::to_base64[] row a = "elastic" @@ -1343,7 +1343,7 @@ elastic | ZWxhc3RpYw== ; base64Decode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::from_base64[] row a = "ZWxhc3RpYw==" @@ -1358,7 +1358,7 @@ ZWxhc3RpYw== | elastic ; base64EncodeDecodeEmp#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode from employees | where emp_no < 10032 and emp_no > 10027 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index fa524d270bb98..38f3d439e7504 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,7 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +68,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterPushDownRange -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. 
@@ -84,7 +84,7 @@ warning:#[Emulated:Line 1:67: java.lang.IllegalArgumentException: single-value f ; filterPushDownIn -required_feature: esql.mv_warn +required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -98,7 +98,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsEquality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -109,7 +109,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsInequality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warning:Line 1:32: evaluation of [bytes_in < bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -140,7 +140,7 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warning:Line 1:27: evaluation of [bytes_in == to_ul(154551962150890564)] failed, treating result as null. Only first 20 failures recorded. @@ -151,7 +151,7 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc ; toDegrees -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -163,7 +163,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; toRadians -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index 513189cc0fe86..3b6c41f883018 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -312,7 +312,7 @@ null | null | null | 11 | 0 | 1.3.0 | 0.1 | no ; values -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) @@ -323,7 +323,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM apps | EVAL name=SUBSTRING(name, 0, 1) @@ -348,7 +348,7 @@ version:version | name:keyword ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) BY name @@ -372,7 +372,7 @@ version:version | name:keyword ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version == "1.2.3.4" | sort name | keep name, version; name:keyword | version:version @@ -381,7 +381,7 @@ hhhhh | 1.2.3.4 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended 
+required_capability: string_literal_auto_casting_extended from apps | where version != "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -390,7 +390,7 @@ bbbbb | 2.1 ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version > "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -399,7 +399,7 @@ ccccc | 2.3.4 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version <= "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -408,7 +408,7 @@ aaaaa | 1.2.3.4 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version in ( "1.2.3.4", "bad" ) | sort name | keep name, version; name:keyword | version:version diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index b79d7cc0fbdde..3d626e65f6f11 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; @@ -106,12 +105,13 @@ static int estimateSize(DataType dataType) { ElementType elementType = PlannerUtils.toElementType(dataType); return switch (elementType) { case BOOLEAN -> 1; - case BYTES_REF -> { - if (dataType == DataTypes.IP) { - yield 16; - } - yield 50; // wild estimate for the size of a string. - } + case BYTES_REF -> switch (dataType.typeName()) { + case "ip" -> 16; // IP addresses, both IPv4 and IPv6, are encoded using 16 bytes. + case "version" -> 15; // 8.15.2-SNAPSHOT is 15 bytes, most are shorter, some can be longer + case "geo_point", "cartesian_point" -> 21; // WKB for points is typically 21 bytes. + case "geo_shape", "cartesian_shape" -> 200; // wild estimate, based on some test data (airport_city_boundaries) + default -> 50; // wild estimate for the size of a string. 
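(Aside: a rough worked illustration of how these per-value estimates combine into a row-size estimate. This is a hypothetical standalone sketch that only restates the constants introduced in this hunk; it is not the planner's actual code, and the class and method names are made up.)

// Hypothetical sketch restating the per-value estimates from this hunk (bytes per value).
class RowSizeEstimateSketch {
    static int estimatedBytes(String typeName) {
        return switch (typeName) {
            case "ip" -> 16;                           // IPv4 and IPv6 are both encoded as 16 bytes
            case "version" -> 15;                      // e.g. "8.15.2-SNAPSHOT" is 15 bytes
            case "geo_point", "cartesian_point" -> 21; // WKB point encoding
            case "geo_shape", "cartesian_shape" -> 200; // rough average for shapes
            default -> 50;                             // rough average for other strings
        };
    }

    public static void main(String[] args) {
        // A row with one ip, one version and one keyword field: 16 + 15 + 50 = 81 bytes.
        System.out.println(estimatedBytes("ip") + estimatedBytes("version") + estimatedBytes("keyword"));
    }
}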
+ }; case DOC -> throw new EsqlIllegalArgumentException("can't load a [doc] with field extraction"); case DOUBLE -> Double.BYTES; case INT -> Integer.BYTES; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 4f852264193b4..cf311d4413671 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -10,10 +10,23 @@ import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Map; import java.util.Set; +/** + * {@link NodeFeature}s declared by ESQL. These should be used for fast checks + * on the node. Before the introduction of the {@link RestNodesCapabilitiesAction} + * this was used for controlling which features are tested so many of the + * examples below are *just* used for that. Don't make more of those - add them + * to {@link EsqlCapabilities} instead. + *
<p> + * NOTE: You can't remove a feature now and probably never will be able to. + * Only add more of these if you need a fast CPU level check. + * </p>
+ */ public class EsqlFeatures implements FeatureSpecification { /** * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java index 6061b6db89724..87c93a9198215 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java @@ -212,6 +212,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private PartialStepKey nextStepKey; public Request(String index, Step.StepKey currentStepKey, PartialStepKey nextStepKey) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.currentStepKey = currentStepKey; this.nextStepKey = nextStepKey; @@ -224,7 +225,9 @@ public Request(StreamInput in) throws IOException { this.nextStepKey = new PartialStepKey(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getIndex() { return index; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java index 5818ce6582bef..95358adb832c7 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java @@ -118,6 +118,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); public Request(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -127,7 +128,9 @@ public Request(StreamInput in) throws IOException { this.indicesOptions = IndicesOptions.readIndicesOptions(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public Request indices(String... 
indices) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index 5d75adedddde0..e11e9d5ad8cc9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -44,16 +44,17 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model @Override public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { - if (inferenceInputs instanceof DocumentsOnlyInput docsOnlyInput) { - if (docsOnlyInput.getInputs().size() > 1) { - listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); - return; - } - } else { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); return; } + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java new file mode 100644 index 0000000000000..47aff8dad65db --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class CohereUtilsTests extends ESTestCase { + + public void testCreateRequestSourceHeader() { + var requestSourceHeader = CohereUtils.createRequestSourceHeader(); + + assertThat(requestSourceHeader.getName(), is("Request-Source")); + assertThat(requestSourceHeader.getValue(), is("unspecified:elasticsearch")); + } + +} diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index a397d9864d23d..2f6127c44957f 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -22,6 +23,7 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") .plugin("inference-service-test") + .feature(FeatureFlag.SEMANTIC_TEXT_ENABLED) .distribution(DistributionType.DEFAULT) .build(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index 57aba2bb80d68..f09d867087664 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -71,7 +70,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index 2e16436736e89..2f8165e6a20be 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -15,8 +15,8 @@ import 
org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,7 +60,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 6cb467af525c9..bc8e4794d7daa 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.OperationRouting; @@ -200,7 +199,7 @@ protected void updateModelSnapshotOnJob(ModelSnapshot modelSnapshot) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.get(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index ae128b507c795..675933808c603 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -1101,7 +1100,7 @@ private void indexQuantiles(Quantiles quantiles) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java index 4c8382047e796..ee96d154ab55e 100644 --- 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContent; @@ -57,7 +57,7 @@ public void createComponents() { client(), clusterService().state(), TestIndexNameExpressionResolver.newInstance(client().threadPool().getThreadContext()), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index c849e69c780bd..a2d8fd1d60316 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -146,7 +146,7 @@ public void clusterChanged(ClusterChangedEvent event) { AnnotationIndex.createAnnotationsIndexIfNecessary( client, event.state(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap(r -> isIndexCreationInProgress.set(false), e -> { if (e.getMessage().equals(previousException)) { logger.debug("Error creating ML annotations index or aliases", e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 9fc97ff234c58..4ee294bcf0d8c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -256,14 +256,14 @@ private void createStatsIndexIfNecessary() { client, clusterState, indexNameExpressionResolver, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap( r -> ElasticsearchMappings.addDocMappingIfMissing( MlStatsIndex.writeAlias(), MlStatsIndex::wrappedMapping, client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, listener, MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION ), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 7a314b82024be..8d83156b0e0ee 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -100,7 +100,6 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; -import static 
org.elasticsearch.action.support.master.MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_HIDDEN; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; @@ -268,7 +267,7 @@ public void testOpenJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(1, manager.numberOfOpenJobs()); assertTrue(manager.jobHasActiveAutodetectProcess(jobTask)); ArgumentCaptor captor = ArgumentCaptor.forClass(JobTaskState.class); @@ -296,7 +295,7 @@ public void testOpenJob_withoutVersion() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(job.getId()); AtomicReference errorHolder = new AtomicReference<>(); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> errorHolder.set(e)); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> errorHolder.set(e)); Exception error = errorHolder.get(); assertThat(error, is(notNullValue())); assertThat(error.getMessage(), equalTo("Cannot open job [no_version] because jobs created prior to version 5.5 are not supported")); @@ -339,22 +338,22 @@ public void testOpenJob_exceedMaxNumJobs() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("bar"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("baz"); when(jobTask.getAllocationId()).thenReturn(2L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); Exception[] holder = new Exception[1]; jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foobar"); when(jobTask.getAllocationId()).thenReturn(3L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> holder[0] = e); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> holder[0] = e); Exception e = holder[0]; assertEquals("max running job capacity [3] reached", e.getMessage()); @@ -363,7 +362,7 @@ public void testOpenJob_exceedMaxNumJobs() { when(jobTask.getJobId()).thenReturn("baz"); manager.closeJob(jobTask, null); assertEquals(2, manager.numberOfOpenJobs()); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e1, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e1, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); } @@ -374,7 +373,7 @@ public void testProcessData() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty()); - manager.openJob(jobTask, clusterState, 
DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -401,7 +400,7 @@ public void testProcessDataThrowsElasticsearchStatusException_onIoException() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); Exception[] holder = new Exception[1]; manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> holder[0] = e); assertNotNull(holder[0]); @@ -413,7 +412,7 @@ public void testCloseJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -443,7 +442,7 @@ public void testVacate() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.triggerVacate()).thenReturn(true); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -475,7 +474,7 @@ public void testCanCloseClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -528,7 +527,7 @@ public void testCanKillClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -562,7 +561,7 @@ public void testBucketResetMessageIsSent() { InputStream inputStream = createInputStream(""); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData(jobTask, analysisRegistry, inputStream, xContentType, params, (dataCounts1, e) -> {}); verify(autodetectCommunicator).writeToJob(same(inputStream), same(analysisRegistry), same(xContentType), same(params), any()); } @@ -573,7 +572,7 @@ public void testFlush() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); InputStream inputStream = createInputStream(""); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -617,7 +616,7 @@ public void testCloseThrows() { // create a jobtask JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, 
analysisRegistry, @@ -660,7 +659,7 @@ public void testJobHasActiveAutodetectProcess() { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -683,7 +682,7 @@ public void testKillKillsAutodetectProcess() throws IOException { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -728,7 +727,7 @@ public void testProcessData_GivenStateNotOpened() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); InputStream inputStream = createInputStream(""); DataCounts[] dataCounts = new DataCounts[1]; manager.processData( @@ -836,7 +835,7 @@ public void testGetOpenProcessMemoryUsage() { AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) { case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes; @@ -905,7 +904,7 @@ private AutodetectProcessManager createSpyManagerAndCallProcessData(String jobId AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(jobId); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java index 0d8f3aad27daa..05ab989f444fe 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java @@ -133,7 +133,9 @@ public Request(StreamInput in) throws IOException { waitForResourcesCreated = in.readBoolean(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public boolean waitForResourcesCreated() { return waitForResourcesCreated; diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 45d3653a28b6a..af4595c5bbd76 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -43,8 +43,8 @@ public Object parse(String line) { if (line.startsWith(SCHEMA_PREFIX)) { assertThat("Early 
schema already declared " + earlySchema, earlySchema.length(), is(0)); earlySchema.append(line.substring(SCHEMA_PREFIX.length()).trim()); - } else if (line.toLowerCase(Locale.ROOT).startsWith("required_feature:")) { - requiredCapabilities.add(line.substring("required_feature:".length()).trim().replace("esql.", "")); + } else if (line.toLowerCase(Locale.ROOT).startsWith("required_capability:")) { + requiredCapabilities.add(line.substring("required_capability:".length()).trim()); } else { if (line.endsWith(";")) { // pick up the query diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 7c753692628cb..286a9cb736b1b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,11 +7,13 @@ package org.elasticsearch.integration; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -25,10 +27,15 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -39,25 +46,31 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; +import java.util.function.Consumer; import static 
org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings and detect REST clashes - * with the reserved role mappings. + * Tests that file settings service can properly add role mappings. */ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -135,12 +148,21 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings.Builder builder = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + // some tests make use of cluster-state based role mappings + .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); + return builder.build(); + } + @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - private void writeJSONFile(String node, String json) throws Exception { + public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -151,10 +173,11 @@ private void writeJSONFile(String node, String json) throws Exception { Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -238,49 +261,41 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + + // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. 
index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), - allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) - ); + assertFalse(response.hasMappings()); + assertThat(response.mappings(), emptyArray()); - // Try using the REST API to update the everyone_kibana role mapping - // This should fail, we have reserved certain role mappings in operator mode - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() - ).getMessage() - ); - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() - ).getMessage() - ); + // role mappings (with the same names) can also be stored in the "native" store + var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); + putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -292,32 +307,65 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // native role mappings are not affected by the removal of the cluster-state based ones + { + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + 
containsInAnyOrder("everyone_kibana", "everyone_fleet") + ); + } + + // and roles are resolved based on the native role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + } + + { + var request = new DeleteRoleMappingRequest(); + request.setName("everyone_kibana"); + var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + request = new DeleteRoleMappingRequest(); + request.setName("everyone_fleet"); + response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + } + + // no roles are resolved now, because both native and cluster-state based stores have been cleared + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } } - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); + public static Tuple setupClusterStateListenerForError( + ClusterService clusterService, + Consumer errorMetadataConsumer + ) { CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { + if (reservedState != null && reservedState.errorMetadata() != null) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - reservedState.errorMetadata().errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. 
missing field [rules]") - ); + errorMetadataConsumer.accept(reservedState.errorMetadata()); } } }); @@ -325,22 +373,13 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } - private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { - boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // This should succeed, nothing was reserved - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); - } - public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -353,76 +392,94 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - writeJSONFile(internalCluster().getMasterName(), testErrorJSON); - assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); - } - - private Tuple setupClusterStateListenerForSecurityWriteError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); - } + savedClusterState = setupClusterStateListenerForError( + internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + errorMetadata -> { + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); + assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + errorMetadata.errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. 
missing field [rules]") + ); } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - public void testRoleMappingFailsToWriteToStore() throws Exception { - ensureGreen(); - - var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - - final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); + ); - writeJSONFile(internalCluster().getMasterName(), testJSON); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - final ClusterStateResponse clusterStateResponse = clusterAdmin().state( - new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) - ).get(); + // no roles are resolved because both role mapping stores are empty + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } + } - assertNull( - clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) - ); + public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + ensureGreen(); - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - .reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); + // expect the role mappings to apply even if the .security index is closed + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); + try { + var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // no native role mappings exist + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); + + // cluster state settings are also applied + var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) + .get(); + assertThat( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), + equalTo("50mb") + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); 
+ + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + // and roles are resolved based on the cluster-state role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + } finally { + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(openIndexResponse.isAcknowledged()); + } } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": false, - "roles": [ "kibana_user" ], + "enabled": true, + "roles": [ "kibana_user_native" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -433,8 +490,7 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); - return PutRoleMappingRequest.fromMapping(mapping); + return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java deleted file mode 100644 index 48e97b7afb897..0000000000000 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.analysis.common.CommonAnalysisPlugin; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; -import org.elasticsearch.cluster.metadata.ReservedStateMetadata; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Strings; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.reservedstate.service.FileSettingsService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.transport.netty4.Netty4Plugin; -import org.elasticsearch.xpack.wildcard.Wildcard; - -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.Arrays; -import java.util.Collection; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.notNullValue; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) -public class FileSettingsRoleMappingsStartupIT extends SecurityIntegTestCase { - - private static AtomicLong versionCounter = new AtomicLong(1); - private static String testJSONForFailedCase = """ - { - "metadata": { - "version": "%s", - "compatibility": "8.4.0" - }, - "state": { - "role_mappings": { - "everyone_kibana_2": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_foo": "something" - } - } - } - } - }"""; - - @Override - protected void doAssertXPackIsInstalled() {} - - @Override - protected Path nodeConfigPath(int nodeOrdinal) { - return null; - } - - private void writeJSONFile(String node, String json) throws Exception { - long version = versionCounter.incrementAndGet(); - - FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); - - Files.deleteIfExists(fileSettingsService.watchedFile()); - - Files.createDirectories(fileSettingsService.watchedFileDir()); - Path tempFilePath = createTempFile(); - - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); - logger.info(Strings.format(json, version)); - Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); - Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - } - - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = 
event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { - assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - } else if (reservedState != null) { - logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); - } else { - logger.debug(() -> "Got cluster state update: " + event.source()); - } - } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - @TestLogging( - value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", - reason = "https://github.com/elastic/elasticsearch/issues/98391" - ) - public void testFailsOnStartMasterNodeWithError() throws Exception { - internalCluster().setBootstrapMasterNodeIndex(0); - - internalCluster().startMasterOnlyNode(); - - logger.info("--> write some role mappings, no other file settings"); - writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); - var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - } - - public Collection> nodePlugins() { - return Arrays.asList( - UnstableLocalStateSecurity.class, - Netty4Plugin.class, - ReindexPlugin.class, - CommonAnalysisPlugin.class, - InternalSettingsPlugin.class, - MapperExtrasPlugin.class, - Wildcard.class - ); - } - -} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 58d6657b99e32..076ac01f1c8f3 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -2673,7 +2673,9 @@ public void testUpdateApiKeysAutoUpdatesLegacySuperuserRoleDescriptor() throws E // raw document has the legacy superuser role descriptor expectRoleDescriptorsForApiKey("limited_by_role_descriptors", legacySuperuserRoleDescriptor, getApiKeyDocument(apiKeyId)); - final Set currentSuperuserRoleDescriptors = Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final Set currentSuperuserRoleDescriptors = ApiKeyService.removeUserRoleDescriptorDescriptions( + Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR) + ); // The first request is not a noop because we are auto-updating the legacy role descriptors to 8.x role descriptors assertSingleUpdate( apiKeyId, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index ef08f855a46cc..0ff4f1160af56 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,8 +1103,7 @@ Collection createComponents( new 
SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); - systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 852887767578f..73d1a1abcdb50 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,24 +7,18 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.GroupedActionListener; -import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -38,123 +32,59 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. 
*/ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler<List<ExpressionRoleMapping>> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler<List<PutRoleMappingRequest>> { public static final String NAME = "role_mappings"; - private final NativeRoleMappingStore roleMappingStore; - private final ListenableFuture<Void> securityIndexRecoveryListener = new ListenableFuture<>(); - - /** - * Creates a ReservedRoleMappingAction - * - * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings - */ - public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { - this.roleMappingStore = roleMappingStore; - } - @Override public String name() { return NAME; } - private static Collection<PutRoleMappingRequest> prepare(List<ExpressionRoleMapping> roleMappings) { - List<PutRoleMappingRequest> requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); - - var exceptions = new ArrayList<Exception>(); - for (var request : requests) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = request.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - - return requests; - } - @Override public TransformState transform(Object source, TransformState prevState) throws Exception { - // We execute the prepare() call to catch any errors in the transform phase. - // Since we store the role mappings outside the cluster state, we do the actual save with a - // non cluster state transform call. @SuppressWarnings("unchecked") - var requests = prepare((List<ExpressionRoleMapping>) source); - return new TransformState( - prevState.state(), - prevState.keys(), - l -> securityIndexRecoveryListener.addListener( - ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) - ) - ); - } - - // Exposed for testing purposes - protected void nonStateTransform( - Collection<PutRoleMappingRequest> requests, - TransformState prevState, - ActionListener<NonStateTransformResult> listener - ) { - Set<String> entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); - Set<String> toDelete = new HashSet<>(prevState.keys()); - toDelete.removeAll(entities); - - final int tasksCount = requests.size() + toDelete.size(); - - // Nothing to do, don't start a group listener with 0 actions - if (tasksCount == 0) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); - return; - } - - GroupedActionListener<Boolean> taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { - @Override - public void onResponse(Collection<Boolean> booleans) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - - for (var request : requests) { - roleMappingStore.putRoleMapping(request, taskListener); - } - - for (var mappingToDelete : toDelete) { - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName(mappingToDelete); - roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); + Set<ExpressionRoleMapping> roleMappings = validate((List<PutRoleMappingRequest>) source); + RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); + if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { + return prevState; + } else { +
ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); + Set<String> entities = newRoleMappingMetadata.getRoleMappings() + .stream() + .map(ExpressionRoleMapping::getName) + .collect(Collectors.toSet()); + return new TransformState(newState, entities); } } @Override - public List<ExpressionRoleMapping> fromXContent(XContentParser parser) throws IOException { - List<ExpressionRoleMapping> result = new ArrayList<>(); - + public List<PutRoleMappingRequest> fromXContent(XContentParser parser) throws IOException { + List<PutRoleMappingRequest> result = new ArrayList<>(); Map<String, Object> source = parser.map(); - for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map<String, Object> content = (Map<String, Object>) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); - result.add(mapping); + result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); } } - return result; } - public void securityIndexRecovered() { - securityIndexRecoveryListener.onResponse(null); + private Set<ExpressionRoleMapping> validate(List<PutRoleMappingRequest> roleMappings) { + var exceptions = new ArrayList<Exception>(); + for (var roleMapping : roleMappings) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = roleMapping.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + return roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 811d357b89f89..b4e8d5d6db83f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,12 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< - DeleteRoleMappingRequest, - DeleteRoleMappingResponse> { +public class TransportDeleteRoleMappingAction extends HandledTransportAction<DeleteRoleMappingRequest, DeleteRoleMappingResponse> { private final NativeRoleMappingStore roleMappingStore; @@ -31,25 +26,20 @@ public class TransportDeleteRoleMappingAction extends 
ReservedStateAwareHandledT public TransportDeleteRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(DeleteRoleMappingAction.NAME, clusterService, transportService, actionFilters, DeleteRoleMappingRequest::new); + super( + DeleteRoleMappingAction.NAME, + transportService, + actionFilters, + DeleteRoleMappingRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } - - @Override - public Optional reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set modifiedKeys(DeleteRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 5e32e4f903f81..44c72bc13a54b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,10 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { +public class TransportPutRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -29,32 +26,17 @@ public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTran public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(PutRoleMappingAction.NAME, clusterService, transportService, actionFilters, PutRoleMappingRequest::new); + super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected( - Task task, - final PutRoleMappingRequest request, - final ActionListener listener - ) { + protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { 
roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } - - @Override - public Optional<String> reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set<String> modifiedKeys(PutRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 55a89e184f84f..883d7cb8ab103 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -371,7 +371,13 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } - private Set<RoleDescriptor> removeUserRoleDescriptorDescriptions(Set<RoleDescriptor> userRoleDescriptors) { + /** + * This method removes the description from the given user's (limited-by) role descriptors. + * The description field is not supported for API key role descriptors, so storing limited-by roles with descriptions + * would be inconsistent and would require handling backwards compatibility. + * That is why we remove them before creating or updating API key roles. + */ + static Set<RoleDescriptor> removeUserRoleDescriptorDescriptions(Set<RoleDescriptor> userRoleDescriptors) { return userRoleDescriptors.stream().map(roleDescriptor -> { if (roleDescriptor.hasDescription()) { return new RoleDescriptor( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index e7e24037543fa..55562c8ee0138 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,6 +8,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -17,6 +19,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -57,12 +60,18 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - final String name = request.param("name"); - PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( - name, - request.requiredContent(), - request.getXContentType() - ).setRefreshPolicy(request.param("refresh")); + String name = request.param("name"); + String refresh = request.param("refresh"); + PutRoleMappingRequestBuilder requestBuilder; + try ( + XContentParser parser = 
XContentHelper.createParserNotCompressed( + LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, + request.requiredContent(), + request.getXContentType() + ) + ) { + requestBuilder = new PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); + } return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java deleted file mode 100644 index b4a07093e49c3..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; - -/** - * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. This is used - * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. - *

- * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the - * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor - * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} - */ -public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { - public LocalReservedUnstableSecurityStateHandlerProvider() { - throw new IllegalStateException("Provider must be constructed using PluginsService"); - } - - public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { - super(plugin); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java deleted file mode 100644 index 5621bdced15b3..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reservedstate.NonStateTransformResult; -import org.elasticsearch.reservedstate.ReservedClusterStateHandler; -import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Optional; - -/** - * A test class that allows us to Inject new type of Reserved Handler that can - * simulate errors in saving role mappings. - *

- * We can't use our regular path to simply make an extension of LocalStateSecurity - * in an integration test class, because the reserved handlers are injected through - * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) - */ -public final class UnstableLocalStateSecurity extends LocalStateSecurity { - - public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { - super(settings, configPath); - // We reuse most of the initialization of LocalStateSecurity, we then just overwrite - // the security plugin with an extra method to give us a fake RoleMappingAction. - Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); - if (security.isPresent()) { - plugins.remove(security.get()); - } - - UnstableLocalStateSecurity thisVar = this; - var action = new ReservedUnstableRoleMappingAction(); - - plugins.add(new Security(settings, super.securityExtensions()) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - - @Override - protected XPackLicenseState getLicenseState() { - return thisVar.getLicenseState(); - } - - @Override - List> reservedClusterStateHandlers() { - // pretend the security index is initialized after 2 seconds - var timer = new java.util.Timer(); - timer.schedule(new java.util.TimerTask() { - @Override - public void run() { - action.securityIndexRecovered(); - timer.cancel(); - } - }, 2_000); - return List.of(action); - } - }); - } - - public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { - /** - * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store - */ - public ReservedUnstableRoleMappingAction() { - // we don't actually need a NativeRoleMappingStore - super(null); - } - - /** - * The nonStateTransform method is the only one that uses the native store, we simply pretend - * something has called the onFailure method of the listener. 
- */ - @Override - protected void nonStateTransform( - Collection requests, - TransformState prevState, - ActionListener listener - ) { - listener.onFailure(new IllegalStateException("Fake exception")); - } - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index 6cdca0cb3b24d..cac7c91f73ed1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,77 +7,40 @@ package org.elasticsearch.xpack.security.action.reservedstate; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; +import static org.hamcrest.Matchers.nullValue; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { + private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - - CountDownLatch latch = new CountDownLatch(1); - AtomicReference> updatedKeys = new AtomicReference<>(); - AtomicReference error = new AtomicReference<>(); - state.nonStateTransform().accept(new ActionListener<>() { - @Override - public void onResponse(NonStateTransformResult nonStateTransformResult) { - updatedKeys.set(nonStateTransformResult.updatedKeys()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - error.set(e); - latch.countDown(); - } - }); - - latch.await(); - if (error.get() != null) { - throw error.get(); - } - return new TransformState(state.state(), updatedKeys.get()); + assertThat(state.nonStateTransform(), nullValue()); + return state; } } public void testValidation() { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - 
ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String badPolicyJSON = """ { "everyone_kibana": { @@ -97,7 +60,6 @@ public void testValidation() { } } }"""; - assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -105,13 +67,9 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -147,102 +105,4 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } - - @SuppressWarnings("unchecked") - public void testNonStateTransformWaitsOnAsyncActions() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate put role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate delete role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); - TransformState updatedState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - } - }"""; - - assertEquals( - "err_done", - expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) - .getMessage() - ); - - // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete - 
doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - updatedState = processJSON(action, updatedState, json); - assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); - - assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); - } - - @SuppressWarnings("unchecked") - private NativeRoleMappingStore mockNativeRoleMappingStore() { - final NativeRoleMappingStore nativeRoleMappingStore = spy( - new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) - ); - - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - return nativeRoleMappingStore; - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java deleted file mode 100644 index 038e673e07862..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security.action.rolemapping; - -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; - -import java.util.Collections; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.mockito.Mockito.mock; - -public class TransportDeleteRoleMappingActionTests extends ESTestCase { - public void testReservedStateHandler() { - var store = mock(NativeRoleMappingStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); - - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName("kibana_all"); - assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 58a8e8e3d4751..6f789a10a3a6c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,16 +9,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -33,7 +29,6 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -60,7 +55,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, 
mock(ClusterService.class), store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); @@ -99,39 +94,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecuteProtected(mock(Task.class), request, future); + action.doExecute(mock(Task.class), request, future); return future.get(); } - - public void testReservedStateHandler() throws Exception { - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" - } - } - }"""; - - try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { - ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); - var parsedResult = roleMappingAction.fromXContent(parser); - - for (var mapping : parsedResult) { - assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); - } - } - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 7752b85c6345c..0871e2568d225 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -1158,7 +1158,9 @@ private static Tuple, Map> newApiKeyDocument getFastStoredHashAlgoForTests().hash(new SecureString(key.toCharArray())), "test", authentication, - type == ApiKey.Type.CROSS_CLUSTER ? Set.of() : Collections.singleton(SUPERUSER_ROLE_DESCRIPTOR), + type == ApiKey.Type.CROSS_CLUSTER + ? 
Set.of() + : ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)), Instant.now(), Instant.now().plus(expiry), keyRoles, @@ -1316,22 +1318,6 @@ public void testParseRoleDescriptorsMap() throws Exception { assertThat(roleDescriptors, hasSize(1)); assertThat(roleDescriptors.get(0), equalTo(roleARoleDescriptor)); - Map superUserRdMap; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - superUserRdMap = XContentHelper.convertToMap( - XContentType.JSON.xContent(), - BytesReference.bytes(SUPERUSER_ROLE_DESCRIPTOR.toXContent(builder, ToXContent.EMPTY_PARAMS, true)).streamInput(), - false - ); - } - roleDescriptors = service.parseRoleDescriptors( - apiKeyId, - Map.of(SUPERUSER_ROLE_DESCRIPTOR.getName(), superUserRdMap), - randomApiKeyRoleType() - ); - assertThat(roleDescriptors, hasSize(1)); - assertThat(roleDescriptors.get(0), equalTo(SUPERUSER_ROLE_DESCRIPTOR)); - final Map legacySuperUserRdMap; try (XContentBuilder builder = JsonXContent.contentBuilder()) { legacySuperUserRdMap = XContentHelper.convertToMap( @@ -1812,7 +1798,10 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru RoleReference.ApiKeyRoleType.LIMITED_BY ); assertEquals(1, limitedByRoleDescriptors.size()); - assertEquals(SUPERUSER_ROLE_DESCRIPTOR, limitedByRoleDescriptors.get(0)); + RoleDescriptor superuserWithoutDescription = ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)) + .iterator() + .next(); + assertEquals(superuserWithoutDescription, limitedByRoleDescriptors.get(0)); if (metadata == null) { assertNull(cachedApiKeyDoc.metadataFlattened); } else { diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 77c38d302d9c9..3d17572429bac 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,4 +6,3 @@ # org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider -org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 4446e0aeae4db..14417c693f280 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -33,10 +33,12 @@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = nodeId; } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { diff --git 
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml
index bd40e29d0b675..671fb24715631 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml
@@ -98,129 +98,3 @@ setup:
 
   - match:
       jobs: []
-
----
-"Test get all jobs":
-
-  - skip:
-      awaits_fix: "Job ordering isn't guaranteed right now, cannot test"
-
-  - do:
-      headers:
-        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      rollup.put_job:
-        id: foo
-        body: >
-          {
-            "index_pattern": "foo",
-            "rollup_index": "foo_rollup",
-            "cron": "*/30 * * * * ?",
-            "page_size" :10,
-            "groups" : {
-              "date_histogram": {
-                "field": "the_field",
-                "calendar_interval": "1h"
-              }
-            },
-            "metrics": [
-              {
-                "field": "value_field",
-                "metrics": ["min", "max", "sum"]
-              }
-            ]
-          }
-  - is_true: acknowledged
-
-  - do:
-      headers:
-        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
-      rollup.put_job:
-        id: bar
-        body: >
-          {
-            "index_pattern": "bar",
-            "rollup_index": "foo_rollup",
-            "cron": "*/30 * * * * ?",
-            "page_size" :10,
-            "groups" : {
-              "date_histogram": {
-                "field": "the_field",
-                "calendar_interval": "1h"
-              }
-            },
-            "metrics": [
-              {
-                "field": "value_field",
-                "metrics": ["min", "max", "sum"]
-              }
-            ]
-          }
-  - is_true: acknowledged
-
-  - do:
-      rollup.get_jobs:
-        id: "_all"
-
-  - length: { jobs: 2 }
-  - match:
-      jobs:
-        - config:
-            id: "foo"
-            index_pattern: "foo"
-            rollup_index: "foo_rollup"
-            cron: "*/30 * * * * ?"
-            page_size: 10
-            groups :
-              date_histogram:
-                calendar_interval: "1h"
-                field: "the_field"
-                time_zone: "UTC"
-            metrics:
-              - field: "value_field"
-                metrics:
-                  - "min"
-                  - "max"
-                  - "sum"
-            timeout: "20s"
-          stats:
-            pages_processed: 0
-            documents_processed: 0
-            rollups_indexed: 0
-            trigger_count: 0
-          status:
-            job_state: "stopped"
-        - config:
-            id: "bar"
-            index_pattern: "bar"
-            rollup_index: "foo_rollup"
-            cron: "*/30 * * * * ?"
-            page_size: 10
-            groups :
-              date_histogram:
-                calendar_interval: "1h"
-                field: "the_field"
-                time_zone: "UTC"
-            metrics:
-              - field: "value_field"
-                metrics:
-                  - "min"
-                  - "max"
-                  - "sum"
-            timeout: "20s"
-          stats:
-            pages_processed: 0
-            documents_processed: 0
-            rollups_indexed: 0
-            trigger_count: 0
-            search_failures: 0
-            index_failures: 0
-            index_time_in_ms: 0
-            index_total: 0
-            search_time_in_ms: 0
-            search_total: 0
-            processing_time_in_ms: 0
-            processing_total: 0
-          status:
-            job_state: "stopped"
-
-
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java
index 481fe40a764a6..177f00c704c3c 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java
@@ -6,6 +6,8 @@
  */
 package org.elasticsearch.xpack.transform.action;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
@@ -34,6 +36,7 @@ public class TransportGetCheckpointNodeAction extends HandledTransportAction {
 
+    private static final Logger logger = LogManager.getLogger(TransportGetCheckpointNodeAction.class);
     private final IndicesService indicesService;
 
     @Inject
@@ -83,17 +86,27 @@ protected static void getGlobalCheckpoints(
                 return;
             }
         }
-        final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
-        final IndexShard indexShard = indexService.getShard(shardId.id());
-        checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> {
-            long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()];
-            Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO);
-            return seqNumbers;
-        });
-        checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint();
-        ++numProcessedShards;
+        try {
+            final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
+            final IndexShard indexShard = indexService.getShard(shardId.id());
+
+            checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> {
+                long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()];
+                Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO);
+                return seqNumbers;
+            });
+            checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint();
+            ++numProcessedShards;
+        } catch (Exception e) {
+            logger.atDebug()
+                .withThrowable(e)
+                .log("Failed to get checkpoint for shard [{}] and index [{}]", shardId.getId(), shardId.getIndexName());
+            listener.onFailure(e);
+            return;
+        }
     }
+
     listener.onResponse(new Response(checkpointsByIndexOfThisNode));
 }
 }
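The hunk above wraps the per-shard work in `getGlobalCheckpoints` in a try/catch so that a failure from `indicesService.indexServiceSafe(...)` (for example an `IndexNotFoundException`) is forwarded to the listener instead of escaping the transport action, and the early `return` guarantees `onResponse` is never also called. A small stand-alone sketch of the same shape, using a minimal hypothetical `Listener` interface rather than Elasticsearch's `ActionListener`:

```java
import java.util.ArrayList;
import java.util.List;

public class ForwardFailureSketch {

    // Minimal listener: exactly one of onResponse / onFailure should be invoked.
    interface Listener<T> {
        void onResponse(T result);

        void onFailure(Exception e);
    }

    // Pretend per-shard lookup that may throw, like indicesService.indexServiceSafe(...).
    static long checkpointForShard(int shardId) {
        if (shardId < 0) {
            throw new IllegalArgumentException("unknown shard [" + shardId + "]");
        }
        return 1_000L + shardId;
    }

    static void collectCheckpoints(List<Integer> shardIds, Listener<List<Long>> listener) {
        List<Long> checkpoints = new ArrayList<>();
        for (int shardId : shardIds) {
            try {
                checkpoints.add(checkpointForShard(shardId));
            } catch (Exception e) {
                // Forward the failure and stop: the early return keeps the "respond exactly once"
                // invariant that the production change preserves.
                listener.onFailure(e);
                return;
            }
        }
        listener.onResponse(checkpoints);
    }

    public static void main(String[] args) {
        collectCheckpoints(List.of(0, 1, -1), new Listener<List<Long>>() {
            @Override
            public void onResponse(List<Long> result) {
                System.out.println("checkpoints: " + result);
            }

            @Override
            public void onFailure(Exception e) {
                System.out.println("failed: " + e.getMessage());
            }
        });
    }
}
```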
diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java
index 25c7f9efa7992..950e593165f01 100644
--- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.seqno.SeqNoStats;
@@ -47,6 +48,8 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -68,35 +71,9 @@ public void setUp() throws Exception {
             null,
             (TaskManager) null
         );
-        IndexShard indexShardA0 = mock(IndexShard.class);
-        when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000));
-        IndexShard indexShardA1 = mock(IndexShard.class);
-        when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001));
-        IndexShard indexShardB0 = mock(IndexShard.class);
-        when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000));
-        IndexShard indexShardB1 = mock(IndexShard.class);
-        when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001));
-        Settings commonIndexSettings = Settings.builder()
-            .put(SETTING_VERSION_CREATED, 1_000_000)
-            .put(SETTING_NUMBER_OF_SHARDS, 2)
-            .put(SETTING_NUMBER_OF_REPLICAS, 1)
-            .build();
-        IndexService indexServiceA = mock(IndexService.class);
-        when(indexServiceA.getIndexSettings()).thenReturn(
-            new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY)
-        );
-        when(indexServiceA.getShard(0)).thenReturn(indexShardA0);
-        when(indexServiceA.getShard(1)).thenReturn(indexShardA1);
-        IndexService indexServiceB = mock(IndexService.class);
-        when(indexServiceB.getIndexSettings()).thenReturn(
-            new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY)
-        );
-        when(indexServiceB.getShard(0)).thenReturn(indexShardB0);
-        when(indexServiceB.getShard(1)).thenReturn(indexShardB1);
+
         indicesService = mock(IndicesService.class);
         when(indicesService.clusterService()).thenReturn(clusterService);
-        when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA);
-        when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB);
 
         task = new CancellableTask(123, "type", "action", "description", new TaskId("dummy-node:456"), Map.of());
         clock = new FakeClock(Instant.now());
@@ -117,6 +94,7 @@ public void testGetGlobalCheckpointsWithHighTimeout() throws InterruptedExceptio
     }
 
     private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws InterruptedException {
+        mockIndexServiceResponse();
         CountDownLatch latch = new CountDownLatch(1);
         SetOnce responseHolder = new SetOnce<>();
         SetOnce exceptionHolder = new SetOnce<>();
when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); task = new CancellableTask(123, "type", "action", "description", new TaskId("dummy-node:456"), Map.of()); clock = new FakeClock(Instant.now()); @@ -117,6 +94,7 @@ public void testGetGlobalCheckpointsWithHighTimeout() throws InterruptedExceptio } private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws InterruptedException { + mockIndexServiceResponse(); CountDownLatch latch = new CountDownLatch(1); SetOnce responseHolder = new SetOnce<>(); SetOnce exceptionHolder = new SetOnce<>(); @@ -136,7 +114,38 @@ private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws Interrupt assertThat(exceptionHolder.get(), is(nullValue())); } + private void mockIndexServiceResponse() { + IndexShard indexShardA0 = mock(IndexShard.class); + when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); + IndexShard indexShardA1 = mock(IndexShard.class); + when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); + IndexShard indexShardB0 = mock(IndexShard.class); + when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); + IndexShard indexShardB1 = mock(IndexShard.class); + when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001)); + Settings commonIndexSettings = Settings.builder() + .put(SETTING_VERSION_CREATED, 1_000_000) + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexService indexServiceA = mock(IndexService.class); + when(indexServiceA.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceA.getShard(0)).thenReturn(indexShardA0); + when(indexServiceA.getShard(1)).thenReturn(indexShardA1); + IndexService indexServiceB = mock(IndexService.class); + when(indexServiceB.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceB.getShard(0)).thenReturn(indexShardB0); + when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); + when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); + } + public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws InterruptedException { + mockIndexServiceResponse(); TaskCancelHelper.cancel(task, "due to apocalypse"); CountDownLatch latch = new CountDownLatch(1); @@ -156,6 +165,7 @@ public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws Interrupt } public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedException { + mockIndexServiceResponse(); // Move the current time past the timeout. 
diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
index 3d9e7f3828bc7..17363d58545c2 100644
--- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
+++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java
@@ -20,11 +20,14 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.ssl.SslVerificationMode;
 import org.elasticsearch.common.util.Maps;
+import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
+import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.SecurityIntegTestCase;
 import org.elasticsearch.test.fixtures.smb.SmbTestContainer;
 import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter;
+import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
@@ -187,11 +190,16 @@ public void setupRoleMappings() throws Exception {
         Map> futures = Maps.newLinkedHashMapWithExpectedSize(content.size());
         for (int i = 0; i < content.size(); i++) {
             final String name = "external_" + i;
-            final PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source(
-                name,
-                new BytesArray(content.get(i)),
-                XContentType.JSON
-            );
+            final PutRoleMappingRequestBuilder builder;
+            try (
+                XContentParser parser = XContentHelper.createParserNotCompressed(
+                    LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG,
+                    new BytesArray(content.get(i)),
+                    XContentType.JSON
+                )
+            ) {
+                builder = new PutRoleMappingRequestBuilder(client()).source(name, parser);
+            }
             futures.put(name, builder.execute());
         }
         for (String mappingName : futures.keySet()) {
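The last hunk swaps the bytes-plus-content-type overload of `PutRoleMappingRequestBuilder.source` for one that takes an already-open `XContentParser`, scoped by try-with-resources so the parser is always closed once the request is built. A rough sketch of that shape with hypothetical `JsonParser` and `RoleMappingRequest` types (the real `XContentHelper.createParserNotCompressed` call is Elasticsearch-specific):

```java
import java.io.IOException;
import java.io.StringReader;

public class ParserScopedBuildSketch {

    // Hypothetical parser over a JSON string; AutoCloseable so it fits try-with-resources.
    static final class JsonParser implements AutoCloseable {
        private final StringReader reader;

        JsonParser(String json) {
            this.reader = new StringReader(json);
        }

        String readAll() throws IOException {
            StringBuilder sb = new StringBuilder();
            int c;
            while ((c = reader.read()) != -1) {
                sb.append((char) c);
            }
            return sb.toString();
        }

        @Override
        public void close() {
            reader.close();
        }
    }

    // Hypothetical request type: the parser is only consulted while it is open.
    record RoleMappingRequest(String name, String body) {}

    static RoleMappingRequest buildRequest(String name, String json) throws IOException {
        final RoleMappingRequest request;
        try (JsonParser parser = new JsonParser(json)) {
            // Everything that touches the parser happens inside the try block ...
            request = new RoleMappingRequest(name, parser.readAll());
        }
        // ... and only the finished request escapes it, mirroring the test change above.
        return request;
    }

    public static void main(String[] args) throws IOException {
        System.out.println(buildRequest("external_0", "{\"roles\":[\"user\"]}"));
    }
}
```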