From 163730352d50111df6fea62571f6b17ed87e517e Mon Sep 17 00:00:00 2001
From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com>
Date: Wed, 5 Mar 2025 03:10:01 +1100
Subject: [PATCH 01/54] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 15a4191 (#124012)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This PR contains the following updates:

| Package | Update | Change |
|---|---|---|
| docker.elastic.co/wolfi/chainguard-base | digest | `c66fdaf` -> `15a4191` |

---

### Configuration

📅 **Schedule**: Branch creation - "after 1pm on tuesday" (UTC), Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied.

♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update again.

---

 - [ ] If you want to rebase/retry this PR, check this box

---

This PR has been generated by [Renovate Bot](https://redirect.github.com/renovatebot/renovate).
---
 .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
index ec0c2521ac40b..416580e30d607 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java
@@ -22,7 +22,7 @@ public enum DockerBase {
     // Chainguard based wolfi image with latest jdk
     // This is usually updated via renovatebot
     // spotless:off
-    WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:c66fdafe581a6ab1668a962015de4ce4666a60ed601d24f019f03bb4aaab8eeb",
+    WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:15a4191ff8ec8305dcba449365e8a1586c9cda8e016ae838d960b9009c6a5cac",
        "-wolfi",
        "apk"
    ),

From 9e6fcf6690ec7cd42c56653c311488c3dc34e2cb Mon Sep 17 00:00:00 2001
From: Jordan Powers
Date: Tue, 4 Mar 2025 08:18:49 -0800
Subject: [PATCH 02/54] Fix timestamp range query optimization for indices with doc values skipper (#123930)

When running a timestamp range query, as an optimization we check if the query range overlaps with the total range of values within a shard before executing the query on that shard. That way, if the range is disjoint, we can skip execution for that shard.

To get the range of values within a shard, we usually use the PointValues index on the shard. However, when the doc values skipper is enabled, the point values are not (as the reason for the skipper is to reduce storage overhead by removing the point values index). In this case, we need to instead get the range of values within the shard by using the skipper. This patch implements that logic.
Follow-up to #123191 --- .../index/shard/SearchIdleIT.java | 11 ++++- .../index/mapper/DateFieldMapper.java | 22 +++++++++- .../index/mapper/DateFieldTypeTests.java | 42 ++++++++++++++++++- 3 files changed, 71 insertions(+), 4 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index 03319332003aa..101cb89dc02a6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -235,6 +235,15 @@ public void testSearchIdleStats() throws InterruptedException { } public void testSearchIdleBoolQueryMatchOneIndex() throws InterruptedException { + checkSearchIdleBoolQueryMatchOneIndex(IndexSettings.DOC_VALUES_SKIPPER.isEnabled()); + } + + public void testSearchIdleBoolQueryMatchOneIndexWithDocValuesSkipper() throws InterruptedException { + assumeTrue("doc values skipper feature should be enabled", IndexSettings.DOC_VALUES_SKIPPER.isEnabled()); + checkSearchIdleBoolQueryMatchOneIndex(false); + } + + private void checkSearchIdleBoolQueryMatchOneIndex(boolean disableDocValuesSkippers) throws InterruptedException { // GIVEN final String idleIndex = "test1"; final String activeIndex = "test2"; @@ -259,7 +268,7 @@ public void testSearchIdleBoolQueryMatchOneIndex() throws InterruptedException { .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2021-05-12T00:00:00.000Z") .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2021-05-13T23:59:59.999Z"); - if (IndexSettings.DOC_VALUES_SKIPPER.isEnabled()) { + if (disableDocValuesSkippers) { idleIndexSettingsBuilder.put(IndexSettings.USE_DOC_VALUES_SKIPPER.getKey(), false); activeIndexSettingsBuilder.put(IndexSettings.USE_DOC_VALUES_SKIPPER.getKey(), false); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 2a1d761512852..0bf53682d2d17 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -16,6 +16,7 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; @@ -69,6 +70,7 @@ import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; @@ -827,8 +829,24 @@ public Relation isFieldWithinQuery( QueryRewriteContext context ) throws IOException { if (isIndexed() == false && pointsMetadataAvailable == false && hasDocValues()) { - // we don't have a quick way to run this check on doc values, so fall back to default assuming we are within bounds - return Relation.INTERSECTS; + if (hasDocValuesSkipper() == false) { + // we don't have a quick way to run this check on doc values, so fall back to default assuming we are within bounds + return Relation.INTERSECTS; + } + long minValue = Long.MAX_VALUE; + long maxValue = Long.MIN_VALUE; + List leaves = reader.leaves(); + if (leaves.size() == 0) { + // no data, so nothing matches + return Relation.DISJOINT; + } + for 
(LeafReaderContext ctx : leaves) { + DocValuesSkipper skipper = ctx.reader().getDocValuesSkipper(name()); + assert skipper != null : "no skipper for field:" + name() + " and reader:" + reader; + minValue = Long.min(minValue, skipper.minValue()); + maxValue = Long.max(maxValue, skipper.maxValue()); + } + return isFieldWithinQuery(minValue, maxValue, from, to, includeLower, includeUpper, timeZone, dateParser, context); } byte[] minPackedValue = PointValues.getMinPackedValue(reader, name()); if (minPackedValue == null) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index d925a9dd1d691..ad258086affc7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -85,12 +86,51 @@ public void testIsFieldWithinQueryDateNanos() throws IOException { isFieldWithinRangeTestCase(ft); } + public void testIsFieldWithinQueryDateMillisDocValueSkipper() throws IOException { + DateFieldType ft = new DateFieldType( + "my_date", + false, + false, + false, + true, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.MILLISECONDS, + null, + null, + Collections.emptyMap() + ); + isFieldWithinRangeTestCase(ft); + } + + public void testIsFieldWithinQueryDateNanosDocValueSkipper() throws IOException { + DateFieldType ft = new DateFieldType( + "my_date", + false, + false, + false, + true, + true, + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, + Resolution.NANOSECONDS, + null, + null, + Collections.emptyMap() + ); + isFieldWithinRangeTestCase(ft); + } + public void isFieldWithinRangeTestCase(DateFieldType ft) throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); LuceneDocument doc = new LuceneDocument(); - LongPoint field = new LongPoint("my_date", ft.parse("2015-10-12")); + Field field; + if (ft.hasDocValuesSkipper()) { + field = SortedNumericDocValuesField.indexedField("my_date", ft.parse("2015-10-12")); + } else { + field = new LongPoint("my_date", ft.parse("2015-10-12")); + } doc.add(field); w.addDocument(doc); field.setLongValue(ft.parse("2016-04-03")); From a0f3b2464aafa2a0b25bc2196ae95eea004ad7e9 Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Tue, 4 Mar 2025 12:10:12 -0500 Subject: [PATCH 03/54] Remove obsolete EIS feature flag class (#123716) --- .../ElasticInferenceServiceFeature.java | 21 ------------------- 1 file changed, 21 deletions(-) delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java deleted file mode 100644 index 4ec270eef3a62..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.elastic; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * Elastic Inference Service feature flag. Not being used anymore, but we'll keep it until the controller is no longer - * passing -Des.elastic_inference_service_feature_flag_enabled=true at startup. - */ -public class ElasticInferenceServiceFeature { - - @Deprecated - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); - -} From 535dbbe0329bcf6845671af6aa2e9dc09ca2033c Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 4 Mar 2025 10:23:43 -0700 Subject: [PATCH 04/54] Add coordinating object to track bytes (#122460) This commit adds a new coordinating object to reflect reduce the need to introduce a new releasable for every network byte reference. Additionally, it will allow us to more specifically track bytes while they are buffered. --- .../action/bulk/IncrementalBulkService.java | 40 ++-- .../elasticsearch/index/IndexingPressure.java | 216 +++++++++++++----- .../rest/action/document/RestBulkAction.java | 12 +- .../index/IndexingPressureTests.java | 57 ++--- .../action/document/RestBulkActionTests.java | 12 +- 5 files changed, 226 insertions(+), 111 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index 6ce198260ba3c..cd84114755f81 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -91,19 +91,18 @@ public static class Handler implements Releasable { public static final BulkRequest.IncrementalState EMPTY_STATE = new BulkRequest.IncrementalState(Collections.emptyMap(), true); private final Client client; - private final IndexingPressure indexingPressure; private final ActiveShardCount waitForActiveShards; private final TimeValue timeout; private final String refresh; private final ArrayList releasables = new ArrayList<>(4); private final ArrayList responses = new ArrayList<>(2); + private final IndexingPressure.Incremental incrementalOperation; private boolean closed = false; private boolean globalFailure = false; private boolean incrementalRequestSubmitted = false; private boolean bulkInProgress = false; private Exception bulkActionLevelFailure = null; - private long currentBulkSize = 0L; private BulkRequest bulkRequest = null; protected Handler( @@ -114,13 +113,17 @@ protected Handler( @Nullable String refresh ) { this.client = client; - this.indexingPressure = indexingPressure; this.waitForActiveShards = waitForActiveShards != null ? 
ActiveShardCount.parseString(waitForActiveShards) : null; this.timeout = timeout; this.refresh = refresh; + this.incrementalOperation = indexingPressure.startIncrementalCoordinating(0, 0, false); createNewBulkRequest(EMPTY_STATE); } + public IndexingPressure.Incremental getIncrementalOperation() { + return incrementalOperation; + } + public void addItems(List> items, Releasable releasable, Runnable nextItems) { assert closed == false; assert bulkInProgress == false; @@ -130,7 +133,8 @@ public void addItems(List> items, Releasable releasable, Runn } else { assert bulkRequest != null; if (internalAddItems(items, releasable)) { - if (shouldBackOff()) { + if (incrementalOperation.shouldSplit()) { + IndexingPressure.Coordinating coordinating = incrementalOperation.split(); final boolean isFirstRequest = incrementalRequestSubmitted == false; incrementalRequestSubmitted = true; final ArrayList toRelease = new ArrayList<>(releasables); @@ -152,6 +156,7 @@ public void onFailure(Exception e) { } }, () -> { bulkInProgress = false; + coordinating.close(); toRelease.forEach(Releasable::close); nextItems.run(); })); @@ -164,10 +169,6 @@ public void onFailure(Exception e) { } } - private boolean shouldBackOff() { - return indexingPressure.shouldSplitBulk(currentBulkSize); - } - public void lastItems(List> items, Releasable releasable, ActionListener listener) { assert bulkInProgress == false; if (bulkActionLevelFailure != null) { @@ -176,6 +177,7 @@ public void lastItems(List> items, Releasable releasable, Act } else { assert bulkRequest != null; if (internalAddItems(items, releasable)) { + IndexingPressure.Coordinating coordinating = incrementalOperation.split(); final ArrayList toRelease = new ArrayList<>(releasables); releasables.clear(); // We do not need to set this back to false as this will be the last request. 
@@ -195,7 +197,10 @@ public void onFailure(Exception e) { handleBulkFailure(isFirstRequest, e); errorResponse(listener); } - }, () -> toRelease.forEach(Releasable::close))); + }, () -> { + coordinating.close(); + toRelease.forEach(Releasable::close); + })); } else { errorResponse(listener); } @@ -204,13 +209,17 @@ public void onFailure(Exception e) { @Override public void close() { - closed = true; - releasables.forEach(Releasable::close); - releasables.clear(); + if (closed == false) { + closed = true; + incrementalOperation.close(); + releasables.forEach(Releasable::close); + releasables.clear(); + } } private void shortCircuitDueToTopLevelFailure(List> items, Releasable releasable) { assert releasables.isEmpty(); + assert incrementalOperation.currentOperationsSize() == 0; assert bulkRequest == null; if (globalFailure == false) { addItemLevelFailures(items); @@ -228,7 +237,6 @@ private void errorResponse(ActionListener listener) { private void handleBulkSuccess(BulkResponse bulkResponse) { responses.add(bulkResponse); - currentBulkSize = 0L; bulkRequest = null; } @@ -237,7 +245,6 @@ private void handleBulkFailure(boolean isFirstRequest, Exception e) { globalFailure = isFirstRequest; bulkActionLevelFailure = e; addItemLevelFailures(bulkRequest.requests()); - currentBulkSize = 0; bulkRequest = null; } @@ -257,11 +264,11 @@ private boolean internalAddItems(List> items, Releasable rele bulkRequest.add(items); releasables.add(releasable); long size = items.stream().mapToLong(Accountable::ramBytesUsed).sum(); - releasables.add(indexingPressure.markCoordinatingOperationStarted(items.size(), size, false)); - currentBulkSize += size; + incrementalOperation.increment(items.size(), size); return true; } catch (EsRejectedExecutionException e) { handleBulkFailure(incrementalRequestSubmitted == false, e); + incrementalOperation.split().close(); releasables.forEach(Releasable::close); releasables.clear(); return false; @@ -269,7 +276,6 @@ private boolean internalAddItems(List> items, Releasable rele } private void createNewBulkRequest(BulkRequest.IncrementalState incrementalState) { - assert currentBulkSize == 0L; assert bulkRequest == null; bulkRequest = new BulkRequest(); bulkRequest.incrementalState(incrementalState); diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index 43ae38fea6018..4ebbdd22f75c2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -138,48 +138,167 @@ private static Releasable wrapReleasable(Releasable releasable) { }; } - public Releasable markCoordinatingOperationStarted(int operations, long bytes, boolean forceExecution) { - long combinedBytes = this.currentCombinedCoordinatingAndPrimaryBytes.addAndGet(bytes); - long replicaWriteBytes = this.currentReplicaBytes.get(); - long totalBytes = combinedBytes + replicaWriteBytes; - if (forceExecution == false && totalBytes > coordinatingLimit) { - long bytesWithoutOperation = combinedBytes - bytes; - long totalBytesWithoutOperation = totalBytes - bytes; - this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); - this.coordinatingRejections.getAndIncrement(); - throw new EsRejectedExecutionException( - "rejected execution of coordinating operation [" - + "coordinating_and_primary_bytes=" - + bytesWithoutOperation - + ", " - + "replica_bytes=" - + replicaWriteBytes - + ", " - + "all_bytes=" - + totalBytesWithoutOperation - + 
", " - + "coordinating_operation_bytes=" - + bytes - + ", " - + "max_coordinating_bytes=" - + coordinatingLimit - + "]", - false - ); + public Incremental startIncrementalCoordinating(int operations, long bytes, boolean forceExecution) { + Incremental coordinating = new Incremental(forceExecution); + coordinating.coordinating.increment(operations, bytes); + return coordinating; + } + + public Coordinating markCoordinatingOperationStarted(int operations, long bytes, boolean forceExecution) { + Coordinating coordinating = new Coordinating(forceExecution); + coordinating.increment(operations, bytes); + return coordinating; + } + + public class Incremental implements Releasable { + + private final AtomicBoolean closed = new AtomicBoolean(); + private final boolean forceExecution; + private long currentUnparsedSize = 0; + private long totalParsedBytes = 0; + private Coordinating coordinating; + + public Incremental(boolean forceExecution) { + this.forceExecution = forceExecution; + this.coordinating = new Coordinating(forceExecution); + } + + public long totalParsedBytes() { + return totalParsedBytes; + } + + public void incrementUnparsedBytes(long bytes) { + assert closed.get() == false; + // TODO: Implement integration with IndexingPressure for unparsed bytes + currentUnparsedSize += bytes; + } + + public void transferUnparsedBytesToParsed(long bytes) { + assert closed.get() == false; + assert currentUnparsedSize >= bytes; + currentUnparsedSize -= bytes; + totalParsedBytes += bytes; + } + + public void increment(int operations, long bytes) { + // TODO: Eventually most of the memory will already be accounted for in unparsed. + coordinating.increment(operations, bytes); + } + + public long currentOperationsSize() { + return coordinating.currentOperationsSize; + } + + public boolean shouldSplit() { + long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); + long currentOperationsSize = coordinating.currentOperationsSize; + if (currentUsage >= highWatermark && currentOperationsSize >= highWatermarkSize) { + highWaterMarkSplits.getAndIncrement(); + logger.trace( + () -> Strings.format( + "Split bulk due to high watermark: current bytes [%d] and size [%d]", + currentUsage, + currentOperationsSize + ) + ); + return true; + } + if (currentUsage >= lowWatermark && currentOperationsSize >= lowWatermarkSize) { + lowWaterMarkSplits.getAndIncrement(); + logger.trace( + () -> Strings.format( + "Split bulk due to low watermark: current bytes [%d] and size [%d]", + currentUsage, + currentOperationsSize + ) + ); + return true; + } + return false; + } + + public Coordinating split() { + Coordinating toReturn = coordinating; + coordinating = new Coordinating(forceExecution); + return toReturn; + } + + @Override + public void close() { + coordinating.close(); + } + } + + // TODO: Maybe this should be re-named and used for primary operations too. Eventually we will need to account for: ingest pipeline + // expansions, reading updates, etc. 
This could just be a generic OP that could be expanded as appropriate + public class Coordinating implements Releasable { + + private final AtomicBoolean closed = new AtomicBoolean(); + private final boolean forceExecution; + private int currentOperations = 0; + private long currentOperationsSize = 0; + + public Coordinating(boolean forceExecution) { + this.forceExecution = forceExecution; + } + + private void increment(int operations, long bytes) { + assert closed.get() == false; + long combinedBytes = currentCombinedCoordinatingAndPrimaryBytes.addAndGet(bytes); + long replicaWriteBytes = currentReplicaBytes.get(); + long totalBytes = combinedBytes + replicaWriteBytes; + if (forceExecution == false && totalBytes > coordinatingLimit) { + long bytesWithoutOperation = combinedBytes - bytes; + long totalBytesWithoutOperation = totalBytes - bytes; + currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); + coordinatingRejections.getAndIncrement(); + throw new EsRejectedExecutionException( + "rejected execution of coordinating operation [" + + "coordinating_and_primary_bytes=" + + bytesWithoutOperation + + ", " + + "replica_bytes=" + + replicaWriteBytes + + ", " + + "all_bytes=" + + totalBytesWithoutOperation + + ", " + + "coordinating_operation_bytes=" + + bytes + + ", " + + "max_coordinating_bytes=" + + coordinatingLimit + + "]", + false + ); + } + currentOperations += operations; + currentOperationsSize += bytes; + logger.trace(() -> Strings.format("adding [%d] coordinating operations and [%d] bytes", operations, bytes)); + currentCoordinatingBytes.getAndAdd(bytes); + currentCoordinatingOps.getAndAdd(operations); + totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); + totalCoordinatingBytes.getAndAdd(bytes); + totalCoordinatingOps.getAndAdd(operations); + totalCoordinatingRequests.getAndIncrement(); + } + + @Override + public void close() { + if (closed.compareAndSet(false, true)) { + logger.trace( + () -> Strings.format("removing [%d] coordinating operations and [%d] bytes", currentOperations, currentOperationsSize) + ); + currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-currentOperationsSize); + currentCoordinatingBytes.getAndAdd(-currentOperationsSize); + currentCoordinatingOps.getAndAdd(-currentOperations); + currentOperationsSize = 0; + currentOperations = 0; + } else { + logger.error("IndexingPressure memory is adjusted twice", new IllegalStateException("Releasable is called twice")); + assert false : "IndexingPressure is adjusted twice"; + } } - logger.trace(() -> Strings.format("adding [%d] coordinating operations and [%d] bytes", operations, bytes)); - currentCoordinatingBytes.getAndAdd(bytes); - currentCoordinatingOps.getAndAdd(operations); - totalCombinedCoordinatingAndPrimaryBytes.getAndAdd(bytes); - totalCoordinatingBytes.getAndAdd(bytes); - totalCoordinatingOps.getAndAdd(operations); - totalCoordinatingRequests.getAndIncrement(); - return wrapReleasable(() -> { - logger.trace(() -> Strings.format("removing [%d] coordinating operations and [%d] bytes", operations, bytes)); - this.currentCombinedCoordinatingAndPrimaryBytes.getAndAdd(-bytes); - this.currentCoordinatingBytes.getAndAdd(-bytes); - this.currentCoordinatingOps.getAndAdd(-operations); - }); } public Releasable markPrimaryOperationLocalToCoordinatingNodeStarted(int operations, long bytes) { @@ -266,21 +385,6 @@ public Releasable markReplicaOperationStarted(int operations, long bytes, boolea }); } - public boolean shouldSplitBulk(long size) { - long currentUsage = 
(currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); - if (currentUsage >= highWatermark && size >= highWatermarkSize) { - highWaterMarkSplits.getAndIncrement(); - logger.trace(() -> Strings.format("Split bulk due to high watermark: current bytes [%d] and size [%d]", currentUsage, size)); - return (true); - } - if (currentUsage >= lowWatermark && size >= lowWatermarkSize) { - lowWaterMarkSplits.getAndIncrement(); - logger.trace(() -> Strings.format("Split bulk due to low watermark: current bytes [%d] and size [%d]", currentUsage, size)); - return (true); - } - return (false); - } - public IndexingPressureStats stats() { return new IndexingPressureStats( totalCombinedCoordinatingAndPrimaryBytes.get(), diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index 944edc2e5e1f8..068ea4e48db01 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -155,7 +155,6 @@ static class ChunkHandler implements BaseRestHandler.RequestBodyChunkConsumer { private volatile RestChannel restChannel; private boolean shortCircuited; - private int bytesParsed = 0; private final ArrayDeque unParsedChunks = new ArrayDeque<>(4); private final ArrayList> items = new ArrayList<>(4); @@ -202,6 +201,7 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo bytesConsumed = 0; } else { try { + handler.getIncrementalOperation().incrementUnparsedBytes(chunk.length()); unParsedChunks.add(chunk); if (unParsedChunks.size() > 1) { @@ -210,10 +210,8 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo data = chunk; } - // TODO: Check that the behavior here vs. 
globalRouting, globalPipeline, globalRequireAlias, globalRequireDatsStream in - // BulkRequest#add is fine bytesConsumed = parser.parse(data, isLast); - bytesParsed += bytesConsumed; + handler.getIncrementalOperation().transferUnparsedBytesToParsed(bytesConsumed); } catch (Exception e) { shortCircuit(); @@ -225,7 +223,7 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo final ArrayList releasables = accountParsing(bytesConsumed); if (isLast) { assert unParsedChunks.isEmpty(); - if (bytesParsed == 0) { + if (handler.getIncrementalOperation().totalParsedBytes() == 0) { shortCircuit(); new RestToXContentListener<>(channel).onFailure(new ElasticsearchParseException("request body is required")); } else { @@ -247,7 +245,9 @@ public void handleChunk(RestChannel channel, ReleasableBytesReference chunk, boo @Override public void streamClose() { assert Transports.assertTransportThread(); - shortCircuit(); + if (shortCircuited == false) { + shortCircuit(); + } } private void shortCircuit() { diff --git a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index 8da7ada91856d..a4bc63880208a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -41,40 +41,45 @@ public void testHighAndLowWatermarkSplits() { IndexingPressure indexingPressure = new IndexingPressure(settings); try ( - Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); - Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) + IndexingPressure.Incremental coordinating1 = indexingPressure.startIncrementalCoordinating(10, randomIntBetween(1, 127), false); + IndexingPressure.Incremental coordinating2 = indexingPressure.startIncrementalCoordinating( + 10, + randomIntBetween(128, 1023), + false + ); + IndexingPressure.Incremental coordinating3 = indexingPressure.startIncrementalCoordinating( + 10, + randomIntBetween(1024, 6000), + false + ); + Releasable ignored1 = indexingPressure.startIncrementalCoordinating( + 10, + 1 + (8 * 1024) - indexingPressure.stats().getCurrentCoordinatingBytes(), + false + ) ) { - assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); + assertFalse(coordinating1.shouldSplit()); + assertFalse(coordinating2.shouldSplit()); assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); - assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); + assertTrue(coordinating3.shouldSplit()); assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { - assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); - assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); - assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + try ( + Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted( + 10, + 1 + (9 * 1024) - indexingPressure.stats().getCurrentCoordinatingBytes(), + false + ) + ) { + assertFalse(coordinating1.shouldSplit()); 
+ assertTrue(coordinating2.shouldSplit()); assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - } - } - } - - public void testHighAndLowWatermarkSettings() { - IndexingPressure indexingPressure = new IndexingPressure(settings); - - try ( - Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); - Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) - ) { - assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); - assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); - - try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { - assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); - assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + assertTrue(coordinating3.shouldSplit()); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 2L); } } } diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java index f83ba1704f954..55ad64433bf9e 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestBulkActionTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.ReleasableBytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; import org.elasticsearch.http.HttpBody; import org.elasticsearch.index.IndexVersion; @@ -224,10 +225,9 @@ public void next() { .build(); FakeRestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); - RestBulkAction.ChunkHandler chunkHandler = new RestBulkAction.ChunkHandler( - true, - request, - () -> new IncrementalBulkService.Handler(null, null, null, null, null) { + IndexingPressure indexingPressure = new IndexingPressure(Settings.EMPTY); + RestBulkAction.ChunkHandler chunkHandler = new RestBulkAction.ChunkHandler(true, request, () -> { + return new IncrementalBulkService.Handler(null, indexingPressure, null, null, null) { @Override public void addItems(List> items, Releasable releasable, Runnable nextItems) { @@ -241,8 +241,8 @@ public void lastItems(List> items, Releasable releasable, Act docs.addAll(items); isLast.set(true); } - } - ); + }; + }); chunkHandler.accept(channel); ReleasableBytesReference r1 = new ReleasableBytesReference(new BytesArray("{\"index\":{\"_index\":\"index_name\"}}\n"), () -> {}); From cc08d1d2a68078b6306ecd49ae81e53dd41d7643 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 4 Mar 2025 09:24:54 -0800 Subject: [PATCH 05/54] Add inbound_network entitlement to repository-hdfs plugin (#123907) --- .../runtime/policy/entitlements/InboundNetworkEntitlement.java | 2 +- .../src/main/plugin-metadata/entitlement-policy.yaml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java 
b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java index 7c00a53cc16cb..04f1cda86b46a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/InboundNetworkEntitlement.java @@ -15,6 +15,6 @@ * Describes an entitlement for inbound network actions (listen/accept/receive) */ public record InboundNetworkEntitlement() implements Entitlement { - @ExternalEntitlement + @ExternalEntitlement(esModulesOnly = false) public InboundNetworkEntitlement {} } diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml index 0c921d8d61696..21d5fed283531 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,6 @@ ALL-UNNAMED: - manage_threads + - inbound_network # required for kerberos principals which specify a host component - outbound_network - load_native_libraries - write_system_properties: From e54037b7d0d2e66a2e6f50809e2efb199ab7a09a Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 4 Mar 2025 09:25:11 -0800 Subject: [PATCH 06/54] Add build artifact containing json file of all wire compatible versions (#123740) --- build.gradle | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/build.gradle b/build.gradle index 6d4893b21fe48..71f8f8df9e8d7 100644 --- a/build.gradle +++ b/build.gradle @@ -229,6 +229,22 @@ tasks.register("verifyVersions") { } } +def generateUpgradeCompatibilityFile = tasks.register("generateUpgradeCompatibilityFile") { + def outputFile = project.layout.buildDirectory.file("rolling-upgrade-compatible-${VersionProperties.elasticsearch}.json") + def rollingUpgradeCompatibleVersions = buildParams.bwcVersions.wireCompatible - VersionProperties.elasticsearchVersion + inputs.property("rollingUpgradeCompatibleVersions", rollingUpgradeCompatibleVersions) + outputs.file(outputFile) + doLast { + def versionsString = rollingUpgradeCompatibleVersions.collect { "\"${it.toString()}\"" }.join(', ') + outputFile.get().asFile.write("""{"rolling_upgrade_compatible_versions" : [${versionsString}]}""") + } +} + +def upgradeCompatibilityZip = tasks.register("upgradeCompatibilityZip", Zip) { + archiveFile.set(project.layout.buildDirectory.file("rolling-upgrade-compatible-${VersionProperties.elasticsearch}.zip")) + from(generateUpgradeCompatibilityFile) +} + // TODO: This flag existed as a mechanism to disable bwc tests during a backport. It is no // longer used for that purpose, but instead a way to run only functional tests. 
We should // rework the functionalTests task to be more explicit about which tasks it wants to run @@ -483,6 +499,7 @@ tasks.register("buildReleaseArtifacts").configure { } .collect { GradleUtils.findByName(it.tasks, 'assemble') } .findAll { it != null } + dependsOn upgradeCompatibilityZip } tasks.register("spotlessApply").configure { From 77313160bfa6ff8d17d90392aa55a1ee7d0267ad Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 4 Mar 2025 17:34:50 +0000 Subject: [PATCH 07/54] [main] [ML] Use latest results index for new Anomaly Detection jobs (#122597) (#122673) * [ML] Use latest results index for new Anomaly Detection jobs (#122597) After upgrading from v7 new anomaly detection jobs should use the latest results index if one has been created. * Delete 8.x test --- .../xpack/ml/job/persistence/JobResultsProvider.java | 9 +++++++-- .../elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java | 2 +- .../xpack/test/rest/XPackRestTestConstants.java | 5 ----- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index da667fc86baef..56cd1948b021f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -125,6 +125,7 @@ import org.elasticsearch.xpack.core.ml.stats.ForecastStats; import org.elasticsearch.xpack.core.ml.stats.StatsAccumulator; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; @@ -306,11 +307,15 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(job.getId()); String writeAliasName = AnomalyDetectorsIndex.resultsWriteAlias(job.getId()); String tempIndexName = job.getInitialResultsIndexName(); + // Find all indices starting with this name and pick the latest one + String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), tempIndexName + "*"); + if (concreteIndices.length > 0) { + tempIndexName = MlIndexAndAlias.latestIndex(concreteIndices); + } // Our read/write aliases should point to the concrete index // If the initial index is NOT an alias, either it is already a concrete index, or it does not exist yet if (state.getMetadata().getProject().hasAlias(tempIndexName)) { - String[] concreteIndices = resolver.concreteIndexNames(state, IndicesOptions.lenientExpandOpen(), tempIndexName); // SHOULD NOT be closed as in typical call flow checkForLeftOverDocuments already verified this // if it is closed, we bailout and return an error @@ -324,8 +329,8 @@ public void createJobResultIndex(Job job, ClusterState state, final ActionListen ); return; } - tempIndexName = concreteIndices[0]; } + final String indexName = tempIndexName; ActionListener indexAndMappingsListener = ActionListener.wrap(success -> { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index 78f6bcd8ac9ab..9cbd6ff6296a9 
100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -279,7 +279,7 @@ private Response buildAndPutJob(String jobId, TimeValue bucketSpan) throws Excep return client().performRequest(request); } - private static List generateData( + static List generateData( long timestamp, TimeValue bucketSpan, int bucketCount, diff --git a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java index 1a0f098b45bde..93281e3453e5c 100644 --- a/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java +++ b/x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestConstants.java @@ -17,20 +17,15 @@ public final class XPackRestTestConstants { public static final String[] TEMPLATE_NAMES_NO_ILM = new String[] { HISTORY_TEMPLATE_NAME_NO_ILM }; // ML constants: - public static final String ML_META_INDEX_NAME = ".ml-meta"; - public static final String CONFIG_INDEX = ".ml-config"; public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-"; public static final String STATE_INDEX_PREFIX = ".ml-state"; - public static final String RESULTS_INDEX_DEFAULT = "shared"; public static final List ML_POST_V7120_TEMPLATES = List.of(STATE_INDEX_PREFIX, RESULTS_INDEX_PREFIX); // Transform constants: public static final String TRANSFORM_TASK_NAME = "data_frame/transforms"; public static final String TRANSFORM_INTERNAL_INDEX_PREFIX = ".transform-internal-"; - public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX = ".transform-notifications-"; public static final String TRANSFORM_INTERNAL_INDEX_PREFIX_DEPRECATED = ".data-frame-internal-"; - public static final String TRANSFORM_NOTIFICATIONS_INDEX_PREFIX_DEPRECATED = ".data-frame-notifications-"; private XPackRestTestConstants() {} } From 976c9f93751a52492883dcdaf18c08b9bd9bc54b Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 4 Mar 2025 19:21:23 +0100 Subject: [PATCH 08/54] Much faster indices lookup on metadata (#123749) We mostly need the map for lookups in very tight loops but also rarely rely on the sortedness of this thing. Getting the best of both worlds at surprisingly only a ~10% increase in build time and ~25% increase in the heap consumption of this structure provides a massive speedup to e.g. search or field_caps over large numbers of indices thanks to the up to an order of magnitude cheaper lookups in index name resolution and security (as well as speedups in other areas). 
--- .../cluster/metadata/ProjectMetadata.java | 119 +++++++++++++++++- 1 file changed, 117 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java index 24adeedd7366e..5ea0bada50946 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java @@ -46,6 +46,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -1761,7 +1762,7 @@ static SortedMap buildIndicesLookup( if (indices.isEmpty()) { return Collections.emptySortedMap(); } - SortedMap indicesLookup = new TreeMap<>(); + Map indicesLookup = new HashMap<>(); Map indexToDataStreamLookup = new HashMap<>(); collectDataStreams(dataStreamMetadata, indicesLookup, indexToDataStreamLookup); @@ -1769,7 +1770,121 @@ static SortedMap buildIndicesLookup( collectIndices(indices, indexToDataStreamLookup, indicesLookup, aliasToIndices); collectAliases(aliasToIndices, indicesLookup); - return Collections.unmodifiableSortedMap(indicesLookup); + // We do a ton of lookups on this map but also need its sorted properties at times. + // Using this hybrid of a sorted and a hash-map trades some heap overhead relative to just using a TreeMap + // for much faster O(1) lookups in large clusters. + return new SortedMap<>() { + + private final SortedMap sortedMap = Collections.unmodifiableSortedMap( + new TreeMap<>(indicesLookup) + ); + + @Override + public Comparator comparator() { + return sortedMap.comparator(); + } + + @Override + public SortedMap subMap(String fromKey, String toKey) { + return sortedMap.subMap(fromKey, toKey); + } + + @Override + public SortedMap headMap(String toKey) { + return sortedMap.headMap(toKey); + } + + @Override + public SortedMap tailMap(String fromKey) { + return sortedMap.tailMap(fromKey); + } + + @Override + public String firstKey() { + return sortedMap.firstKey(); + } + + @Override + public String lastKey() { + return sortedMap.lastKey(); + } + + @Override + public Set keySet() { + return sortedMap.keySet(); + } + + @Override + public Collection values() { + return sortedMap.values(); + } + + @Override + public Set> entrySet() { + return sortedMap.entrySet(); + } + + @Override + public int size() { + return indicesLookup.size(); + } + + @Override + public boolean isEmpty() { + return indicesLookup.isEmpty(); + } + + @Override + public boolean containsKey(Object key) { + return indicesLookup.containsKey(key); + } + + @Override + public boolean containsValue(Object value) { + return indicesLookup.containsValue(value); + } + + @Override + public IndexAbstraction get(Object key) { + return indicesLookup.get(key); + } + + @Override + public IndexAbstraction put(String key, IndexAbstraction value) { + throw new UnsupportedOperationException(); + } + + @Override + public IndexAbstraction remove(Object key) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map m) { + throw new UnsupportedOperationException(); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + return indicesLookup.equals(obj); + } + + @Override + 
public int hashCode() { + return indicesLookup.hashCode(); + } + }; } private static void collectAliases(Map> aliasToIndices, Map indicesLookup) { From b0318eccefb9f6990c04eae5d9b0a0d5c63b08e9 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 4 Mar 2025 20:04:21 +0100 Subject: [PATCH 09/54] [CI] Fix the lucene compatibility tests in intake (#124034) the last commit before the lucene update is not compatible in how we apply the buildscan plugin these days. We now instead created a branch "combat-lucene-10-0-0" where we added a compatibibility fix. Now we pick the fix commit instead. --- .buildkite/pipelines/intake.template.yml | 2 +- .buildkite/pipelines/intake.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipelines/intake.template.yml b/.buildkite/pipelines/intake.template.yml index 75c7a339b8cea..3aba186ed5abb 100644 --- a/.buildkite/pipelines/intake.template.yml +++ b/.buildkite/pipelines/intake.template.yml @@ -75,7 +75,7 @@ steps: ES_VERSION: - "9.0.0" ES_COMMIT: - - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + - "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 918a1b8a4ab8b..ab18aa381c77b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -76,7 +76,7 @@ steps: ES_VERSION: - "9.0.0" ES_COMMIT: - - "b2cc9d9b8f00ee621f93ddca07ea9c671aab1578" # update to match last commit before lucene bump + - "10352e57d85505984582616e1e38530d3ec6ca59" # update to match last commit before lucene bump maintained from combat-lucene-10-0-0 branch agents: provider: gcp image: family/elasticsearch-ubuntu-2004 From d61b86477db8c22f57a38837cee7cf8ddc3f8e3b Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 4 Mar 2025 11:32:42 -0800 Subject: [PATCH 10/54] Add jdk.management.agent module to server boot layer on start (#123938) --- .../java/org/elasticsearch/server/cli/ServerProcessBuilder.java | 1 + .../java/org/elasticsearch/packaging/util/docker/Docker.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java index 293c4af3270b9..adebf6be9842b 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java @@ -109,6 +109,7 @@ private List getJvmArgs() { esHome.resolve("lib").toString(), // Special circumstances require some modules (not depended on by the main server module) to be explicitly added: "--add-modules=jdk.net", // needed to reflectively set extended socket options + "--add-modules=jdk.management.agent", // needed by external debug tools to grab thread and heap dumps // we control the module path, which may have additional modules not required by server "--add-modules=ALL-MODULE-PATH", "-m", diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java index 808aec92fb35d..7bb45e6029409 100644 --- 
a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java @@ -187,7 +187,7 @@ public static void waitForElasticsearchToStart() { Thread.sleep(STARTUP_SLEEP_INTERVAL_MILLISECONDS); // Set COLUMNS so that `ps` doesn't truncate its output - psOutput = dockerShell.run("bash -c 'COLUMNS=2000 ps ax'").stdout(); + psOutput = dockerShell.run("bash -c 'COLUMNS=3000 ps ax'").stdout(); if (psOutput.contains("org.elasticsearch.bootstrap.Elasticsearch")) { isElasticsearchRunning = true; From b46dd1ca5cb2f17ba0e7395dc8199390f6c9b0d8 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 4 Mar 2025 15:03:46 -0700 Subject: [PATCH 11/54] Simplify check to split bulk request (#124035) Use an optional to check if bulk operation should be split. --- .../action/bulk/IncrementalBulkService.java | 12 +++--- .../elasticsearch/index/IndexingPressure.java | 11 ++--- .../index/IndexingPressureTests.java | 43 +++++++++++-------- 3 files changed, 37 insertions(+), 29 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index cd84114755f81..8713eb023dad9 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -26,6 +26,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; @@ -133,8 +134,9 @@ public void addItems(List> items, Releasable releasable, Runn } else { assert bulkRequest != null; if (internalAddItems(items, releasable)) { - if (incrementalOperation.shouldSplit()) { - IndexingPressure.Coordinating coordinating = incrementalOperation.split(); + Optional maybeSplit = incrementalOperation.maybeSplit(); + if (maybeSplit.isPresent()) { + Releasable coordinating = maybeSplit.get(); final boolean isFirstRequest = incrementalRequestSubmitted == false; incrementalRequestSubmitted = true; final ArrayList toRelease = new ArrayList<>(releasables); @@ -156,8 +158,8 @@ public void onFailure(Exception e) { } }, () -> { bulkInProgress = false; - coordinating.close(); toRelease.forEach(Releasable::close); + coordinating.close(); nextItems.run(); })); } else { @@ -177,7 +179,7 @@ public void lastItems(List> items, Releasable releasable, Act } else { assert bulkRequest != null; if (internalAddItems(items, releasable)) { - IndexingPressure.Coordinating coordinating = incrementalOperation.split(); + Releasable coordinating = incrementalOperation.split(); final ArrayList toRelease = new ArrayList<>(releasables); releasables.clear(); // We do not need to set this back to false as this will be the last request. 
@@ -198,8 +200,8 @@ public void onFailure(Exception e) { errorResponse(listener); } }, () -> { - coordinating.close(); toRelease.forEach(Releasable::close); + coordinating.close(); })); } else { errorResponse(listener); diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index 4ebbdd22f75c2..bc64df9e7bc5d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -19,6 +19,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.index.stats.IndexingPressureStats; +import java.util.Optional; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; @@ -189,7 +190,7 @@ public long currentOperationsSize() { return coordinating.currentOperationsSize; } - public boolean shouldSplit() { + public Optional maybeSplit() { long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); long currentOperationsSize = coordinating.currentOperationsSize; if (currentUsage >= highWatermark && currentOperationsSize >= highWatermarkSize) { @@ -201,7 +202,7 @@ public boolean shouldSplit() { currentOperationsSize ) ); - return true; + return Optional.of(split()); } if (currentUsage >= lowWatermark && currentOperationsSize >= lowWatermarkSize) { lowWaterMarkSplits.getAndIncrement(); @@ -212,12 +213,12 @@ public boolean shouldSplit() { currentOperationsSize ) ); - return true; + return Optional.of(split()); } - return false; + return Optional.empty(); } - public Coordinating split() { + public Releasable split() { Coordinating toReturn = coordinating; coordinating = new Coordinating(forceExecution); return toReturn; diff --git a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index a4bc63880208a..228afd30da6dc 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; +import java.util.Optional; + public class IndexingPressureTests extends ESTestCase { private final Settings settings = Settings.builder() @@ -58,28 +60,31 @@ public void testHighAndLowWatermarkSplits() { false ) ) { - assertFalse(coordinating1.shouldSplit()); - assertFalse(coordinating2.shouldSplit()); + assertFalse(coordinating1.maybeSplit().isPresent()); + assertFalse(coordinating2.maybeSplit().isPresent()); assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); - assertTrue(coordinating3.shouldSplit()); - assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); - assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - - try ( - Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted( - 10, - 1 + (9 * 1024) - indexingPressure.stats().getCurrentCoordinatingBytes(), - false - ) - ) { - assertFalse(coordinating1.shouldSplit()); - assertTrue(coordinating2.shouldSplit()); - assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + Optional split1 = coordinating3.maybeSplit(); + assertTrue(split1.isPresent()); + try (Releasable ignored2 = split1.get()) { + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); 
assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - assertTrue(coordinating3.shouldSplit()); - assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); - assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 2L); + + try ( + Releasable ignored3 = indexingPressure.markCoordinatingOperationStarted( + 10, + 1 + (9 * 1024) - indexingPressure.stats().getCurrentCoordinatingBytes(), + false + ) + ) { + assertFalse(coordinating1.maybeSplit().isPresent()); + Optional split2 = coordinating2.maybeSplit(); + assertTrue(split2.isPresent()); + try (Releasable ignored4 = split2.get()) { + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + } + } } } } From c96fcf7b72e7a74fecf879a9d836370b90517407 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 5 Mar 2025 09:39:38 +1100 Subject: [PATCH 12/54] Mute org.elasticsearch.entitlement.runtime.policy.FileAccessTreeTests testDuplicatePrunedPaths #124006 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 460d4f915bbec..250898fecb702 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -330,6 +330,9 @@ tests: - class: org.elasticsearch.xpack.esql.heap_attack.HeapAttackIT method: testLookupExplosionNoFetch issue: https://github.com/elastic/elasticsearch/issues/123432 +- class: org.elasticsearch.entitlement.runtime.policy.FileAccessTreeTests + method: testDuplicatePrunedPaths + issue: https://github.com/elastic/elasticsearch/issues/124006 # Examples: # From bef762f4dc7a5fb57f7437552fd96b7ba54ce988 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Tue, 4 Mar 2025 17:44:08 -0500 Subject: [PATCH 13/54] Remove duplicate exclusive paths (#124023) * Remove duplicate exclusive paths * Normalize paths in tests to support Windows * Remove withMode --- .../runtime/policy/FileAccessTree.java | 9 +- .../runtime/policy/FileAccessTreeTests.java | 107 ++++++++++++++++-- 2 files changed, 101 insertions(+), 15 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index 16e7239678e62..fd2590c114d0d 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -26,8 +26,10 @@ import java.util.Objects; import java.util.function.BiConsumer; +import static java.util.Comparator.comparing; import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; import static org.elasticsearch.entitlement.runtime.policy.FileUtils.PATH_ORDER; +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; public final class FileAccessTree { @@ -59,8 +61,7 @@ static List buildExclusivePathList(List } } } - exclusivePaths.sort((ep1, ep2) -> PATH_ORDER.compare(ep1.path(), ep2.path())); - return exclusivePaths; + return exclusivePaths.stream().sorted(comparing(ExclusivePath::path, PATH_ORDER)).distinct().toList(); } static void validateExclusivePaths(List exclusivePaths) { @@ -103,7 +104,7 @@ private FileAccessTree( List writePaths = new ArrayList<>(); BiConsumer addPath = (path, mode) -> { var normalized = normalizePath(path); - if (mode == 
Mode.READ_WRITE) { + if (mode == READ_WRITE) { writePaths.add(normalized); } readPaths.add(normalized); @@ -139,7 +140,7 @@ private FileAccessTree( } // everything has access to the temp dir, config dir and the jdk - addPathAndMaybeLink.accept(pathLookup.tempDir(), Mode.READ_WRITE); + addPathAndMaybeLink.accept(pathLookup.tempDir(), READ_WRITE); // TODO: this grants read access to the config dir for all modules until explicit read entitlements can be added addPathAndMaybeLink.accept(pathLookup.configDir(), Mode.READ); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index fbcbdc3ad5d18..ac9430246324f 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -11,8 +11,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.runtime.policy.FileAccessTree.ExclusiveFileEntitlement; import org.elasticsearch.entitlement.runtime.policy.FileAccessTree.ExclusivePath; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.FileData; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -26,6 +28,11 @@ import java.util.Map; import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; +import static org.elasticsearch.entitlement.runtime.policy.FileAccessTree.buildExclusivePathList; +import static org.elasticsearch.entitlement.runtime.policy.FileAccessTree.normalizePath; +import static org.elasticsearch.entitlement.runtime.policy.Platform.WINDOWS; +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ; +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -195,7 +202,7 @@ public void testNormalizePath() { } public void testNormalizeDirectorySeparatorWindows() { - assumeTrue("normalization of windows paths", Platform.WINDOWS.isCurrent()); + assumeTrue("normalization of windows paths", WINDOWS.isCurrent()); assertThat(FileAccessTree.normalizePath(Path.of("C:\\a\\b")), equalTo("C:\\a\\b")); assertThat(FileAccessTree.normalizePath(Path.of("C:/a.xml")), equalTo("C:\\a.xml")); @@ -254,7 +261,7 @@ public void testJdkAccess() { @SuppressForbidden(reason = "don't care about the directory location in tests") public void testFollowLinks() throws IOException { - assumeFalse("Windows requires admin right to create symbolic links", Platform.WINDOWS.isCurrent()); + assumeFalse("Windows requires admin right to create symbolic links", WINDOWS.isCurrent()); Path baseSourceDir = Files.createTempDirectory("fileaccess_source"); Path source1Dir = baseSourceDir.resolve("source1"); @@ -348,23 +355,101 @@ public void testInvalidExclusiveAccess() { } public void testDuplicatePrunedPaths() { - List paths = List.of("/a", "/a", "/a/b", "/a/b", "/b/c", "b/c/d", "b/c/d", "b/c/d", "e/f", "e/f"); - paths = FileAccessTree.pruneSortedPaths(paths); - assertEquals(List.of("/a", "/b/c", "b/c/d", "e/f"), paths); + List inputPaths = List.of("/a", "/a", "/a/b", "/a/b", "/b/c", "b/c/d", "b/c/d", "b/c/d", "e/f", 
"e/f"); + List outputPaths = List.of("/a", "/b/c", "b/c/d", "e/f"); + var actual = FileAccessTree.pruneSortedPaths(inputPaths.stream().map(p -> normalizePath(path(p))).toList()); + var expected = outputPaths.stream().map(p -> normalizePath(path(p))).toList(); + assertEquals(expected, actual); + } + + public void testDuplicateExclusivePaths() { + // Bunch o' handy definitions + var originalFileData = FileData.ofPath(path("/a/b"), READ).withExclusive(true); + var fileDataWithWriteMode = FileData.ofPath(path("/a/b"), READ_WRITE).withExclusive(true); + var original = new ExclusiveFileEntitlement("component1", "module1", new FilesEntitlement(List.of(originalFileData))); + var differentComponent = new ExclusiveFileEntitlement("component2", original.moduleName(), original.filesEntitlement()); + var differentModule = new ExclusiveFileEntitlement(original.componentName(), "module2", original.filesEntitlement()); + var differentPath = new ExclusiveFileEntitlement( + original.componentName(), + original.moduleName(), + new FilesEntitlement( + List.of(FileData.ofPath(path("/c/d"), originalFileData.mode()).withExclusive(originalFileData.exclusive())) + ) + ); + var differentMode = new ExclusiveFileEntitlement( + original.componentName(), + original.moduleName(), + new FilesEntitlement(List.of(fileDataWithWriteMode)) + ); + var differentPlatform = new ExclusiveFileEntitlement( + original.componentName(), + original.moduleName(), + new FilesEntitlement(List.of(originalFileData.withPlatform(WINDOWS))) + ); + var originalExclusivePath = new ExclusivePath("component1", "module1", normalizePath(path("/a/b"))); + + // Some basic tests + + assertEquals( + "Single element should trivially work", + List.of(originalExclusivePath), + buildExclusivePathList(List.of(original), TEST_PATH_LOOKUP) + ); + assertEquals( + "Two identical elements should be combined", + List.of(originalExclusivePath), + buildExclusivePathList(List.of(original, original), TEST_PATH_LOOKUP) + ); + + // Don't merge things we shouldn't + + var distinctEntitlements = List.of(original, differentComponent, differentModule, differentPath); + var distinctPaths = List.of( + originalExclusivePath, + new ExclusivePath("component2", original.moduleName(), originalExclusivePath.path()), + new ExclusivePath(original.componentName(), "module2", originalExclusivePath.path()), + new ExclusivePath(original.componentName(), original.moduleName(), normalizePath(path("/c/d"))) + ); + assertEquals( + "Distinct elements should not be combined", + distinctPaths, + buildExclusivePathList(distinctEntitlements, TEST_PATH_LOOKUP) + ); + + // Do merge things we should + + List interleavedEntitlements = new ArrayList<>(); + distinctEntitlements.forEach(e -> { + interleavedEntitlements.add(e); + interleavedEntitlements.add(original); + }); + assertEquals( + "Identical elements should be combined wherever they are in the list", + distinctPaths, + buildExclusivePathList(interleavedEntitlements, TEST_PATH_LOOKUP) + ); + + var equivalentEntitlements = List.of(original, differentMode, differentPlatform); + var equivalentPaths = List.of(originalExclusivePath); + assertEquals( + "Exclusive paths should be combined even if the entitlements are different", + equivalentPaths, + buildExclusivePathList(equivalentEntitlements, TEST_PATH_LOOKUP) + ); } public void testWindowsAbsolutPathAccess() { - assumeTrue("Specific to windows for paths with a root (DOS or UNC)", Platform.WINDOWS.isCurrent()); + assumeTrue("Specific to windows for paths with a root (DOS or UNC)", 
WINDOWS.isCurrent()); var fileAccessTree = FileAccessTree.of( "test", "test", new FilesEntitlement( List.of( - FilesEntitlement.FileData.ofPath(Path.of("\\\\.\\pipe\\"), FilesEntitlement.Mode.READ), - FilesEntitlement.FileData.ofPath(Path.of("D:\\.gradle"), FilesEntitlement.Mode.READ), - FilesEntitlement.FileData.ofPath(Path.of("D:\\foo"), FilesEntitlement.Mode.READ), - FilesEntitlement.FileData.ofPath(Path.of("C:\\foo"), FilesEntitlement.Mode.READ_WRITE) + FileData.ofPath(Path.of("\\\\.\\pipe\\"), READ), + FileData.ofPath(Path.of("D:\\.gradle"), READ), + FileData.ofPath(Path.of("D:\\foo"), READ), + FileData.ofPath(Path.of("C:\\foo"), FilesEntitlement.Mode.READ_WRITE) ) ), TEST_PATH_LOOKUP, @@ -400,7 +485,7 @@ static FilesEntitlement entitlement(Map value) { static List exclusivePaths(String componentName, String moduleName, String... paths) { List exclusivePaths = new ArrayList<>(); for (String path : paths) { - exclusivePaths.add(new ExclusivePath(componentName, moduleName, path(path).toString())); + exclusivePaths.add(new ExclusivePath(componentName, moduleName, normalizePath(path(path)))); } return exclusivePaths; } From 80cbdfc87ee92cd070cda0ae58ef23afd16f26aa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 5 Mar 2025 09:59:19 +1100 Subject: [PATCH 14/54] Mute org.elasticsearch.multiproject.test.CoreWithMultipleProjectsClientYamlTestSuiteIT test {yaml=search.vectors/41_knn_search_bbq_hnsw/Vector rescoring has same scoring as exact search for kNN section} #124052 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 250898fecb702..8d9f571025474 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -333,6 +333,9 @@ tests: - class: org.elasticsearch.entitlement.runtime.policy.FileAccessTreeTests method: testDuplicatePrunedPaths issue: https://github.com/elastic/elasticsearch/issues/124006 +- class: org.elasticsearch.multiproject.test.CoreWithMultipleProjectsClientYamlTestSuiteIT + method: test {yaml=search.vectors/41_knn_search_bbq_hnsw/Vector rescoring has same scoring as exact search for kNN section} + issue: https://github.com/elastic/elasticsearch/issues/124052 # Examples: # From 850a49be7c3b364ca4dc302cca27707675f97653 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 5 Mar 2025 10:50:26 +1100 Subject: [PATCH 15/54] Mute org.elasticsearch.smoketest.MlWithSecurityIT test {yaml=ml/3rd_party_deployment/Test start deployment fails while model download in progress} #120814 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8d9f571025474..db95b63648393 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -336,6 +336,9 @@ tests: - class: org.elasticsearch.multiproject.test.CoreWithMultipleProjectsClientYamlTestSuiteIT method: test {yaml=search.vectors/41_knn_search_bbq_hnsw/Vector rescoring has same scoring as exact search for kNN section} issue: https://github.com/elastic/elasticsearch/issues/124052 +- class: org.elasticsearch.smoketest.MlWithSecurityIT + method: test {yaml=ml/3rd_party_deployment/Test start deployment fails while model download in progress} + issue: https://github.com/elastic/elasticsearch/issues/120814 # Examples: # From 6646af015a49c9a47404346a7fd6cdbffd285d5f Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 5 Mar 2025 11:37:50 +1100 Subject: [PATCH 16/54] Use MultiProjectPendingException more consistently 
(#123955) Use the same exception class in more places where single lookup fails due to the existence of multiple projects. This allows the exception to be handled similarly, e.g. ignored in ClusterApplierService#callClusterStateListener --- .../java/org/elasticsearch/cluster/metadata/Metadata.java | 4 ++-- .../org/elasticsearch/cluster/routing/GlobalRoutingTable.java | 3 ++- .../elasticsearch/cluster/service/ClusterApplierService.java | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index c6d0944ca6d90..ab56621e748ae 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -397,7 +397,7 @@ private Metadata updateSingleProject(Function reservedStateMetadata ); } else { - throw new UnsupportedOperationException("There are multiple projects " + projectMetadata.keySet()); + throw new MultiProjectPendingException("There are multiple projects " + projectMetadata.keySet()); } } @@ -1310,7 +1310,7 @@ private ProjectMetadata.Builder getSingleProject() { if (projectMetadata.isEmpty()) { createDefaultProject(); } else if (projectMetadata.size() != 1) { - throw new UnsupportedOperationException("There are multiple projects " + projectMetadata.keySet()); + throw new MultiProjectPendingException("There are multiple projects " + projectMetadata.keySet()); } return projectMetadata.values().iterator().next(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java index 642ea54881acc..e7b707b8993c3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.DiffableUtils.KeySerializer; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Metadata.MultiProjectPendingException; import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -134,7 +135,7 @@ public RoutingTable getRoutingTable() { return switch (routingTables.size()) { case 0 -> RoutingTable.EMPTY_ROUTING_TABLE; case 1 -> routingTables.values().iterator().next(); - default -> throw new IllegalStateException("There are multiple project routing tables [" + routingTables.keySet() + "]"); + default -> throw new MultiProjectPendingException("There are multiple project routing tables [" + routingTables.keySet() + "]"); }; } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 6f1128317a462..b4a626faf79b3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -594,7 +594,7 @@ private static void callClusterStateListener( listener.clusterChanged(clusterChangedEvent); } } catch (Metadata.MultiProjectPendingException e) { - // don't warn, this fills the logs + // don't warn, this fills the logs and also slow down applier thread in CI which could cause 
unrelated failures logger.trace("ClusterStateListener not multi-project compatible", e); } catch (Exception ex) { logger.warn("failed to notify ClusterStateListener", ex); From ae1ad21aac0d8f9652bafde004010b169a5a931c Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 5 Mar 2025 17:09:36 +1100 Subject: [PATCH 17/54] Change constructor to private for ProjectMetadata (#124060) Similar to Metadata, the constructor should be private since we expect it to be created with the Builder. --- .../org/elasticsearch/cluster/metadata/ProjectMetadata.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java index 5ea0bada50946..148409a9f3b3d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java @@ -100,7 +100,7 @@ public class ProjectMetadata implements Iterable, Diffable indices, ImmutableOpenMap> aliasedIndices, From 67d0dd4df2e9df080722f9de21f89c3dc78de7bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Wed, 5 Mar 2025 07:36:55 +0100 Subject: [PATCH 18/54] [Entitlements] Add URLConnection instrumentation for file protocol (#123824) --- .../bridge/EntitlementChecker.java | 20 +++ .../qa/entitled/EntitledActions.java | 5 + .../qa/test/RestEntitlementsCheckAction.java | 1 + .../qa/test/URLConnectionFileActions.java | 117 +++++++++++++++++ .../api/ElasticsearchEntitlementChecker.java | 123 ++++++++++++++++++ 5 files changed, 266 insertions(+) create mode 100644 libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/URLConnectionFileActions.java diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index c40091f425868..4bd1c4c5d49c6 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -1214,6 +1214,26 @@ void checkPathRegister( WatchEvent.Modifier... 
modifiers ); + // URLConnection + + void check$sun_net_www_protocol_file_FileURLConnection$connect(Class callerClass, java.net.URLConnection that); + + void check$sun_net_www_protocol_file_FileURLConnection$getHeaderFields(Class callerClass, java.net.URLConnection that); + + void check$sun_net_www_protocol_file_FileURLConnection$getHeaderField(Class callerClass, java.net.URLConnection that, String name); + + void check$sun_net_www_protocol_file_FileURLConnection$getHeaderField(Class callerClass, java.net.URLConnection that, int n); + + void check$sun_net_www_protocol_file_FileURLConnection$getContentLength(Class callerClass, java.net.URLConnection that); + + void check$sun_net_www_protocol_file_FileURLConnection$getContentLengthLong(Class callerClass, java.net.URLConnection that); + + void check$sun_net_www_protocol_file_FileURLConnection$getHeaderFieldKey(Class callerClass, java.net.URLConnection that, int n); + + void check$sun_net_www_protocol_file_FileURLConnection$getLastModified(Class callerClass, java.net.URLConnection that); + + void check$sun_net_www_protocol_file_FileURLConnection$getInputStream(Class callerClass, java.net.URLConnection that); + //////////////////// // // Thread management diff --git a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java index 531ac97c65046..a1f59ce2f6006 100644 --- a/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java +++ b/libs/entitlement/qa/entitled-plugin/src/main/java/org/elasticsearch/entitlement/qa/entitled/EntitledActions.java @@ -71,4 +71,9 @@ public static URLConnection createHttpsURLConnection() throws IOException { public static URLConnection createFtpURLConnection() throws IOException { return URI.create("ftp://127.0.0.1:12345/").toURL().openConnection(); } + + public static URLConnection createFileURLConnection() throws IOException { + var fileUrl = createTempFileForWrite().toUri().toURL(); + return fileUrl.openConnection(); + } } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index ecaa76d0b9c2e..cbb5fb514ae59 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -194,6 +194,7 @@ static CheckAction alwaysDenied(CheckedRunnable action) { getTestEntries(PathActions.class), getTestEntries(SpiActions.class), getTestEntries(SystemActions.class), + getTestEntries(URLConnectionFileActions.class), getTestEntries(URLConnectionNetworkActions.class) ) .flatMap(Function.identity()) diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/URLConnectionFileActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/URLConnectionFileActions.java new file mode 100644 index 0000000000000..582d8296488c1 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/URLConnectionFileActions.java @@ -0,0 +1,117 @@ +/* + * 
Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.IOException; +import java.net.URLConnection; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; + +class URLConnectionFileActions { + + private static void withJdkFileConnection(CheckedConsumer connectionConsumer) throws Exception { + var conn = EntitledActions.createFileURLConnection(); + // Be sure we got the connection implementation we want + assert conn.getClass().getSimpleName().equals("FileURLConnection"); + try { + connectionConsumer.accept(conn); + } catch (IOException e) { + // It's OK, it means we passed entitlement checks, and we tried to perform some operation + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionConnect() throws Exception { + withJdkFileConnection(URLConnection::connect); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFields() throws Exception { + withJdkFileConnection(URLConnection::getHeaderFields); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFieldWithName() throws Exception { + withJdkFileConnection(urlConnection -> urlConnection.getHeaderField("date")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFieldWithIndex() throws Exception { + withJdkFileConnection(urlConnection -> urlConnection.getHeaderField(0)); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContentLength() throws Exception { + withJdkFileConnection(URLConnection::getContentLength); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContentLengthLong() throws Exception { + withJdkFileConnection(URLConnection::getContentLengthLong); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFieldKey() throws Exception { + withJdkFileConnection(urlConnection -> urlConnection.getHeaderFieldKey(0)); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetLastModified() throws Exception { + withJdkFileConnection(URLConnection::getLastModified); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetInputStream() throws Exception { + withJdkFileConnection(URLConnection::getInputStream); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContentType() throws Exception { + withJdkFileConnection(URLConnection::getContentType); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContentEncoding() throws Exception { + withJdkFileConnection(URLConnection::getContentEncoding); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetExpiration() throws Exception { + withJdkFileConnection(URLConnection::getExpiration); + } + + @EntitlementTest(expectedAccess = PLUGINS) + 
static void sunFileURLConnectionGetDate() throws Exception { + withJdkFileConnection(URLConnection::getDate); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFieldInt() throws Exception { + withJdkFileConnection(conn -> conn.getHeaderFieldInt("field", 0)); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetHeaderFieldLong() throws Exception { + withJdkFileConnection(conn -> conn.getHeaderFieldLong("field", 0)); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContent() throws Exception { + withJdkFileConnection(URLConnection::getContent); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void sunFileURLConnectionGetContentWithClasses() throws Exception { + withJdkFileConnection(conn -> conn.getContent(new Class[] { String.class })); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 9f80e0d4bc423..0afb3ff189850 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -49,6 +49,7 @@ import java.net.SocketAddress; import java.net.SocketImplFactory; import java.net.URI; +import java.net.URISyntaxException; import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; @@ -74,6 +75,7 @@ import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; +import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.nio.file.WatchEvent; import java.nio.file.WatchService; @@ -644,6 +646,8 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_net_URL$openConnection(Class callerClass, java.net.URL that) { if (isNetworkUrl(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrl(that)) { + checkURLFileRead(callerClass, that); } } @@ -651,6 +655,8 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_net_URL$openConnection(Class callerClass, URL that, Proxy proxy) { if (proxy.type() != Proxy.Type.DIRECT || isNetworkUrl(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrl(that)) { + checkURLFileRead(callerClass, that); } } @@ -658,6 +664,8 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_net_URL$openStream(Class callerClass, java.net.URL that) { if (isNetworkUrl(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrl(that)) { + checkURLFileRead(callerClass, that); } } @@ -665,6 +673,8 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_net_URL$getContent(Class callerClass, java.net.URL that) { if (isNetworkUrl(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrl(that)) { + checkURLFileRead(callerClass, that); } } @@ -672,6 +682,8 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { public void check$java_net_URL$getContent(Class callerClass, java.net.URL that, Class[] classes) { if (isNetworkUrl(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrl(that)) { + 
checkURLFileRead(callerClass, that); } } @@ -681,22 +693,37 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { "sun.net.www.protocol.mailto.MailToURLConnection" ); + private static final List FILE_URL_CONNECT_CLASS_NAMES = List.of("sun.net.www.protocol.file.FileURLConnection"); + private static final Set NETWORK_PROTOCOLS = Set.of("http", "https", "ftp", "mailto"); + private static final Set FILE_PROTOCOLS = Set.of("file"); + private static boolean isNetworkUrl(java.net.URL url) { return NETWORK_PROTOCOLS.contains(url.getProtocol()); } + private static boolean isFileUrl(java.net.URL url) { + return FILE_PROTOCOLS.contains(url.getProtocol()); + } + private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnection) { var connectionClass = urlConnection.getClass(); return HttpURLConnection.class.isAssignableFrom(connectionClass) || ADDITIONAL_NETWORK_URL_CONNECT_CLASS_NAMES.contains(connectionClass.getName()); } + private static boolean isFileUrlConnection(java.net.URLConnection urlConnection) { + var connectionClass = urlConnection.getClass(); + return FILE_URL_CONNECT_CLASS_NAMES.contains(connectionClass.getName()); + } + @Override public void check$java_net_URLConnection$getContentLength(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -704,6 +731,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getContentLengthLong(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -711,6 +740,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getContentType(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -718,6 +749,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getContentEncoding(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -725,6 +758,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getExpiration(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -732,6 +767,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getDate(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -739,6 +776,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public 
void check$java_net_URLConnection$getLastModified(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -751,6 +790,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti ) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -763,6 +804,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti ) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -775,6 +818,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti ) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -782,6 +827,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getContent(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -789,6 +836,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$java_net_URLConnection$getContent(Class callerClass, java.net.URLConnection that, Class[] classes) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -817,6 +866,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getHeaderField(Class callerClass, java.net.URLConnection that, String name) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -824,6 +875,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getHeaderFields(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -831,6 +884,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getHeaderFieldKey(Class callerClass, java.net.URLConnection that, int n) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -838,6 +893,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getHeaderField(Class callerClass, java.net.URLConnection that, int n) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); 
} } @@ -845,6 +902,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getContentType(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -852,6 +911,8 @@ private static boolean isNetworkUrlConnection(java.net.URLConnection urlConnecti public void check$sun_net_www_URLConnection$getContentLength(Class callerClass, java.net.URLConnection that) { if (isNetworkUrlConnection(that)) { policyManager.checkOutboundNetworkAccess(callerClass); + } else if (isFileUrlConnection(that)) { + checkURLFileRead(callerClass, that.getURL()); } } @@ -2724,4 +2785,66 @@ public void checkPathRegister( ) { policyManager.checkFileRead(callerClass, that); } + + private void checkURLFileRead(Class callerClass, URL url) { + try { + policyManager.checkFileRead(callerClass, Paths.get(url.toURI())); + } catch (URISyntaxException e) { + // We expect this method to be called only on File URLs; otherwise the underlying method would fail anyway + throw new RuntimeException(e); + } + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$connect(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getHeaderFields(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getHeaderField( + Class callerClass, + java.net.URLConnection that, + String name + ) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getHeaderField(Class callerClass, java.net.URLConnection that, int n) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getContentLength(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getContentLengthLong(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getHeaderFieldKey( + Class callerClass, + java.net.URLConnection that, + int n + ) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getLastModified(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } + + @Override + public void check$sun_net_www_protocol_file_FileURLConnection$getInputStream(Class callerClass, java.net.URLConnection that) { + checkURLFileRead(callerClass, that.getURL()); + } } From 496c38e5a5f83473a25b88117be8b7dd42d86e17 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 5 Mar 2025 08:02:13 +0100 Subject: [PATCH 19/54] Reapply "Update Gradle wrapper to 8.13 (#122421)" (#123889) (#123896) This reverts commit 36660f2e5f176d879c8deadfb097327ab526975e. 
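Beyond bumping the wrapper distribution and its checksum, the reapplied change renames the boxed-Boolean accessors on BuildParameterExtension from the is prefix to the get prefix (isCi() -> getCi(), isSnapshotBuild() -> getSnapshotBuild(), isGraalVmRuntime() -> getGraalVmRuntime()) and moves root-project file lookups over to layout.settingsDirectory. A minimal sketch of the resulting accessor shape, using a trimmed stand-in interface rather than the real BuildParameterExtension, presumably motivated by Gradle's property conventions for boxed Boolean getters:

```java
// Trimmed stand-in for illustration only; the real BuildParameterExtension
// lives in build-tools-internal and declares many more accessors.
public interface BuildParams {
    // A "get"-prefixed method is a JavaBeans property accessor even for the
    // boxed Boolean type, so Groovy build scripts can keep reading
    // buildParams.ci / buildParams.snapshotBuild while Java call sites switch
    // from buildParams.isCi() to buildParams.getCi().
    Boolean getCi();

    Boolean getSnapshotBuild();

    Boolean getGraalVmRuntime();
}
```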
--- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../remote/distribution/archives/build.gradle | 2 +- .../BaseInternalPluginBuildPlugin.java | 2 +- .../internal/ElasticsearchJavaBasePlugin.java | 2 +- .../InternalDistributionBwcSetupPlugin.java | 4 ++-- .../internal/InternalTestClustersPlugin.java | 2 +- .../internal/docker/DockerSupportPlugin.java | 2 +- .../info/BuildParameterExtension.java | 6 +++--- .../info/DefaultBuildParameterExtension.java | 12 ++++++------ .../gradle/internal/test/MutedTestPlugin.java | 2 +- .../TestFixturesDeployPlugin.java | 2 +- .../testfixtures/TestFixturesPlugin.java | 2 +- client/rest/build.gradle | 2 +- client/sniffer/build.gradle | 2 +- client/test/build.gradle | 2 +- distribution/archives/build.gradle | 4 ++-- distribution/build.gradle | 6 +++--- distribution/docker/build.gradle | 6 +++--- distribution/packages/build.gradle | 6 +++--- docs/build.gradle | 4 ++-- gradle/build.versions.toml | 3 +++ gradle/verification-metadata.xml | 6 +++--- gradle/wrapper/gradle-wrapper.jar | Bin 43583 -> 43705 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- gradlew | 2 +- libs/h3/build.gradle | 2 +- libs/tdigest/build.gradle | 2 +- modules/aggregations/build.gradle | 2 +- modules/data-streams/build.gradle | 2 +- modules/legacy-geo/build.gradle | 2 +- plugins/analysis-icu/build.gradle | 2 +- plugins/build.gradle | 6 ++---- .../examples/custom-processor/build.gradle | 4 ++-- plugins/examples/custom-settings/build.gradle | 4 ++-- .../build.gradle | 4 ++-- .../examples/custom-suggester/build.gradle | 4 ++-- .../gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../examples/painless-whitelist/build.gradle | 4 ++-- plugins/examples/rescore/build.gradle | 4 ++-- plugins/examples/rest-handler/build.gradle | 4 ++-- .../script-expert-scoring/build.gradle | 4 ++-- .../build.gradle | 4 ++-- qa/lucene-index-compatibility/build.gradle | 2 +- qa/system-indices/build.gradle | 4 ++-- qa/verify-version-constants/build.gradle | 2 +- rest-api-spec/build.gradle | 2 +- server/build.gradle | 2 +- .../apm-integration/build.gradle | 2 +- test/external-modules/build.gradle | 4 ++-- .../delayed-aggs/build.gradle | 2 +- .../die-with-dignity/build.gradle | 2 +- .../external-modules/error-query/build.gradle | 2 +- .../esql-heap-attack/build.gradle | 2 +- test/external-modules/jvm-crash/build.gradle | 2 +- x-pack/build.gradle | 4 ++-- x-pack/plugin/analytics/build.gradle | 2 +- x-pack/plugin/build.gradle | 2 +- x-pack/plugin/core/build.gradle | 6 +++--- x-pack/plugin/eql/build.gradle | 2 +- x-pack/plugin/esql/build.gradle | 4 ++-- .../mapper-constant-keyword/build.gradle | 2 +- x-pack/plugin/ml/build.gradle | 2 +- x-pack/plugin/spatial/build.gradle | 2 +- .../build.gradle | 2 +- .../build.gradle | 2 +- .../build.gradle | 2 +- 66 files changed, 104 insertions(+), 103 deletions(-) diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index b8cea9f02a5bf..2a6e21b2ba89a 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=296742a352f0b20ec14b143fb684965ad66086c7810b7b255dee216670716175 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip +distributionSha256Sum=fba8464465835e74f7270bbf43d6d8a8d7709ab0a43ce1aa3323f73e9aa0c612 
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/archives/build.gradle b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/archives/build.gradle index c671c18cad030..cf74de2286583 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/archives/build.gradle +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/distribution/archives/build.gradle @@ -34,7 +34,7 @@ subprojects { } def calculateBranchVersion() { - File f = rootProject.file(".git/refs/heads/origin") + File f = layout.settingsDirectory.file(".git/refs/heads/origin").asFile def branchName = f.list()?.first().trim() return branchName + ".1" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java index 2b79bc2b9173e..6a1df303d6d9e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BaseInternalPluginBuildPlugin.java @@ -39,7 +39,7 @@ public void apply(Project project) { project.getPluginManager().apply(JarHellPrecommitPlugin.class); project.getPluginManager().apply(ElasticsearchJavaPlugin.class); project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class); - boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi(); + boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).getCi(); // Clear default dependencies added by public PluginBuildPlugin as we add our // own project dependencies for internal builds // TODO remove once we removed default dependencies from PluginBuildPlugin diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index ce779343dfea9..70f6cecb8e725 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -161,7 +161,7 @@ public void configureCompile(Project project) { compileTask.getConventionMapping().map("sourceCompatibility", () -> java.getSourceCompatibility().toString()); compileTask.getConventionMapping().map("targetCompatibility", () -> java.getTargetCompatibility().toString()); compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); - compileOptions.setIncremental(buildParams.isCi() == false); + compileOptions.setIncremental(buildParams.getCi() == false); }); // also apply release flag to groovy, which is used in build-tools project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index 
fea895f90f91f..2d72c4eeaeb00 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -73,7 +73,7 @@ public void apply(Project project) { project.getPlugins().apply(JvmToolchainsPlugin.class); toolChainService = project.getExtensions().getByType(JavaToolchainService.class); var buildParams = loadBuildParams(project).get(); - Boolean isCi = buildParams.isCi(); + Boolean isCi = buildParams.getCi(); buildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { configureBwcProject( project.project(unreleasedVersion.gradleProjectPath()), @@ -365,7 +365,7 @@ static void createBuildBwcTask( } else { c.getOutputs().files(expectedOutputFile); } - c.getOutputs().doNotCacheIf("BWC distribution caching is disabled for local builds", task -> buildParams.isCi() == false); + c.getOutputs().doNotCacheIf("BWC distribution caching is disabled for local builds", task -> buildParams.getCi() == false); c.getArgs().add("-p"); c.getArgs().add(projectPath); c.getArgs().add(assembleTaskName); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index c618fe6c2e1bf..f1b96016286df 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -30,7 +30,7 @@ public void apply(Project project) { TestClustersPlugin testClustersPlugin = project.getPlugins().apply(TestClustersPlugin.class); testClustersPlugin.setRuntimeJava(buildParams.getRuntimeJavaHome()); testClustersPlugin.setIsReleasedVersion( - version -> (version.equals(VersionProperties.getElasticsearchVersion()) && buildParams.isSnapshotBuild() == false) + version -> (version.equals(VersionProperties.getElasticsearchVersion()) && buildParams.getSnapshotBuild() == false) || buildParams.getBwcVersions().unreleasedInfo(version) == null ); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java index 7348181c4199c..7ec35ccd32e10 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/docker/DockerSupportPlugin.java @@ -40,7 +40,7 @@ public void apply(Project project) { .getSharedServices() .registerIfAbsent(DOCKER_SUPPORT_SERVICE_NAME, DockerSupportService.class, spec -> spec.parameters(params -> { params.setExclusionsFile(new File(project.getRootDir(), DOCKER_ON_LINUX_EXCLUSIONS_FILE)); - params.getIsCI().set(buildParams.isCi()); + params.getIsCI().set(buildParams.getCi()); })); // Ensure that if we are trying to run any DockerBuildTask tasks, we assert an available Docker installation exists diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java index ef9055b3728d3..d6c8e38dac2c3 100644 --- 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParameterExtension.java @@ -54,11 +54,11 @@ public interface BuildParameterExtension { String getTestSeed(); - Boolean isCi(); + Boolean getCi(); Integer getDefaultParallel(); - Boolean isSnapshotBuild(); + Boolean getSnapshotBuild(); BwcVersions getBwcVersions(); @@ -66,5 +66,5 @@ public interface BuildParameterExtension { Random getRandom(); - Boolean isGraalVmRuntime(); + Boolean getGraalVmRuntime(); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java index 283c02428e4e6..760664f9fa025 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/DefaultBuildParameterExtension.java @@ -42,7 +42,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx private final String testSeed; private final Boolean isCi; private final Integer defaultParallel; - private final Boolean isSnapshotBuild; + private final Boolean snapshotBuild; // not final for testing private Provider bwcVersions; @@ -81,7 +81,7 @@ public DefaultBuildParameterExtension( this.testSeed = testSeed; this.isCi = isCi; this.defaultParallel = defaultParallel; - this.isSnapshotBuild = isSnapshotBuild; + this.snapshotBuild = isSnapshotBuild; this.bwcVersions = cache(providers, bwcVersions); this.gitOrigin = gitOrigin; } @@ -183,7 +183,7 @@ public String getTestSeed() { } @Override - public Boolean isCi() { + public Boolean getCi() { return isCi; } @@ -193,8 +193,8 @@ public Integer getDefaultParallel() { } @Override - public Boolean isSnapshotBuild() { - return isSnapshotBuild; + public Boolean getSnapshotBuild() { + return snapshotBuild; } @Override @@ -208,7 +208,7 @@ public Random getRandom() { } @Override - public Boolean isGraalVmRuntime() { + public Boolean getGraalVmRuntime() { return runtimeJavaDetails.get().toLowerCase().contains("graalvm"); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java index c13a5f0e4d30d..ea6e8f0f1b01e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/MutedTestPlugin.java @@ -51,7 +51,7 @@ public void apply(Project project) { } // Don't fail when all tests are ignored when running in CI - filter.setFailOnNoMatchingTests(buildParams.isCi() == false); + filter.setFailOnNoMatchingTests(buildParams.getCi() == false); }); }); } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java index a934164d11af6..08484346e6908 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesDeployPlugin.java @@ -33,7 +33,7 @@ public void apply(Project 
project) {
        var buildParams = loadBuildParams(project).get();
        NamedDomainObjectContainer<TestFixtureDeployment> fixtures = project.container(TestFixtureDeployment.class);
        project.getExtensions().add("dockerFixtures", fixtures);
-       registerDeployTaskPerFixture(project, fixtures, buildParams.isCi());
+       registerDeployTaskPerFixture(project, fixtures, buildParams.getCi());
        project.getTasks().register(DEPLOY_FIXTURE_TASK_NAME, task -> task.dependsOn(project.getTasks().withType(DockerBuildTask.class)));
    }
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
index ab28a66d93065..2c40b56c00b9a 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java
@@ -131,7 +131,7 @@ public void apply(Project project) {
        tasks.withType(ComposeUp.class).named("composeUp").configure(t -> {
            // Avoid running docker-compose tasks in parallel in CI due to some issues on certain Linux distributions
-           if (buildParams.isCi()) {
+           if (buildParams.getCi()) {
                t.usesService(dockerComposeThrottle);
                t.usesService(dockerSupport);
            }
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 3fb2aa6595869..7a07f50e4b98d 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -37,7 +37,7 @@ base {

 // LLRC is licenses under Apache 2.0
 projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
-licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
+licenseFile.set(layout.getSettingsDirectory().file('licenses/APACHE-LICENSE-2.0.txt').asFile)

 dependencies {
   api "org.apache.httpcomponents:httpclient:${versions.httpclient}"
diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle
index 9b1cb1140311b..38fc949a5c7c8 100644
--- a/client/sniffer/build.gradle
+++ b/client/sniffer/build.gradle
@@ -33,7 +33,7 @@ base {

 // rest client sniffer is licenses under Apache 2.0
 projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
-licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
+licenseFile.set(layout.getSettingsDirectory().file('licenses/APACHE-LICENSE-2.0.txt').asFile)

 dependencies {
   api project(":client:rest")
diff --git a/client/test/build.gradle b/client/test/build.gradle
index 3b7f62fd8ef58..27b1577ce3098 100644
--- a/client/test/build.gradle
+++ b/client/test/build.gradle
@@ -19,7 +19,7 @@ group = "${group}.client.test"

 // rest client sniffer is licenses under Apache 2.0
 projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
-licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
+licenseFile.set(layout.getSettingsDirectory().file('licenses/APACHE-LICENSE-2.0.txt').asFile)

 dependencies {
   api "org.apache.httpcomponents:httpcore:${versions.httpcore}"
diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle
index 7bbfb0f313e55..ddfdaa69bb392 100644
--- a/distribution/archives/build.gradle
+++ b/distribution/archives/build.gradle
@@ -57,13 +57,13 @@ CopySpec archiveFiles(String distributionType, String os, String architecture, b
      pluginsDir.getParent()
    }
  }
-  from(rootProject.projectDir) {
+  from(layout.settingsDirectory.asFile) {
    filePermissions {
      unix(0644)
    }
    include 'README.asciidoc'
  }
-  from(rootProject.file('licenses')) {
+  from(layout.settingsDirectory.file('licenses').asFile) {
    include isTestDistro ? 'AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt' : 'ELASTIC-LICENSE-2.0.txt'
    filePermissions {
      unix(0644)
diff --git a/distribution/build.gradle b/distribution/build.gradle
index e0302a081ce68..784d730acbf41 100644
--- a/distribution/build.gradle
+++ b/distribution/build.gradle
@@ -377,7 +377,7 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
          exclude "**/platform/${excludePlatform}/**"
        }
      }
-      if (buildParams.isSnapshotBuild()) {
+      if (buildParams.getSnapshotBuild()) {
        from(buildExternalTestModulesTaskProvider)
      }
      if (project.path.startsWith(':distribution:packages')) {
@@ -518,9 +518,9 @@ subprojects {

      String licenseText
      if (isTestDistro) {
-        licenseText = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').getText('UTF-8')
+        licenseText = layout.settingsDirectory.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile.getText('UTF-8')
      } else {
-        licenseText = rootProject.file('licenses/ELASTIC-LICENSE-2.0.txt').getText('UTF-8')
+        licenseText = layout.settingsDirectory.file('licenses/ELASTIC-LICENSE-2.0.txt').asFile.getText('UTF-8')
      }
      // license text needs to be indented with a single space
      licenseText = ' ' + licenseText.replace('\n', '\n ')
diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle
index 204cfc18950a8..df23019018733 100644
--- a/distribution/docker/build.gradle
+++ b/distribution/docker/build.gradle
@@ -119,7 +119,7 @@ ext.expansions = { Architecture architecture, DockerBase base ->
  // the image. When developing the Docker images, it's very tedious to completely rebuild
  // an image for every single change. Therefore, outside of CI, we fix the
  // build time to midnight so that the Docker build cache is usable.
-  def buildDate = buildParams.isCi() ? buildParams.buildDate : buildParams.buildDate.truncatedTo(ChronoUnit.DAYS).toString()
+  def buildDate = buildParams.ci ? buildParams.buildDate : buildParams.buildDate.truncatedTo(ChronoUnit.DAYS).toString()

  return [
    'arch' : architecture.classifier,
@@ -389,7 +389,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) {
    dockerContext.fileProvider(transformTask.map { Sync task -> task.getDestinationDir() })

-    noCache = buildParams.isCi()
+    noCache = buildParams.ci
    tags = generateTags(base, architecture)
    platforms.add(architecture.dockerPlatform)
@@ -484,7 +484,7 @@ void addBuildEssDockerImageTask(Architecture architecture) {
    dockerContext.fileProvider(buildContextTask.map { it.getDestinationDir() })

-    noCache = buildParams.isCi()
+    noCache = buildParams.ci
    baseImages = []
    tags = generateTags(dockerBase, architecture)
    platforms.add(architecture.dockerPlatform)
diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle
index b7ba4e32edae3..9129c9a12fc9e 100644
--- a/distribution/packages/build.gradle
+++ b/distribution/packages/build.gradle
@@ -43,7 +43,7 @@ import java.util.regex.Pattern
  */

 plugins {
-  id "com.netflix.nebula.ospackage-base" version "11.10.1"
+  alias(buildLibs.plugins.ospackage)
 }

 ['deb', 'rpm'].each { type ->
@@ -174,7 +174,7 @@ def commonPackageConfig(String type, String architecture) {
      } else {
        assert type == 'rpm'
        into('/usr/share/elasticsearch') {
-          from(rootProject.file('licenses')) {
+          from(layout.settingsDirectory.file('licenses').asFile) {
            include 'ELASTIC-LICENSE-2.0.txt'
            rename { 'LICENSE.txt' }
          }
@@ -300,7 +300,7 @@ ospackage {
    url = 'https://www.elastic.co/'

    // signing setup
-    if (project.hasProperty('signing.password') && buildParams.isSnapshotBuild() == false) {
+    if (project.hasProperty('signing.password') && buildParams.snapshotBuild == false) {
      signingKeyId = project.hasProperty('signing.keyId') ? project.property('signing.keyId') : 'D88E42B4'
      signingKeyPassphrase = project.property('signing.password')
      signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ?
diff --git a/docs/build.gradle b/docs/build.gradle
index 0ebb4b498eabd..8465fa8bef762 100644
--- a/docs/build.gradle
+++ b/docs/build.gradle
@@ -38,7 +38,7 @@ ext.docsFileTree = fileTree(projectDir) {

 tasks.named("yamlRestTest") {
   enabled = false
-  if (buildParams.isSnapshotBuild() == false) {
+  if (buildParams.snapshotBuild == false) {
    // LOOKUP is not available in snapshots
    systemProperty 'tests.rest.blacklist', [
      "reference/esql/processing-commands/lookup/esql-lookup-example"
@@ -84,7 +84,7 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach {
  setting 'xpack.license.self_generated.type', 'trial'
  setting 'indices.lifecycle.history_index_enabled', 'false'
  keystorePassword 'keystore-password'
-  if (buildParams.isSnapshotBuild() == false) {
+  if (buildParams.snapshotBuild == false) {
    requiresFeature 'es.failure_store_feature_flag_enabled', new Version(8, 12, 0)
  }
 }
diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml
index e86a6fe505861..822514abaa40e 100644
--- a/gradle/build.versions.toml
+++ b/gradle/build.versions.toml
@@ -47,3 +47,6 @@ spock-platform = { group = "org.spockframework", name="spock-bom", version.ref="
 spotless-plugin = "com.diffplug.spotless:spotless-plugin-gradle:6.25.0"
 wiremock = "com.github.tomakehurst:wiremock-jre8-standalone:2.23.2"
 xmlunit-core = "org.xmlunit:xmlunit-core:2.8.2"
+
+[plugins]
+ospackage = { id = "com.netflix.nebula.ospackage-base", version = "11.11.1" }
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index d66802f49b4c7..010ac5528f46e 100644
--- a/gradle/verification-metadata.xml
+++ b/gradle/verification-metadata.xml
@@ -949,9 +949,9 @@
-
-
-
+
+
+
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index a4b76b9530d66f5e68d973ea569d8e19de379189..9bbc975c742b298b441bfb90dbc124400a3751b9 100644
GIT binary patch
delta 34744
[base85-encoded binary delta omitted]

delta 34727
[base85-encoded binary delta omitted]
z+dA2~9tVLR*2#}wl3kX<%G~y*mW&hYC(@b49;C3o^Z~v_7$_x*N|I|v`&i45IX|B1=4vaVd3PpNY;;~A ztC*Q@XS!v7{8;phXUsnbA-TMXmOWsCxte$qib6tBnljH_wrg(qy)J~r(YKJKiI^@L z32i1FU~UBL+>rPfVS4sWYUk4F-yrQH&d^$snQ+bh=Grrl*yp_Y6P_G42ksY7{XDy!@BpD zR7o?eFWUQz?llUyQc1AcFyYNn=wV8H2Y518w=C)>qG}Dt!QVs|`{G*hTt>yKL6|Aws-73L-7Tq6n*O^57tyDvcRy5%UYtiLUv~R9V`;&h>u37{T3v< zEBXKCudNlzz882L^h?Hd@5OHmzJA%W>qTRDqg3I?%i+B{zU6xQGfmPHm>A*ke=Wu%L&yh?jK4PyH&G0^GizJmh0C&7taf*Z*5)C+PrUhW`)J}iYwoBdLQi! zymZKrJCpl-q=9Zvghi#~YAfIYXmtHkldpVts$g2*daUr-xl%9PhOn4}vooBx z>sA*WndWYo;?1g_Qz?|5Q#tKlD@&m0iOKa%0)at}MK@K>9kr5nK3KR%deeuEts7sf z9Dg_AUd*L9mK#SdF{`(~aW#FXyi>J;`E;$gPED!!y#?=?Rxim}-+3Z4@##G+!MZhz z50xuMN%s8Om$^jdSm8%LMah3l>iHvAE_{D<+mdXX^!xL>&-kvnt+rg?s><9=mrW;J z&Qr=2>`l|(aq0Wtdz>+x-?%TZ)a{LWl(}xNs*L|lqZ_YV_D(#0Z&u%0rJSw3cc&kg zTTm!^QnsnpO-XUv+E03`riaII-*pXraqE>~$i|mBB|)aSMoyPc3anhatYF66U$rZK z@Pj%~f{}?Yf+zRPUCBB*p(;Xgvemp~mc!G9W=>u>PmIY$U~=F*naQ;RqLUx26kvti zt^R+WC=uynoD+HdCGWoQ!JlHzW4QPvi zy~J8z4dn~9WW=t+?#W_cFh)`QKm$p!HY@l>rpW?}M47_1;Syepv}BO) z$+1T4#Ch@z3~DGQ#h6Y$uviIrMFm75 z_%L*!57z*(4vNChmOzE>vXH}}85rgOPp3!q)hcU-$qx2Xliyn_gY1-rpH~bFEJqZh zgzZ5py}_#B$KL`~*`cTsa%7ln@8|(`KjI`-1_pf;RUXchA1oD}+`rUR8gbAhx`j5A z?=OvI1)s+^*>RaD(_NscOXVhOdMbiVM;w*|Je&{3bX^~yLfOd=mdVS&4_g5`R2N0j zt5C2L43-axH1|&#=Wr3=B#r3YSm5zuZm+d94eoZBHsE zKUgk1*`f-PT@V9^3=9e=25qVaDwLVLbA`MNVnm36K^{dBLpRu2{@vi5DT5dWK~EIW&pHfkaU4roNf6g>=uCr>T__Rcg`=}3c15@4P_ a%EQ2*fnt2> 'http://www.apache.org/licenses/LICENSE-2.0')]) -licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) +licenseFile.set(layout.settingsDirectory.file('licenses/APACHE-LICENSE-2.0.txt').asFile) tasks.withType(LicenseHeadersTask.class).configureEach { approvedLicenses = ['Apache', 'Generated', 'Vendored'] diff --git a/libs/tdigest/build.gradle b/libs/tdigest/build.gradle index b79a6ce0a486a..47fc0dbc239cf 100644 --- a/libs/tdigest/build.gradle +++ b/libs/tdigest/build.gradle @@ -37,7 +37,7 @@ tasks.named('forbiddenApisMain').configure { } ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) -licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) +licenseFile.set(layout.settingsDirectory.file('licenses/APACHE-LICENSE-2.0.txt').asFile) tasks.withType(LicenseHeadersTask.class).configureEach { approvedLicenses = ['Apache', 'Generated', 'Vendored'] diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index d65b6e8fd2ddd..70e679c9d786e 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -26,7 +26,7 @@ restResources { } } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index bf961dcf4297c..62c3efdc5ecc5 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -35,7 +35,7 @@ if (buildParams.inFipsJvm){ tasks.named("yamlRestTest").configure{enabled = false } } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.withType(Test).configureEach { systemProperty 'es.failure_store_feature_flag_enabled', 'true' } diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index 4ed8d84ab96bc..f26c828897642 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -24,7 +24,7 @@ dependencies { 
testImplementation project(":test:framework") } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 0d576d316f855..c96c86976ac57 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -26,7 +26,7 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/plugins/build.gradle b/plugins/build.gradle index 32fd646ef0be8..ba9d7851d9d9a 100644 --- a/plugins/build.gradle +++ b/plugins/build.gradle @@ -23,11 +23,9 @@ configure(subprojects.findAll { it.parent.path == project.path }) { esplugin { // for local ES plugins, the name of the plugin is the same as the directory name = project.name - - licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } - parent.artifacts.add('allPlugins', tasks.named('bundlePlugin')) } diff --git a/plugins/examples/custom-processor/build.gradle b/plugins/examples/custom-processor/build.gradle index 6c0281d899a4e..9358f4d4998ae 100644 --- a/plugins/examples/custom-processor/build.gradle +++ b/plugins/examples/custom-processor/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'custom-processor' description = 'An example plugin showing how to register a custom ingest processor' classname ='org.elasticsearch.example.customprocessor.ExampleProcessorPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index 2774bf6e75c78..556cddc8efae7 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'custom-settings' description = 'An example plugin showing how to register custom settings' classname ='org.elasticsearch.example.customsettings.ExampleCustomSettingsPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } testClusters.configureEach { diff --git a/plugins/examples/custom-significance-heuristic/build.gradle b/plugins/examples/custom-significance-heuristic/build.gradle index f2f0cefa6d6f5..766bfa5014a56 100644 --- a/plugins/examples/custom-significance-heuristic/build.gradle +++ b/plugins/examples/custom-significance-heuristic/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'custom-significance-heuristic' description = 'An example plugin showing how to write and register a custom significance heuristic' classname 
='org.elasticsearch.example.customsigheuristic.CustomSignificanceHeuristicPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/plugins/examples/custom-suggester/build.gradle b/plugins/examples/custom-suggester/build.gradle index a1cf345f5e819..bc5877f79a3d9 100644 --- a/plugins/examples/custom-suggester/build.gradle +++ b/plugins/examples/custom-suggester/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'custom-suggester' description = 'An example plugin showing how to write and register a custom suggester' classname ='org.elasticsearch.example.customsuggester.CustomSuggesterPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } testClusters.configureEach { diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index b8cea9f02a5bf..2a6e21b2ba89a 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=296742a352f0b20ec14b143fb684965ad66086c7810b7b255dee216670716175 -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12.1-all.zip +distributionSha256Sum=fba8464465835e74f7270bbf43d6d8a8d7709ab0a43ce1aa3323f73e9aa0c612 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-all.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index f501bd466ebe5..2b0df5c710a58 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -14,8 +14,8 @@ esplugin { description = 'An example whitelisting additional classes and methods in painless' classname ='org.elasticsearch.example.painlesswhitelist.MyWhitelistPlugin' extendedPlugins = ['lang-painless'] - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/plugins/examples/rescore/build.gradle b/plugins/examples/rescore/build.gradle index 023033349dd8c..4b39befb1dfdf 100644 --- a/plugins/examples/rescore/build.gradle +++ b/plugins/examples/rescore/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'example-rescore' description = 'An example plugin implementing rescore and verifying that plugins *can* implement rescore' classname ='org.elasticsearch.example.rescore.ExampleRescorePlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git 
a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index 43590b166a545..872edf00617f5 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'rest-handler' description = 'An example plugin showing how to register a REST handler' classname ='org.elasticsearch.example.resthandler.ExampleRestHandlerPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/plugins/examples/script-expert-scoring/build.gradle b/plugins/examples/script-expert-scoring/build.gradle index 0fb1baaea2f03..159129872176b 100644 --- a/plugins/examples/script-expert-scoring/build.gradle +++ b/plugins/examples/script-expert-scoring/build.gradle @@ -13,8 +13,8 @@ esplugin { name = 'script-expert-scoring' description = 'An example script engine to use low level Lucene internals for expert scoring' classname ='org.elasticsearch.example.expertscript.ExpertScriptPlugin' - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/plugins/examples/security-authorization-engine/build.gradle b/plugins/examples/security-authorization-engine/build.gradle index faf32774a20ac..ea147cb730c5e 100644 --- a/plugins/examples/security-authorization-engine/build.gradle +++ b/plugins/examples/security-authorization-engine/build.gradle @@ -6,8 +6,8 @@ esplugin { description = 'An example spi extension plugin for security that implements an Authorization Engine' classname ='org.elasticsearch.example.AuthorizationEnginePlugin' extendedPlugins = ['x-pack-security'] - licenseFile = rootProject.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } dependencies { diff --git a/qa/lucene-index-compatibility/build.gradle b/qa/lucene-index-compatibility/build.gradle index 3b2e69ec9859f..50ecc66f1cc53 100644 --- a/qa/lucene-index-compatibility/build.gradle +++ b/qa/lucene-index-compatibility/build.gradle @@ -16,7 +16,7 @@ buildParams.bwcVersions.withLatestReadOnlyIndexCompatible { bwcVersion -> usesBwcDistribution(bwcVersion) // Tests rely on unreleased code in 8.18 branch - enabled = buildParams.isSnapshotBuild() + enabled = buildParams.snapshotBuild } } diff --git a/qa/system-indices/build.gradle b/qa/system-indices/build.gradle index c619d4f02e527..cbd7da37b49f3 100644 --- a/qa/system-indices/build.gradle +++ b/qa/system-indices/build.gradle @@ -14,8 +14,8 @@ esplugin { name = 'system-indices-qa' description = 'Plugin for performing QA of system indices' classname ='org.elasticsearch.system.indices.SystemIndicesQA' - licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = 
layout.settingsDirectory.file('NOTICE.txt').asFile } testClusters.configureEach { diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index 67fc962e087cb..c5f8fcabbda2a 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -38,7 +38,7 @@ buildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> tasks.register("verifyDocsLuceneVersion") { doFirst { - File docsVersionsFile = rootProject.file('docs/Versions.asciidoc') + File docsVersionsFile = layout.settingsDirectory.file('docs/Versions.asciidoc').asFile List versionLines = docsVersionsFile.readLines('UTF-8') String docsLuceneVersion = null for (String line : versionLines) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 1f5de727fbc0c..0d598016facff 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -14,7 +14,7 @@ restResources { // REST API specifications are published under the Apache 2.0 License ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')]) -licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt')) +licenseFile.set(layout.settingsDirectory.file('licenses/APACHE-LICENSE-2.0.txt').asFile) configurations { // configuration to make use by external yaml rest test plugin in our examples diff --git a/server/build.gradle b/server/build.gradle index 4b952a60b8b5e..8bed775e4efbe 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -131,7 +131,7 @@ def generatePluginsList = tasks.register("generatePluginsList") { sourceSets.main.output.dir(generatedResourcesDir) sourceSets.main.compiledBy(generateModulesList, generatePluginsList) -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' systemProperty 'es.failure_store_feature_flag_enabled', 'true' diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index 4a5d6d9d6d914..63e149490a9a4 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -25,7 +25,7 @@ tasks.named("test").configure { } tasks.named('javaRestTest').configure { - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.snapshotBuild } } dependencies { diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index 47b909dbd708f..dfdc47d9f5beb 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -12,7 +12,7 @@ subprojects { esplugin { name = it.name - licenseFile = rootProject.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt') - noticeFile = rootProject.file('NOTICE.txt') + licenseFile = layout.settingsDirectory.file('licenses/AGPL-3.0+SSPL-1.0+ELASTIC-LICENSE-2.0.txt').asFile + noticeFile = layout.settingsDirectory.file('NOTICE.txt').asFile } } diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index 5d1291c8ee503..09fe28a459047 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + 
it.onlyIf("snapshot build") { buildParams.snapshotBuild } } esplugin { diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index a6622997bf5db..dbe3c1579b706 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -25,5 +25,5 @@ tasks.named("test").configure { } tasks.named('javaRestTest').configure { - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.snapshotBuild } } diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index 8ac47c339f21d..3131e9bde004a 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -10,7 +10,7 @@ apply plugin: 'elasticsearch.legacy-yaml-rest-test' tasks.named('yamlRestTest').configure { - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.snapshotBuild } } esplugin { diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle index fa8c43048a6d6..e760f1bd067c2 100644 --- a/test/external-modules/esql-heap-attack/build.gradle +++ b/test/external-modules/esql-heap-attack/build.gradle @@ -20,5 +20,5 @@ esplugin { tasks.named('javaRestTest') { usesDefaultDistribution() - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.snapshotBuild } } diff --git a/test/external-modules/jvm-crash/build.gradle b/test/external-modules/jvm-crash/build.gradle index bc1a96836889b..cf67fdc786e26 100644 --- a/test/external-modules/jvm-crash/build.gradle +++ b/test/external-modules/jvm-crash/build.gradle @@ -20,5 +20,5 @@ esplugin { tasks.named('javaRestTest') { usesDefaultDistribution() - it.onlyIf("snapshot build") { buildParams.isSnapshotBuild() } + it.onlyIf("snapshot build") { buildParams.snapshotBuild } } diff --git a/x-pack/build.gradle b/x-pack/build.gradle index da21ffc829d03..b7f38b61a61a0 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -26,7 +26,7 @@ subprojects { ext.xpackModule = { String moduleName -> ":x-pack:plugin:${moduleName}" } plugins.withType(PluginBuildPlugin).whenPluginAdded { - project.esplugin.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE-2.0.txt') + project.esplugin.licenseFile = layout.settingsDirectory.file('licenses/ELASTIC-LICENSE-2.0.txt').asFile project.esplugin.noticeFile = xpackRootProject.file('NOTICE.txt') } @@ -40,7 +40,7 @@ subprojects { } project.pluginManager.withPlugin("elasticsearch.build") { - project.ext.licenseFile.set(rootProject.file('licenses/ELASTIC-LICENSE-2.0.txt')) + project.ext.licenseFile.set(layout.settingsDirectory.file('licenses/ELASTIC-LICENSE-2.0.txt').asFile) project.ext.noticeFile.set(xpackRootProject.file('NOTICE.txt')) } } diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle index 7aaaaaf668643..b69e9f0ec3ec2 100644 --- a/x-pack/plugin/analytics/build.gradle +++ b/x-pack/plugin/analytics/build.gradle @@ -31,7 +31,7 @@ dependencies { testImplementation(testArtifact(project(xpackModule('core')))) } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 784eed9bf0fa6..18a9286a8e3cf 100644 --- 
a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -41,7 +41,7 @@ artifacts { def restTestBlacklist = [] // TODO: fix this rest test to not depend on a hardcoded port! restTestBlacklist.addAll(['getting_started/10_monitor_cluster_health/*']) -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { // these tests attempt to install basic/internal licenses signed against the dev/public.key // Since there is no infrastructure in place (anytime soon) to generate licenses using the production // private key, these tests are blacklisted in non-snapshot test runs diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index df830eb9462b6..f9c408ba302ea 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -99,12 +99,12 @@ tasks.named("processResources").configure { String licenseKey = providers.systemProperty("license.key").getOrNull() if (licenseKey != null) { println "Using provided license key from ${licenseKey}" - } else if (buildParams.isSnapshotBuild()) { + } else if (buildParams.snapshotBuild) { licenseKey = Paths.get(project.projectDir.path, 'snapshot.key') } else { throw new IllegalArgumentException('Property license.key must be set for release build') } - File licenseKeyFile = rootProject.file(licenseKey) + File licenseKeyFile = layout.settingsDirectory.file(licenseKey).asFile if (licenseKeyFile.exists() == false) { throw new IllegalArgumentException('license.key at specified path [' + licenseKey + '] does not exist') } @@ -161,7 +161,7 @@ testClusters.configureEach { systemProperty 'es.queryable_built_in_roles_enabled', 'false' } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.withType(Test).configureEach { systemProperty 'es.failure_store_feature_flag_enabled', 'true' } diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle index 72ab7dabf0fcb..dc6607d2f60a6 100644 --- a/x-pack/plugin/eql/build.gradle +++ b/x-pack/plugin/eql/build.gradle @@ -38,7 +38,7 @@ dependencies { * Enable QA/rest integration tests for snapshot builds only * * TODO: Enable for all builds upon this feature release * ****************************************************************/ -if (buildParams.isSnapshotBuild()) { +if (buildParams.snapshotBuild) { addQaCheckDependencies(project) } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 99e19a98dd031..3337484c5c838 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -83,7 +83,7 @@ interface Injected { } tasks.named("test").configure { - if (buildParams.isCi() == false) { + if (buildParams.ci == false) { systemProperty 'generateDocs', true def injected = project.objects.newInstance(Injected) doFirst { @@ -153,7 +153,7 @@ tasks.named("test").configure { * Enable QA/rest integration tests for snapshot builds only * * TODO: Enable for all builds upon this feature release * ****************************************************************/ -if (buildParams.isSnapshotBuild()) { +if (buildParams.snapshotBuild) { addQaCheckDependencies(project) } diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index 1650016949661..3304dfec82da2 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -23,7 +23,7 @@ dependencies { compileOnly project(path: xpackModule('core')) } -if (buildParams.isSnapshotBuild() == false) 
{ +if (buildParams.getSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 8373d9c6582e3..db87c27d30d21 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -99,7 +99,7 @@ dependencies { } def mlCppVersion(){ - return (project.gradle.parent != null && buildParams.isSnapshotBuild() == false) ? + return (project.gradle.parent != null && buildParams.snapshotBuild == false) ? (project.version + "-SNAPSHOT") : project.version; } diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle index 49af925c0a422..e009f5e6be0ff 100644 --- a/x-pack/plugin/spatial/build.gradle +++ b/x-pack/plugin/spatial/build.gradle @@ -33,7 +33,7 @@ testClusters.configureEach { setting 'xpack.security.enabled', 'false' } -if (buildParams.isSnapshotBuild() == false) { +if (buildParams.snapshotBuild == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' } diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index f85d6ba961e0b..46d480889d244 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -33,7 +33,7 @@ tasks.named("yamlRestTest").configure { 'index/10_with_id/Index with ID', 'indices.get_alias/10_basic/Get alias against closed indices', ]; - if (buildParams.isSnapshotBuild() == false) { + if (buildParams.snapshotBuild == false) { blacklist += [ 'synonyms_privileges/10_synonyms_with_privileges/*', 'synonyms_privileges/20_synonyms_no_privileges/*' diff --git a/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle b/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle index a18b34bf4a129..40b03c0359814 100644 --- a/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle +++ b/x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/build.gradle @@ -85,7 +85,7 @@ tasks.named("yamlRestTest").configure { '^reindex/90_remote/*', '^reindex/95_parent_join/Reindex from remote*' ]; - if (buildParams.isSnapshotBuild() == false) { + if (buildParams.snapshotBuild == false) { blacklist += []; } systemProperty 'tests.rest.blacklist', blacklist.join(',') diff --git a/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/build.gradle b/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/build.gradle index d37523a3130e3..457a50f9d6b33 100644 --- a/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/build.gradle +++ b/x-pack/qa/multi-project/xpack-rest-tests-with-multiple-projects/build.gradle @@ -164,7 +164,7 @@ tasks.named("yamlRestTest").configure { '^transform/transforms_upgrade/*', '^voting_only_node/10_basic/*' ]; - if (buildParams.isSnapshotBuild() == false) { + if (buildParams.snapshotBuild == false) { blacklist += []; } systemProperty 'tests.rest.blacklist', blacklist.join(',') From 10e23b31b4017157a2475bba33c3f01bab0ec60a Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Wed, 5 Mar 2025 08:30:45 +0100 Subject: [PATCH 20/54] Explicitly pass project ID in simulate pipeline request (#124033) Instead of implicitly relying on the `IngestService` to resolve the project ID, the simulate pipeline request should do this and pass the project ID explicitly. 
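To make the calling pattern concrete: the project id is resolved once by the caller (the transport action, via its project resolver) and handed to the parsing code, rather than being resolved implicitly inside the ingest service. The following is a minimal standalone sketch of that pattern under simplified, stand-in types; only the parseWithPipelineId-style shape and the two error messages mirror the real code in the diffs below, everything else is hypothetical.

import java.util.Map;
import java.util.Objects;

// Minimal sketch of "pass the project id explicitly" (hypothetical types, not the real Elasticsearch classes).
public class ExplicitProjectIdSketch {

    record ProjectId(String id) {}

    record Pipeline(String id) {}

    // Stand-in for the ingest service: pipeline lookups are keyed by (project id, pipeline id).
    static class PipelineRegistry {
        private final Map<String, Map<String, Pipeline>> pipelinesByProject;

        PipelineRegistry(Map<String, Map<String, Pipeline>> pipelinesByProject) {
            this.pipelinesByProject = pipelinesByProject;
        }

        Pipeline getPipeline(ProjectId projectId, String pipelineId) {
            return pipelinesByProject.getOrDefault(projectId.id(), Map.of()).get(pipelineId);
        }
    }

    // Shape of the change: the caller supplies the project id instead of the lookup
    // resolving it implicitly from request context.
    static Pipeline resolvePipeline(ProjectId projectId, String pipelineId, PipelineRegistry registry) {
        Objects.requireNonNull(projectId, "projectId must be resolved by the caller");
        if (pipelineId == null) {
            throw new IllegalArgumentException("param [pipeline] is null");
        }
        Pipeline pipeline = registry.getPipeline(projectId, pipelineId);
        if (pipeline == null) {
            throw new IllegalArgumentException("pipeline [" + pipelineId + "] does not exist");
        }
        return pipeline;
    }

    public static void main(String[] args) {
        PipelineRegistry registry = new PipelineRegistry(
            Map.of("project-a", Map.of("my-pipeline", new Pipeline("my-pipeline")))
        );
        // The caller resolves the id up front and passes it through explicitly.
        ProjectId projectId = new ProjectId("project-a");
        System.out.println(resolvePipeline(projectId, "my-pipeline", registry));
    }
}

With the id in the signature, the project dependency is visible to every caller, which is what the test changes below exercise by constructing a project id and passing it to parseWithPipelineId.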
--- .../ingest/SimulatePipelineRequest.java | 4 ++- .../SimulatePipelineTransportAction.java | 8 +++++- .../SimulatePipelineRequestParsingTests.java | 26 ++++++++++++++++--- 3 files changed, 33 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 02027b1f633d2..f7415b438dfae 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -130,6 +131,7 @@ record Parsed(Pipeline pipeline, List documents, boolean verbose static final String SIMULATED_PIPELINE_ID = "_simulate_pipeline"; static Parsed parseWithPipelineId( + ProjectId projectId, String pipelineId, Map config, boolean verbose, @@ -139,7 +141,7 @@ static Parsed parseWithPipelineId( if (pipelineId == null) { throw new IllegalArgumentException("param [pipeline] is null"); } - Pipeline pipeline = ingestService.getPipeline(pipelineId); + Pipeline pipeline = ingestService.getPipeline(projectId, pipelineId); if (pipeline == null) { throw new IllegalArgumentException("pipeline [" + pipelineId + "] does not exist"); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index 6b2b96fb76402..6d2f2d8044388 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.project.ProjectResolver; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -49,6 +50,7 @@ public class SimulatePipelineTransportAction extends HandledTransportAction random = ThreadLocal.withInitial(Randomness::get); @@ -58,7 +60,8 @@ public SimulatePipelineTransportAction( ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, - IngestService ingestService + IngestService ingestService, + ProjectResolver projectResolver ) { super( SimulatePipelineAction.NAME, @@ -70,6 +73,7 @@ public SimulatePipelineTransportAction( this.ingestService = ingestService; this.executionService = new SimulateExecutionService(threadPool); this.transportService = transportService; + this.projectResolver = projectResolver; this.ingestNodeTransportActionTimeout = INGEST_NODE_TRANSPORT_ACTION_TIMEOUT.get(ingestService.getClusterService().getSettings()); ingestService.getClusterService() .getClusterSettings() @@ -96,9 +100,11 @@ protected void doExecute(Task task, SimulatePipelineRequest request, ActionListe } try { if (discoveryNodes.getLocalNode().isIngestNode()) { + final var projectId = projectResolver.getProjectId(); final 
SimulatePipelineRequest.Parsed simulateRequest; if (request.getId() != null) { simulateRequest = SimulatePipelineRequest.parseWithPipelineId( + projectId, request.getId(), source, request.isVerbose(), diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index 391c258b6f098..575c3e87dfcd7 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -44,6 +44,8 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -61,7 +63,7 @@ public void init() throws IOException { (factories, tag, description, config) -> processor ); ingestService = mock(IngestService.class); - when(ingestService.getPipeline(SIMULATED_PIPELINE_ID)).thenReturn(pipeline); + when(ingestService.getPipeline(any(), eq(SIMULATED_PIPELINE_ID))).thenReturn(pipeline); when(ingestService.getProcessorFactories()).thenReturn(registry); } @@ -89,7 +91,9 @@ public void testParseUsingPipelineStore() throws Exception { expectedDocs.add(expectedDoc); } + var projectId = randomProjectIdOrDefault(); SimulatePipelineRequest.Parsed actualRequest = SimulatePipelineRequest.parseWithPipelineId( + projectId, SIMULATED_PIPELINE_ID, requestContent, false, @@ -213,24 +217,40 @@ public void testParseWithProvidedPipeline() throws Exception { } public void testNullPipelineId() { + var projectId = randomProjectIdOrDefault(); Map requestContent = new HashMap<>(); List> docs = new ArrayList<>(); requestContent.put(Fields.DOCS, docs); Exception e = expectThrows( IllegalArgumentException.class, - () -> SimulatePipelineRequest.parseWithPipelineId(null, requestContent, false, ingestService, RestApiVersion.current()) + () -> SimulatePipelineRequest.parseWithPipelineId( + projectId, + null, + requestContent, + false, + ingestService, + RestApiVersion.current() + ) ); assertThat(e.getMessage(), equalTo("param [pipeline] is null")); } public void testNonExistentPipelineId() { + var projectId = randomProjectIdOrDefault(); String pipelineId = randomAlphaOfLengthBetween(1, 10); Map requestContent = new HashMap<>(); List> docs = new ArrayList<>(); requestContent.put(Fields.DOCS, docs); Exception e = expectThrows( IllegalArgumentException.class, - () -> SimulatePipelineRequest.parseWithPipelineId(pipelineId, requestContent, false, ingestService, RestApiVersion.current()) + () -> SimulatePipelineRequest.parseWithPipelineId( + projectId, + pipelineId, + requestContent, + false, + ingestService, + RestApiVersion.current() + ) ); assertThat(e.getMessage(), equalTo("pipeline [" + pipelineId + "] does not exist")); } From 1bf6a7741fade22bb07d509d9da56d7bb19e89ee Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Wed, 5 Mar 2025 18:47:24 +1100 Subject: [PATCH 21/54] Use singleton instance for default project-id (#123677) No need to create new instances for the default project-id. We can use the singleton field which should speed up key comparison for Map.get operations.
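For context on the claimed speed-up: java.util.HashMap compares candidate keys with an identity check before falling back to equals(), so handing out one canonical instance for the most common key lets lookups hit that fast path and avoids allocating a fresh key object per call. The sketch below illustrates the idea with a simplified stand-in class, assuming nothing beyond that mechanism; only the DEFAULT/fromId naming mirrors the real ProjectId in the diff that follows.

import java.util.HashMap;
import java.util.Map;

// Sketch of why a shared canonical key instance makes Map.get cheaper: HashMap's lookup
// does (k == key || key.equals(k)), so the very instance used to insert the entry matches
// on the identity comparison without ever invoking equals(). Simplified stand-in class.
public class SingletonKeySketch {

    static final class ProjectId {
        static final ProjectId DEFAULT = new ProjectId("default");

        private final String id;

        private ProjectId(String id) {
            this.id = id;
        }

        // Returns the shared instance for the default id instead of allocating a new one.
        static ProjectId fromId(String id) {
            return "default".equals(id) ? DEFAULT : new ProjectId(id);
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof ProjectId other && id.equals(other.id);
        }

        @Override
        public int hashCode() {
            return id.hashCode();
        }
    }

    public static void main(String[] args) {
        Map<ProjectId, String> metadataByProject = new HashMap<>();
        metadataByProject.put(ProjectId.DEFAULT, "default project metadata");

        // Same instance as the stored key: the identity check succeeds, equals() is skipped.
        System.out.println(metadataByProject.get(ProjectId.DEFAULT));

        // Parsing "default" returns the shared instance, so the fast path applies here too.
        System.out.println(metadataByProject.get(ProjectId.fromId("default")));
    }
}

The patch applies the same idea by routing wire deserialization through ProjectId.readFrom, which delegates to fromId and returns the shared DEFAULT instance whenever the id is "default".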
Relates: #123662 --- .../cluster/block/ClusterBlocks.java | 2 +- .../cluster/metadata/Metadata.java | 4 +- .../cluster/metadata/ProjectId.java | 46 +++++++++++-- .../cluster/metadata/ProjectMetadata.java | 2 +- .../project/AbstractProjectResolver.java | 4 +- .../cluster/routing/GlobalRoutingTable.java | 2 +- .../AbstractAllocateAllocationCommand.java | 2 +- .../command/CancelAllocationCommand.java | 2 +- .../command/MoveAllocationCommand.java | 2 +- .../TransportClusterHealthActionTests.java | 10 +-- .../TransportClusterStateActionTests.java | 6 +- .../TransportCreateIndexActionTests.java | 2 +- .../action/bulk/TransportBulkActionTests.java | 2 +- .../DataStreamsActionUtilTests.java | 3 +- .../action/support/ActiveShardCountTests.java | 3 +- .../BroadcastReplicationTests.java | 2 +- .../TransportMultiTermVectorsActionTests.java | 2 +- .../cluster/ClusterChangedEventTests.java | 9 ++- .../cluster/ClusterStateTests.java | 12 ++-- .../health/ClusterStateHealthTests.java | 20 +++--- .../IndexNameExpressionResolverTests.java | 64 +++++++++---------- .../MetadataCreateIndexServiceTests.java | 18 +++--- .../cluster/metadata/MetadataTests.java | 30 ++++----- .../cluster/metadata/ProjectIdTests.java | 16 ++--- .../metadata/ProjectMetadataTests.java | 2 +- .../cluster/project/ProjectResolverTests.java | 12 ++-- .../routing/GlobalRoutingTableTests.java | 13 ++-- .../cluster/routing/RoutingTableTests.java | 3 +- .../allocation/AllocationServiceTests.java | 12 ++-- .../allocation/IndexMetadataUpdaterTests.java | 6 +- .../ResizeAllocationDeciderTests.java | 6 +- ...ResizeSourceIndexSettingsUpdaterTests.java | 14 ++-- .../routing/allocation/RoutingNodesTests.java | 2 +- .../BalancedShardsAllocatorTests.java | 2 +- .../DesiredBalanceReconcilerTests.java | 4 +- .../allocator/OrderedShardsIteratorTests.java | 5 +- .../decider/DiskThresholdDeciderTests.java | 4 +- .../decider/FilterAllocationDeciderTests.java | 5 +- ...eOnlyWhenActiveAllocationDeciderTests.java | 3 +- .../structure/RoutingIteratorTests.java | 2 +- .../PersistedClusterStateServiceTests.java | 4 +- .../TimestampFieldMapperServiceTests.java | 3 +- .../ingest/IngestServiceTests.java | 28 ++++---- .../security/SecurityRolesMultiProjectIT.java | 12 ++-- .../action/DeleteProjectAction.java | 2 +- .../multiproject/action/PutProjectAction.java | 4 +- .../action/RestDeleteProjectAction.java | 2 +- .../action/RestPutProjectAction.java | 2 +- .../action/DeleteProjectActionTests.java | 6 +- .../cluster/project/TestProjectResolvers.java | 2 +- .../org/elasticsearch/test/ESTestCase.java | 4 +- .../project/TestProjectResolversTests.java | 14 ++-- ...PrimaryFollowerAllocationDeciderTests.java | 4 +- .../AsyncTaskMaintenanceServiceTests.java | 8 +-- .../bwc/ArchiveAllocationDeciderTests.java | 5 +- .../action/GetJobsActionRequestTests.java | 13 ++-- .../SearchableSnapshotAllocatorTests.java | 2 +- .../authz/AuthorizationServiceTests.java | 4 +- .../authz/IndicesAndAliasesResolverTests.java | 2 +- 59 files changed, 254 insertions(+), 227 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index 85298e495fd05..bbcd816410a45 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -438,7 +438,7 @@ private static void writeBlockSet(Set blocks, StreamOutput out) th public static ClusterBlocks readFrom(StreamInput in) throws IOException { 
if (in.getTransportVersion().onOrAfter(TransportVersions.MULTI_PROJECT)) { final Set global = readBlockSet(in); - final Map projectBlocksMap = in.readImmutableMap(ProjectId::new, ProjectBlocks::readFrom); + final Map projectBlocksMap = in.readImmutableMap(ProjectId::readFrom, ProjectBlocks::readFrom); if (global.isEmpty() && noProjectOrDefaultProjectOnly(projectBlocksMap) && projectBlocksMap.getOrDefault(Metadata.DEFAULT_PROJECT_ID, ProjectBlocks.EMPTY).indices().isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index ab56621e748ae..55c0c29247b5f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -88,7 +88,7 @@ public class Metadata implements Diffable, ChunkedToXContent { public static final String UNKNOWN_CLUSTER_UUID = "_na_"; // TODO multi-project: verify that usages are really expected to work on the default project only, // and that they are not a stop-gap solution to make the tests pass - public static final ProjectId DEFAULT_PROJECT_ID = new ProjectId("default"); + public static final ProjectId DEFAULT_PROJECT_ID = ProjectId.DEFAULT; public enum XContentContext { /* Custom metadata should be returned as part of API call */ @@ -1154,7 +1154,7 @@ public static Metadata readFrom(StreamInput in) throws IOException { builder.put(ReservedStateMetadata.readFrom(in)); } - builder.projectMetadata(in.readMap(ProjectId::new, ProjectMetadata::readFrom)); + builder.projectMetadata(in.readMap(ProjectId::readFrom, ProjectMetadata::readFrom)); } return builder.build(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectId.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectId.java index 71b7ef1557c57..2363f9efa2b36 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectId.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectId.java @@ -19,17 +19,39 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; -public record ProjectId(String id) implements Writeable, ToXContent { +public class ProjectId implements Writeable, ToXContent { - public static final Reader READER = ProjectId::new; + private static final String DEFAULT_STRING = "default"; + public static final ProjectId DEFAULT = new ProjectId(DEFAULT_STRING); + public static final Reader READER = ProjectId::readFrom; private static final int MAX_LENGTH = 128; - public ProjectId { + private final String id; + + private ProjectId(String id) { if (Strings.isNullOrBlank(id)) { throw new IllegalArgumentException("project-id cannot be empty"); } - assert isValidFormatId(id) : "project-id [" + id + "] must be alphanumeric ASCII with up to " + MAX_LENGTH + " chars"; + if (isValidFormatId(id) == false) { + final var message = "project-id [" + id + "] must be alphanumeric ASCII with up to " + MAX_LENGTH + " chars"; + assert false : message; + throw new IllegalArgumentException(message); + } + this.id = id; + } + + public String id() { + return id; + } + + public static ProjectId fromId(String id) { + if (DEFAULT_STRING.equals(id)) { + return DEFAULT; + } else { + return new ProjectId(id); + } } static boolean isValidFormatId(String id) { @@ -53,8 +75,8 @@ private static boolean isValidIdChar(char c) { return (c >= '0' && c <= '9') || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || c == '_' 
|| c == '-'; } - public ProjectId(StreamInput in) throws IOException { - this(in.readString()); + public static ProjectId readFrom(StreamInput in) throws IOException { + return fromId(in.readString()); } @Override @@ -79,4 +101,16 @@ public static ProjectId ofNullable(@Nullable String id, @Nullable ProjectId fall public String toString() { return this.id; } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + ProjectId projectId = (ProjectId) o; + return Objects.equals(id, projectId.id); + } + + @Override + public int hashCode() { + return Objects.hashCode(id); + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java index 148409a9f3b3d..fd7472ebc2b0d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ProjectMetadata.java @@ -2136,7 +2136,7 @@ public Iterator toXContentChunked(ToXContent.Params p) { } public static ProjectMetadata readFrom(StreamInput in) throws IOException { - ProjectId id = new ProjectId(in); + ProjectId id = ProjectId.readFrom(in); Builder builder = builder(id); Function mappingLookup; Map mappingMetadataMap = in.readMapValues(MappingMetadata::new, MappingMetadata::getSha256); diff --git a/server/src/main/java/org/elasticsearch/cluster/project/AbstractProjectResolver.java b/server/src/main/java/org/elasticsearch/cluster/project/AbstractProjectResolver.java index 8d13c529379de..250a2ac04ea96 100644 --- a/server/src/main/java/org/elasticsearch/cluster/project/AbstractProjectResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/project/AbstractProjectResolver.java @@ -50,7 +50,7 @@ public ProjectId getProjectId() { if (headerValue == null) { return getFallbackProjectId(); } - return new ProjectId(headerValue); + return ProjectId.fromId(headerValue); } @Override @@ -89,7 +89,7 @@ public boolean supportsMultipleProjects() { } protected static ProjectMetadata findProject(Metadata metadata, String headerValue) { - var project = metadata.projects().get(new ProjectId(headerValue)); + var project = metadata.projects().get(ProjectId.fromId(headerValue)); if (project == null) { throw new IllegalArgumentException("Could not find project with id [" + headerValue + "]"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java index e7b707b8993c3..c6899b34cf577 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/GlobalRoutingTable.java @@ -179,7 +179,7 @@ public static Diff readDiffFrom(StreamInput in) throws IOExc } public static GlobalRoutingTable readFrom(StreamInput in) throws IOException { - final var table = in.readImmutableOpenMap(ProjectId::new, RoutingTable::readFrom); + final var table = in.readImmutableOpenMap(ProjectId::readFrom, RoutingTable::readFrom); return new GlobalRoutingTable(table); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java index 762f14dfc2d36..7abcef56af5f0 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AbstractAllocateAllocationCommand.java @@ -109,7 +109,7 @@ protected AbstractAllocateAllocationCommand(StreamInput in) throws IOException { shardId = in.readVInt(); node = in.readString(); if (in.getTransportVersion().onOrAfter(TransportVersions.MULTI_PROJECT)) { - projectId = new ProjectId(in); + projectId = ProjectId.readFrom(in); } else { projectId = Metadata.DEFAULT_PROJECT_ID; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java index 281e01e486bc4..f8d569e83330f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/CancelAllocationCommand.java @@ -84,7 +84,7 @@ public CancelAllocationCommand(StreamInput in) throws IOException { node = in.readString(); allowPrimary = in.readBoolean(); if (in.getTransportVersion().onOrAfter(TransportVersions.MULTI_PROJECT)) { - projectId = new ProjectId(in); + projectId = ProjectId.readFrom(in); } else { projectId = Metadata.DEFAULT_PROJECT_ID; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java index 0745ef22a1f06..a9efc705a9685 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/MoveAllocationCommand.java @@ -69,7 +69,7 @@ public MoveAllocationCommand(StreamInput in) throws IOException { fromNode = in.readString(); toNode = in.readString(); if (in.getTransportVersion().onOrAfter(TransportVersions.MULTI_PROJECT)) { - projectId = new ProjectId(in); + projectId = ProjectId.readFrom(in); } else { projectId = Metadata.DEFAULT_PROJECT_ID; } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthActionTests.java index 0907ba663f465..bc06ed2356469 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthActionTests.java @@ -41,21 +41,21 @@ public void testWaitForInitializingShards() throws Exception { final String[] indices = { "test" }; final ClusterHealthRequest request = new ClusterHealthRequest(TEST_REQUEST_TIMEOUT); request.waitForNoInitializingShards(true); - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); ClusterState clusterState = randomClusterStateWithInitializingShards("test", 0, projectId); var project = clusterState.metadata().getProject(projectId); ClusterHealthResponse response = createResponse(indices, clusterState, project); assertThat(TransportClusterHealthAction.prepareResponse(request, response, project, null), equalTo(1)); request.waitForNoInitializingShards(true); - projectId = new ProjectId(randomUUID()); + projectId = randomUniqueProjectId(); clusterState = 
randomClusterStateWithInitializingShards("test", between(1, 10), projectId); project = clusterState.metadata().getProject(projectId); response = createResponse(indices, clusterState, project); assertThat(TransportClusterHealthAction.prepareResponse(request, response, project, null), equalTo(0)); request.waitForNoInitializingShards(false); - projectId = new ProjectId(randomUUID()); + projectId = randomUniqueProjectId(); clusterState = randomClusterStateWithInitializingShards("test", randomInt(20), projectId); project = clusterState.metadata().getProject(projectId); response = createResponse(indices, clusterState, project); @@ -67,7 +67,7 @@ public void testWaitForAllShards() { final ClusterHealthRequest request = new ClusterHealthRequest(TEST_REQUEST_TIMEOUT); request.waitForActiveShards(ActiveShardCount.ALL); - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); ClusterState clusterState = randomClusterStateWithInitializingShards("test", 1, projectId); var project = clusterState.metadata().getProject(projectId); ClusterHealthResponse response = createResponse(indices, clusterState, project); @@ -125,7 +125,7 @@ ClusterState randomClusterStateWithInitializingShards(String index, final int in } var projects = randomMap(0, 5, () -> { - var id = new ProjectId(randomUUID()); + var id = randomUniqueProjectId(); return Tuple.tuple(id, ProjectMetadata.builder(id).build()); }); return ClusterState.builder(ClusterName.DEFAULT) diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionTests.java index d21809cdf4630..29bdaf9b6b7c5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionTests.java @@ -107,7 +107,7 @@ public void testGetClusterStateWithDefaultProjectOnly() throws Exception { public void testGetClusterStateForOneProjectOfMany() throws Exception { final Set indexNames = randomSet(1, 8, () -> randomAlphaOfLengthBetween(4, 12)); - final ProjectId projectId = new ProjectId(randomUUID()); + final ProjectId projectId = randomUniqueProjectId(); final ProjectResolver projectResolver = TestProjectResolvers.singleProject(projectId); final ClusterStateRequest request = buildRandomRequest(indexNames); @@ -117,7 +117,7 @@ public void testGetClusterStateForOneProjectOfMany() throws Exception { final ProjectMetadata.Builder[] projects = new ProjectMetadata.Builder[numberOfProjects]; projects[0] = projectBuilder(projectId, indexNames); for (int i = 1; i < numberOfProjects; i++) { - projects[i] = projectBuilder(new ProjectId(randomUUID()), randomSet(0, 12, () -> randomAlphaOfLengthBetween(4, 12))); + projects[i] = projectBuilder(randomUniqueProjectId(), randomSet(0, 12, () -> randomAlphaOfLengthBetween(4, 12))); } final ClusterState state = buildClusterState(projects); @@ -133,7 +133,7 @@ public void testGetClusterStateForManyProjects() throws Exception { final ProjectId[] projectIds = new ProjectId[numberOfProjects]; final Set indexNames = randomSet(5, 20, () -> randomAlphaOfLengthBetween(4, 12)); for (int i = 0; i < numberOfProjects; i++) { - projectIds[i] = new ProjectId(randomUUID()); + projectIds[i] = randomUniqueProjectId(); projects[i] = projectBuilder(projectIds[i], randomSubsetOf(indexNames)); } final ClusterState state = buildClusterState(projects); 
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexActionTests.java index 8417509e17a2f..10a086675fa6a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/TransportCreateIndexActionTests.java @@ -61,7 +61,7 @@ public class TransportCreateIndexActionTests extends ESTestCase { private static final String UNMANAGED_SYSTEM_INDEX_NAME = ".my-system"; private static final String MANAGED_SYSTEM_INDEX_NAME = ".my-managed"; private static final String SYSTEM_ALIAS_NAME = ".my-alias"; - private static final ProjectId PROJECT_ID = new ProjectId("test_project_id"); + private static final ProjectId PROJECT_ID = ProjectId.fromId("test_project_id"); private static final ClusterState CLUSTER_STATE = ClusterState.builder(new ClusterName("test")) .metadata(Metadata.builder().build()) .putProjectMetadata(ProjectMetadata.builder(PROJECT_ID).build()) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index fdc0abf5880f5..294cb1fbdb7db 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -632,7 +632,7 @@ public void testFailuresDuringPrerequisiteActions() throws InterruptedException // Construct a cluster state that contains the required data streams. // using a single, non-default project final ClusterState oldState = clusterService.state(); - final ProjectId projectId = new ProjectId(randomUUID()); + final ProjectId projectId = randomUniqueProjectId(); final Metadata metadata = Metadata.builder(oldState.metadata()) .removeProject(Metadata.DEFAULT_PROJECT_ID) .put( diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java index 3e06afac1d22f..873d700423bd5 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/DataStreamsActionUtilTests.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; @@ -52,7 +51,7 @@ public void testDataStreamsResolveConcreteIndexNames() { var dataStreamFailureIndex1 = new Index(".fs-foo1", IndexMetadata.INDEX_UUID_NA_VALUE); var dataStreamFailureIndex2 = new Index(".fs-bar2", IndexMetadata.INDEX_UUID_NA_VALUE); - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) .putProjectMetadata( ProjectMetadata.builder(projectId) diff --git a/server/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java 
b/server/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java index 86a90c3760c6d..1096f92e4c605 100644 --- a/server/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.TestShardRoutingRoleStrategies; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -63,7 +62,7 @@ public void testEnoughShardsWhenProjectIsGone() { .numberOfShards(randomIntBetween(1, 3)) .numberOfReplicas(randomIntBetween(1, 3)) .build(); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(indexMetadata, randomBoolean()).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(indexMetadata, randomBoolean()).build(); Index index = new Index(indexName, "_uuid"); ShardId shardId = new ShardId(index, 0); ShardRouting shardRouting = ShardRouting.newUnassigned( diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 57532dc9860f9..c1567d72d059a 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -116,7 +116,7 @@ public void setUp() throws Exception { ); transportService.start(); transportService.acceptIncomingRequests(); - projectId = new ProjectId(randomUUID()); + projectId = randomUniqueProjectId(); broadcastReplicationAction = new TestBroadcastReplicationAction( clusterService, transportService, diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java index 70c4d0ca8ce54..0bd348e0e398e 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsActionTests.java @@ -91,7 +91,7 @@ public static void beforeClass() throws Exception { emptySet() ); - ProjectId projectId = new ProjectId(randomBase64UUID()); + ProjectId projectId = randomUniqueProjectId(); projectResolver = TestProjectResolvers.singleProject(projectId); final Index index1 = new Index("index1", randomBase64UUID()); final Index index2 = new Index("index2", randomBase64UUID()); diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java index 24b6d50a07820..c08c82e27fa8b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.metadata.IndexGraveyard; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import 
org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; @@ -485,8 +484,8 @@ public void testChangedCustomMetadataSet() { public void testChangedCustomMetadataSetMultiProject() { final CustomProjectMetadata project1Custom = new CustomProjectMetadata("project1"); final CustomProjectMetadata project2Custom = new CustomProjectMetadata("project2"); - final ProjectMetadata project1 = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); - final ProjectMetadata project2 = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + final ProjectMetadata project1 = ProjectMetadata.builder(randomUniqueProjectId()).build(); + final ProjectMetadata project2 = ProjectMetadata.builder(randomUniqueProjectId()).build(); final ClusterState originalState = ClusterState.builder(TEST_CLUSTER_NAME) .metadata(Metadata.builder().put(project1).put(project2).build()) .build(); @@ -514,7 +513,7 @@ public void testChangedCustomMetadataSetMultiProject() { // Add custom in completely new project newState = ClusterState.builder(originalState) .putProjectMetadata( - ProjectMetadata.builder(new ProjectId(randomUUID())).putCustom(project2Custom.getWriteableName(), project2Custom).build() + ProjectMetadata.builder(randomUniqueProjectId()).putCustom(project2Custom.getWriteableName(), project2Custom).build() ) .build(); event = new ClusterChangedEvent("_na_", originalState, newState); @@ -727,7 +726,7 @@ private static DiscoveryNode newNode(final String nodeId, Set // Create the metadata for a cluster state. private static ProjectMetadata createProject(final List indices) { - final ProjectMetadata.Builder builder = ProjectMetadata.builder(new ProjectId(randomUUID())); + final ProjectMetadata.Builder builder = ProjectMetadata.builder(randomUniqueProjectId()); for (Index index : indices) { builder.put(createIndexMetadata(index), true); } diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index b065e02754341..14e13cc2ab5ca 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -151,7 +151,7 @@ public void testCopyAndUpdateMetadata() throws IOException { public void testGetNonExistingProjectStateThrows() { final List projects = IntStream.range(0, between(1, 3)) - .mapToObj(i -> MetadataTests.randomProject(new ProjectId("p_" + i), between(0, 5))) + .mapToObj(i -> MetadataTests.randomProject(ProjectId.fromId("p_" + i), between(0, 5))) .toList(); final Metadata metadata = MetadataTests.randomMetadata(projects); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); @@ -835,7 +835,7 @@ private static ClusterState buildMultiProjectClusterState(DiscoveryNode... nodes .build() ) .put( - ProjectMetadata.builder(new ProjectId("3LftaL7hgfXAsF60Gm6jcD")) + ProjectMetadata.builder(ProjectId.fromId("3LftaL7hgfXAsF60Gm6jcD")) .put( IndexMetadata.builder("common-index") .settings( @@ -850,7 +850,7 @@ private static ClusterState buildMultiProjectClusterState(DiscoveryNode... nodes ) ) .put( - ProjectMetadata.builder(new ProjectId("tb5W0bx765nDVIwqJPw92G")) + ProjectMetadata.builder(ProjectId.fromId("tb5W0bx765nDVIwqJPw92G")) .put( IndexMetadata.builder("common-index") .settings( @@ -858,7 +858,7 @@ private static ClusterState buildMultiProjectClusterState(DiscoveryNode... 
nodes ) ) ) - .put(ProjectMetadata.builder(new ProjectId("WHyuJ0uqBYOPgHX9kYUXlZ"))) + .put(ProjectMetadata.builder(ProjectId.fromId("WHyuJ0uqBYOPgHX9kYUXlZ"))) .build(); final DiscoveryNodes.Builder discoveryNodes = DiscoveryNodes.builder(); for (var node : nodes) { @@ -874,8 +874,8 @@ private static ClusterState buildMultiProjectClusterState(DiscoveryNode... nodes .blocks( ClusterBlocks.builder() .addGlobalBlock(Metadata.CLUSTER_READ_ONLY_BLOCK) - .addIndexBlock(new ProjectId("tb5W0bx765nDVIwqJPw92G"), "common-index", IndexMetadata.INDEX_METADATA_BLOCK) - .addIndexBlock(new ProjectId("3LftaL7hgfXAsF60Gm6jcD"), "another-index", IndexMetadata.INDEX_READ_ONLY_BLOCK) + .addIndexBlock(ProjectId.fromId("tb5W0bx765nDVIwqJPw92G"), "common-index", IndexMetadata.INDEX_METADATA_BLOCK) + .addIndexBlock(ProjectId.fromId("3LftaL7hgfXAsF60Gm6jcD"), "another-index", IndexMetadata.INDEX_READ_ONLY_BLOCK) ) .build(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index 9b530be665003..cd00b7749e899 100644 --- a/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -127,7 +127,7 @@ public void testClusterHealthWaitsForClusterStateApplication() throws Interrupte var projectId = state.metadata().projects().keySet().iterator().next(); // Randomly add an extra project. if (randomBoolean()) { - state = ClusterState.builder(state).putProjectMetadata(ProjectMetadata.builder(new ProjectId(randomUUID())).build()).build(); + state = ClusterState.builder(state).putProjectMetadata(ProjectMetadata.builder(randomUniqueProjectId()).build()).build(); } setState(clusterService, state); @@ -183,7 +183,7 @@ public void testClusterHealth() throws IOException { RoutingTableGenerator routingTableGenerator = new RoutingTableGenerator(); RoutingTableGenerator.ShardCounter counter = new RoutingTableGenerator.ShardCounter(); RoutingTable.Builder routingTable = RoutingTable.builder(); - ProjectId projectId = new ProjectId(randomUUID()); + ProjectId projectId = randomUniqueProjectId(); ProjectMetadata.Builder project = ProjectMetadata.builder(projectId); for (int i = randomInt(4); i >= 0; i--) { int numberOfShards = randomInt(3) + 1; @@ -215,7 +215,7 @@ public void testClusterHealth() throws IOException { public void testClusterHealthOnIndexCreation() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); final List clusterStates = simulateIndexCreationStates(indexName, false, projectId); for (int i = 0; i < clusterStates.size(); i++) { // make sure cluster health is always YELLOW, up until the last state where it should be GREEN @@ -232,7 +232,7 @@ public void testClusterHealthOnIndexCreation() { public void testClusterHealthOnIndexCreationWithFailedAllocations() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); final List clusterStates = simulateIndexCreationStates(indexName, true, projectId); for (int i = 0; i < clusterStates.size(); i++) { // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard @@ -250,7 +250,7 @@ public void testClusterHealthOnIndexCreationWithFailedAllocations() 
{ public void testClusterHealthOnClusterRecovery() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); final List clusterStates = simulateClusterRecoveryStates(indexName, false, false, projectId); for (int i = 0; i < clusterStates.size(); i++) { // make sure cluster health is YELLOW up until the final cluster state, when it turns GREEN @@ -267,7 +267,7 @@ public void testClusterHealthOnClusterRecovery() { public void testClusterHealthOnClusterRecoveryWithFailures() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); final List clusterStates = simulateClusterRecoveryStates(indexName, false, true, projectId); for (int i = 0; i < clusterStates.size(); i++) { // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard @@ -285,7 +285,7 @@ public void testClusterHealthOnClusterRecoveryWithFailures() { public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIds() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); final List clusterStates = simulateClusterRecoveryStates(indexName, true, false, projectId); for (int i = 0; i < clusterStates.size(); i++) { // because there were previous allocation ids, we should be RED until the primaries are started, @@ -309,7 +309,7 @@ public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIds() { public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIdsAndAllocationFailures() { final String indexName = "test-idx"; final String[] indices = new String[] { indexName }; - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); for (final ClusterState clusterState : simulateClusterRecoveryStates(indexName, true, true, projectId)) { final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices, projectId); // if the inactive primaries are due solely to recovery (not failed allocation or previously being allocated) @@ -350,7 +350,7 @@ private List simulateIndexCreationStates( .put(projectId, RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata)); final int nrOfProjects = randomIntBetween(0, 5); for (int i = 0; i < nrOfProjects; i++) { - var id = new ProjectId(randomUUID()); + var id = randomUniqueProjectId(); mdBuilder.put(ProjectMetadata.builder(id).build()); rtBuilder.put(id, RoutingTable.EMPTY_ROUTING_TABLE); } @@ -393,7 +393,7 @@ private List simulateClusterRecoveryStates( .put(projectId, RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY).addAsNew(indexMetadata)); final int nrOfProjects = randomIntBetween(0, 5); for (int i = 0; i < nrOfProjects; i++) { - var id = new ProjectId(randomUUID()); + var id = randomUniqueProjectId(); mdBuilder.put(ProjectMetadata.builder(id).build()); rtBuilder.put(id, RoutingTable.EMPTY_ROUTING_TABLE); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index f8d8b9dc8cd13..d2928a782da51 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ 
b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -108,7 +108,7 @@ public void setUp() throws Exception { } public void testConcreteIndexNamesStrictExpand() { - final ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foofoo-closed").state(State.CLOSE)) @@ -182,7 +182,7 @@ public void testConcreteIndexNamesStrictExpand() { } public void testConcreteIndexNamesLenientExpand() { - final ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foofoo-closed").state(State.CLOSE)) @@ -242,7 +242,7 @@ public void testConcreteIndexNamesLenientExpand() { } public void testConcreteIndexNamesIgnoreUnavailableDisallowEmpty() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo")) .put(indexBuilder("foobar")) .put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE)) @@ -296,7 +296,7 @@ public void testConcreteIndexNamesIgnoreUnavailableDisallowEmpty() { } public void testConcreteIndexNamesExpandWildcards() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo").state(IndexMetadata.State.CLOSE)) .put(indexBuilder("bar")) .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("barbaz"))) @@ -521,7 +521,7 @@ public void testConcreteIndexNamesExpandWildcards() { } public void testConcreteIndexNamesNoExpandWildcards() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE)) @@ -649,7 +649,7 @@ public void testConcreteIndexNamesNoExpandWildcards() { } public void testIndexOptionsSingleIndexNoExpandWildcards() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foobar").putAlias(AliasMetadata.builder("foofoobar"))) .put(indexBuilder("foofoo-closed").state(IndexMetadata.State.CLOSE)) @@ -735,7 +735,7 @@ public void testIndexOptionsSingleIndexNoExpandWildcards() { } public void testIndexOptionsEmptyCluster() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndicesOptions options = IndicesOptions.strictExpandOpen(); final IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -824,7 +824,7 @@ public void testIndexOptionsEmptyCluster() { } public void 
testConcreteIndicesIgnoreIndicesOneMissingIndex() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX")) .put(indexBuilder("kuku")) .build(); @@ -852,7 +852,7 @@ public void testConcreteIndicesIgnoreIndicesOneMissingIndex() { } public void testConcreteIndicesIgnoreIndicesOneMissingIndexOtherFound() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX")) .put(indexBuilder("kuku")) .build(); @@ -869,7 +869,7 @@ public void testConcreteIndicesIgnoreIndicesOneMissingIndexOtherFound() { } public void testConcreteIndicesIgnoreIndicesAllMissing() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX")) .put(indexBuilder("kuku")) .build(); @@ -898,7 +898,7 @@ public void testConcreteIndicesIgnoreIndicesAllMissing() { } public void testConcreteIndicesIgnoreIndicesEmptyRequest() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX")) .put(indexBuilder("kuku")) .build(); @@ -911,7 +911,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { } public void testConcreteIndicesNoIndicesErrorMessage() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( project, IndicesOptions.fromOptions(false, false, true, true), @@ -925,7 +925,7 @@ public void testConcreteIndicesNoIndicesErrorMessage() { } public void testConcreteIndicesNoIndicesErrorMessageNoExpand() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( project, IndicesOptions.fromOptions(false, false, false, false), @@ -939,7 +939,7 @@ public void testConcreteIndicesNoIndicesErrorMessageNoExpand() { } public void testConcreteIndicesWildcardExpansion() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX").state(State.OPEN)) .put(indexBuilder("testXXY").state(State.OPEN)) .put(indexBuilder("testXYY").state(State.CLOSE)) @@ -980,7 +980,7 @@ public void testConcreteIndicesWildcardExpansion() { } public void testConcreteIndicesWildcardWithNegation() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("testXXX").state(State.OPEN)) .put(indexBuilder("testXXY").state(State.OPEN)) .put(indexBuilder("testXYY").state(State.OPEN)) @@ -1073,7 +1073,7 @@ public void testConcreteIndicesWildcardWithNegation() { } public void testConcreteIndicesWildcardAndAliases() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) 
.put(indexBuilder("foo_foo").state(State.OPEN).putAlias(AliasMetadata.builder("foo"))) .put(indexBuilder("bar_bar").state(State.OPEN).putAlias(AliasMetadata.builder("foo"))) .build(); @@ -1174,7 +1174,7 @@ public void testHiddenAliasAndHiddenIndexResolution() { { // A visible index with a visible alias and a hidden index with a hidden alias - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(visibleAlias))) .put( indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN) @@ -1217,7 +1217,7 @@ public void testHiddenAliasAndHiddenIndexResolution() { { // A visible alias that points to one hidden and one visible index - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(visibleAlias))) .put( indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN) @@ -1243,7 +1243,7 @@ public void testHiddenAliasAndHiddenIndexResolution() { { // A hidden alias that points to one hidden and one visible index - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder(visibleIndex).state(State.OPEN).putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true))) .put( indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN) @@ -1274,7 +1274,7 @@ public void testHiddenAliasAndHiddenIndexResolution() { { // A hidden alias with a dot-prefixed name that points to one hidden index with a dot prefix, and one hidden index without - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put( indexBuilder(dottedHiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN) .putAlias(AliasMetadata.builder(dottedHiddenAlias).isHidden(true)) @@ -1310,7 +1310,7 @@ public void testHiddenIndexWithVisibleAliasOverlappingNameResolution() { IndicesOptions excludeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, false, true, false, false, false); IndicesOptions includeHiddenOptions = IndicesOptions.fromOptions(false, true, true, false, true, true, false, false, false); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put( indexBuilder(hiddenIndex, Settings.builder().put(INDEX_HIDDEN_SETTING.getKey(), true).build()).state(State.OPEN) .putAlias(AliasMetadata.builder(hiddenAlias).isHidden(true)) @@ -1349,7 +1349,7 @@ public void testConcreteIndicesAllPatternRandom() { ); { - final ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + final ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( project, indicesOptions, @@ -1368,7 +1368,7 @@ public void testConcreteIndicesAllPatternRandom() { { // with existing indices, asking for all indices should return all open/closed indices 
depending on options - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("aaa").state(State.OPEN).putAlias(AliasMetadata.builder("aaa_alias1"))) .put(indexBuilder("bbb").state(State.OPEN).putAlias(AliasMetadata.builder("bbb_alias1"))) .put(indexBuilder("ccc").state(State.CLOSE).putAlias(AliasMetadata.builder("ccc_alias1"))) @@ -1402,7 +1402,7 @@ public void testConcreteIndicesAllPatternRandom() { public void testConcreteIndicesWildcardNoMatch() { for (int i = 0; i < 10; i++) { IndicesOptions indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("aaa").state(State.OPEN).putAlias(AliasMetadata.builder("aaa_alias1"))) .put(indexBuilder("bbb").state(State.OPEN).putAlias(AliasMetadata.builder("bbb_alias1"))) .put(indexBuilder("ccc").state(State.CLOSE).putAlias(AliasMetadata.builder("ccc_alias1"))) @@ -1478,7 +1478,7 @@ public void testIsExplicitAllIndicesWildcard() { } public void testIndexOptionsFailClosedIndicesAndAliases() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put( indexBuilder("foo1-closed").state(IndexMetadata.State.CLOSE) .putAlias(AliasMetadata.builder("foobar1-closed")) @@ -1557,7 +1557,7 @@ public void testIndexOptionsFailClosedIndicesAndAliases() { } public void testDedupConcreteIndices() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("index1").putAlias(AliasMetadata.builder("alias1"))) .build(); IndicesOptions[] indicesOptions = new IndicesOptions[] { @@ -1577,7 +1577,7 @@ public void testDedupConcreteIndices() { } public void testFilterClosedIndicesOnAliases() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("alias-0"))) .put(indexBuilder("test-1").state(IndexMetadata.State.CLOSE).putAlias(AliasMetadata.builder("alias-1"))) .build(); @@ -1848,7 +1848,7 @@ public IndicesOptions indicesOptions() { } public void testConcreteWriteIndexWithInvalidIndicesRequest() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("test-0").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias"))) .build(); Function requestGen = (indices) -> new IndicesRequest() { @@ -2122,7 +2122,7 @@ public void testDeleteIndexIgnoresAliases() { } public void testIndicesAliasesRequestIgnoresAliases() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(indexBuilder("test-index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias"))) .put(indexBuilder("index").state(State.OPEN).putAlias(AliasMetadata.builder("test-alias2"))) .build(); @@ -2225,7 +2225,7 @@ public void testIndicesAliasesRequestTargetDataStreams() { final String dataStreamName = "my-data-stream"; IndexMetadata 
backingIndex = createBackingIndex(dataStreamName, 1).build(); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .put(backingIndex, false) .put(newInstance(dataStreamName, List.of(backingIndex.getIndex()))) .build(); @@ -2258,7 +2258,7 @@ public void testIndicesAliasesRequestTargetDataStreams() { } public void testInvalidIndex() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).put(indexBuilder("test")).build(); + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).put(indexBuilder("test")).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( project, IndicesOptions.lenientExpandOpen(), @@ -3328,7 +3328,7 @@ public void testMathExpressionSupportWithOlderDate() { } public void testRemoteIndex() { - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()).build(); { IndicesOptions options = IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index e2bcc8e95aeaf..7c0fc80ea73c4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -565,7 +565,7 @@ public void testCreateIndexInUnknownProject() { new IndexSettingProviders(Set.of()) ); PlainActionFuture createIndexFuture = new PlainActionFuture<>(); - ProjectId unknownProjectId = new ProjectId(randomUUID()); + ProjectId unknownProjectId = randomUniqueProjectId(); checkerService.createIndex( TimeValue.MAX_VALUE, TimeValue.MAX_VALUE, @@ -931,7 +931,7 @@ public void testInvalidAliasName() { request.index(), request.aliases(), List.of(), - ProjectMetadata.builder(new ProjectId(randomUUID())).build(), + ProjectMetadata.builder(randomUniqueProjectId()).build(), xContentRegistry(), searchExecutionContext, IndexNameExpressionResolver::resolveDateMathExpression, @@ -949,7 +949,7 @@ public void testAliasNameWithMathExpression() { request.index(), request.aliases(), List.of(), - ProjectMetadata.builder(new ProjectId(randomUUID())).build(), + ProjectMetadata.builder(randomUniqueProjectId()).build(), xContentRegistry(), searchExecutionContext, IndexNameExpressionResolver::resolveDateMathExpression, @@ -983,7 +983,7 @@ public void testRequestDataHavePriorityOverTemplateData() throws Exception { request.index(), request.aliases(), MetadataIndexTemplateService.resolveAliases(List.of(templateMetadata)), - ProjectMetadata.builder(new ProjectId(randomUUID())).build(), + ProjectMetadata.builder(randomUniqueProjectId()).build(), xContentRegistry(), searchExecutionContext, IndexNameExpressionResolver::resolveDateMathExpression, @@ -1083,7 +1083,7 @@ public void testTemplateOrder() throws Exception { request.index(), request.aliases(), MetadataIndexTemplateService.resolveAliases(templates), - ProjectMetadata.builder(new ProjectId(randomUUID())).build(), + ProjectMetadata.builder(randomUniqueProjectId()).build(), xContentRegistry(), searchExecutionContext, IndexNameExpressionResolver::resolveDateMathExpression, @@ -1124,7 +1124,7 @@ public void 
testResolvedAliasInTemplate() { request.index(), request.aliases(), MetadataIndexTemplateService.resolveAliases(templates), - ProjectMetadata.builder(new ProjectId(randomUUID())).build(), + ProjectMetadata.builder(randomUniqueProjectId()).build(), xContentRegistry(), searchExecutionContext, IndexNameExpressionResolver::resolveDateMathExpression, @@ -1173,7 +1173,7 @@ public void testClusterStateCreateIndexThrowsWriteIndexValidationException() thr .numberOfShards(1) .numberOfReplicas(0) .build(); - ProjectId projectId = new ProjectId(randomUUID()); + ProjectId projectId = randomUniqueProjectId(); ClusterState currentClusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .putProjectMetadata(ProjectMetadata.builder(projectId).put(existingWriteIndex, false)) .build(); @@ -1202,7 +1202,7 @@ public void testClusterStateCreateIndexThrowsWriteIndexValidationException() thr } public void testClusterStateCreateIndex() { - ProjectId projectId = new ProjectId(randomUUID()); + ProjectId projectId = randomUniqueProjectId(); ClusterState currentClusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .putProjectMetadata(ProjectMetadata.builder(projectId)) .build(); @@ -1238,7 +1238,7 @@ public void testClusterStateCreateIndex() { } public void testClusterStateCreateIndexWithMetadataTransaction() { - ProjectId projectId = new ProjectId(randomUUID()); + ProjectId projectId = randomUniqueProjectId(); ClusterState currentClusterState = ClusterState.builder(ClusterState.EMPTY_STATE) .putProjectMetadata( ProjectMetadata.builder(projectId) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 300005bf7d1d1..146dd13186b7e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -679,10 +679,10 @@ public void testMetadataGlobalStateChangesOnClusterUUIDChanges() { public void testMetadataGlobalStateChangesOnProjectChanges() { final Metadata metadata1 = Metadata.builder().build(); - final Metadata metadata2 = Metadata.builder(metadata1).put(ProjectMetadata.builder(new ProjectId(randomUUID())).build()).build(); + final Metadata metadata2 = Metadata.builder(metadata1).put(ProjectMetadata.builder(randomUniqueProjectId()).build()).build(); final Metadata metadata3 = Metadata.builder(metadata1) .put( - ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata.builder(randomUniqueProjectId()) .put(IndexMetadata.builder("some-index").settings(indexSettings(IndexVersion.current(), 1, 1))) .build() ) @@ -690,7 +690,7 @@ public void testMetadataGlobalStateChangesOnProjectChanges() { // A project with a ProjectCustom. 
final Metadata metadata4 = Metadata.builder(metadata1) .put( - ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata.builder(randomUniqueProjectId()) .put("template", new ComponentTemplate(new Template(null, null, null), null, null)) .build() ) @@ -1568,8 +1568,8 @@ public void testSerialization() throws IOException { public void testMultiProjectSerialization() throws IOException { // TODO: this whole suite needs to be updated for multiple projects - ProjectMetadata project1 = randomProject(new ProjectId("1"), 1); - ProjectMetadata project2 = randomProject(new ProjectId("2"), randomIntBetween(2, 10)); + ProjectMetadata project1 = randomProject(ProjectId.fromId("1"), 1); + ProjectMetadata project2 = randomProject(ProjectId.fromId("2"), randomIntBetween(2, 10)); Metadata metadata = randomMetadata(List.of(project1, project2)); BytesStreamOutput out = new BytesStreamOutput(); metadata.writeTo(out); @@ -1632,7 +1632,7 @@ public void testDiffSerializationPreMultiProject() throws IOException { public void testGetNonExistingProjectThrows() { final List projects = IntStream.range(0, between(1, 3)) - .mapToObj(i -> randomProject(new ProjectId("p_" + i), between(0, 5))) + .mapToObj(i -> randomProject(ProjectId.fromId("p_" + i), between(0, 5))) .toList(); final Metadata metadata = randomMetadata(projects); expectThrows(IllegalArgumentException.class, () -> metadata.getProject(randomProjectIdOrDefault())); @@ -2587,7 +2587,7 @@ public void testEmptyDiffReturnsSameInstance() throws IOException { public void testMultiProjectXContent() throws IOException { final long lastAllocationId = randomNonNegativeLong(); - final List projects = randomList(1, 5, () -> randomProject(new ProjectId(randomUUID()), randomIntBetween(1, 3))) + final List projects = randomList(1, 5, () -> randomProject(randomUniqueProjectId(), randomIntBetween(1, 3))) .stream() .map( project -> ProjectMetadata.builder(project) @@ -2658,7 +2658,7 @@ public void testMultiProjectXContent() throws IOException { public void testSingleNonDefaultProjectXContent() throws IOException { // When ClusterStateAction acts in a project scope, it returns cluster state metadata that has a single project that may // not have the default project-id. 
We need to be able to convert this to XContent in the Rest response - final ProjectMetadata project = ProjectMetadata.builder(new ProjectId("c8af967d644b3219")) + final ProjectMetadata project = ProjectMetadata.builder(ProjectId.fromId("c8af967d644b3219")) .put(IndexMetadata.builder("index-1").settings(indexSettings(IndexVersion.current(), 1, 1)).build(), false) .put(IndexMetadata.builder("index-2").settings(indexSettings(IndexVersion.current(), 2, 2)).build(), false) .build(); @@ -2987,14 +2987,14 @@ public void testGetSingleProjectWithCustom() { assertNull(metadata.getSingleProjectWithCustom(type)); } { - Metadata metadata = Metadata.builder().put(ProjectMetadata.builder(new ProjectId(randomUUID())).build()).build(); + Metadata metadata = Metadata.builder().put(ProjectMetadata.builder(randomUniqueProjectId()).build()).build(); assertNull(metadata.getSingleProjectCustom(type)); assertNull(metadata.getSingleProjectWithCustom(type)); } { var ingestMetadata = new IngestMetadata(Map.of()); Metadata metadata = Metadata.builder() - .put(ProjectMetadata.builder(new ProjectId(randomUUID())).putCustom(type, ingestMetadata)) + .put(ProjectMetadata.builder(randomUniqueProjectId()).putCustom(type, ingestMetadata)) .build(); assertEquals(ingestMetadata, metadata.getSingleProjectCustom(type)); assertEquals(ingestMetadata, metadata.getSingleProjectWithCustom(type).custom(type)); @@ -3002,8 +3002,8 @@ public void testGetSingleProjectWithCustom() { { var ingestMetadata = new IngestMetadata(Map.of()); Metadata metadata = Metadata.builder() - .put(ProjectMetadata.builder(new ProjectId(randomUUID()))) - .put(ProjectMetadata.builder(new ProjectId(randomUUID())).putCustom(type, ingestMetadata)) + .put(ProjectMetadata.builder(randomUniqueProjectId())) + .put(ProjectMetadata.builder(randomUniqueProjectId()).putCustom(type, ingestMetadata)) .build(); assertEquals(ingestMetadata, metadata.getSingleProjectCustom(type)); assertEquals(ingestMetadata, metadata.getSingleProjectWithCustom(type).custom(type)); @@ -3011,8 +3011,8 @@ public void testGetSingleProjectWithCustom() { { var ingestMetadata = new IngestMetadata(Map.of()); Metadata metadata = Metadata.builder() - .put(ProjectMetadata.builder(new ProjectId(randomUUID())).putCustom(type, new IngestMetadata(Map.of()))) - .put(ProjectMetadata.builder(new ProjectId(randomUUID())).putCustom(type, ingestMetadata)) + .put(ProjectMetadata.builder(randomUniqueProjectId()).putCustom(type, new IngestMetadata(Map.of()))) + .put(ProjectMetadata.builder(randomUniqueProjectId()).putCustom(type, ingestMetadata)) .build(); assertThrows(UnsupportedOperationException.class, () -> metadata.getSingleProjectCustom(type)); assertThrows(UnsupportedOperationException.class, () -> metadata.getSingleProjectWithCustom(type)); @@ -3056,7 +3056,7 @@ public void testProjectLookupWithMultipleProjects() { final Metadata.Builder metadataBuilder = Metadata.builder(); final Map> indices = Maps.newMapWithExpectedSize(numberOfProjects); for (int p = 1; p <= numberOfProjects; p++) { - final ProjectId projectId = new ProjectId(org.elasticsearch.core.Strings.format("proj_%02d", p)); + final ProjectId projectId = ProjectId.fromId(org.elasticsearch.core.Strings.format("proj_%02d", p)); final ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(projectId); final int numberOfIndices = randomIntBetween(p, 10); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectIdTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectIdTests.java index 
0eaf81921889b..30fbfad730327 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectIdTests.java @@ -22,9 +22,9 @@ public class ProjectIdTests extends AbstractWireSerializingTestCase { public void testCannotCreateBlankProjectId() { - expectThrows(IllegalArgumentException.class, () -> new ProjectId((String) null)); - expectThrows(IllegalArgumentException.class, () -> new ProjectId("")); - expectThrows(IllegalArgumentException.class, () -> new ProjectId(" ")); + expectThrows(IllegalArgumentException.class, () -> ProjectId.fromId(null)); + expectThrows(IllegalArgumentException.class, () -> ProjectId.fromId("")); + expectThrows(IllegalArgumentException.class, () -> ProjectId.fromId(" ")); } public void testValidateProjectId() { @@ -63,10 +63,10 @@ protected Writeable.Reader instanceReader() { @Override protected ProjectId createTestInstance() { return switch (randomIntBetween(1, 4)) { - case 1 -> new ProjectId(randomUUID()); - case 2 -> new ProjectId(randomAlphaOfLengthBetween(1, 30)); - case 3 -> new ProjectId(Long.toString(randomLongBetween(1, Long.MAX_VALUE))); - default -> new ProjectId(Long.toString(randomLongBetween(1, Long.MAX_VALUE), Character.MAX_RADIX)); + case 1 -> randomUniqueProjectId(); + case 2 -> ProjectId.fromId(randomAlphaOfLengthBetween(1, 30)); + case 3 -> ProjectId.fromId(Long.toString(randomLongBetween(1, Long.MAX_VALUE))); + default -> ProjectId.fromId(Long.toString(randomLongBetween(1, Long.MAX_VALUE), Character.MAX_RADIX)); }; } @@ -77,7 +77,7 @@ protected ProjectId mutateInstance(ProjectId instance) throws IOException { public void testToString() { String s = randomAlphaOfLengthBetween(8, 16); - ProjectId id = new ProjectId(s); + ProjectId id = ProjectId.fromId(s); assertThat(id.toString(), equalTo(s)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java index c8dc8177b74eb..ee84c63a0cc37 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ProjectMetadataTests.java @@ -34,7 +34,7 @@ public class ProjectMetadataTests extends ESTestCase { public void testToXContent() throws IOException { - final ProjectId projectId = new ProjectId(randomUUID()); + final ProjectId projectId = randomUniqueProjectId(); final ProjectMetadata.Builder builder = ProjectMetadata.builder(projectId); for (int i = 1; i <= 3; i++) { builder.put( diff --git a/server/src/test/java/org/elasticsearch/cluster/project/ProjectResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/project/ProjectResolverTests.java index a1730181ee63f..6fdf1ce9fd619 100644 --- a/server/src/test/java/org/elasticsearch/cluster/project/ProjectResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/project/ProjectResolverTests.java @@ -69,7 +69,7 @@ public void cleanup() { public void testGetById() { var projects = createProjects(); - var expectedProject = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + var expectedProject = ProjectMetadata.builder(randomUniqueProjectId()).build(); projects.put(expectedProject.id(), expectedProject); var metadata = Metadata.builder().projectMetadata(projects).build(); threadPool.getThreadContext().putHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER, expectedProject.id().id()); @@ -114,7 +114,7 @@ public void 
testGetByIdNonExisting() { public void testGetAllProjectIdsWhenAllowed() { allowAllProjects = () -> true; var projects = createProjects(); - var randomProject = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + var randomProject = ProjectMetadata.builder(randomUniqueProjectId()).build(); projects.put(randomProject.id(), randomProject); var state = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().projectMetadata(projects).build()).build(); var actualProjects = resolver.getProjectIds(state); @@ -134,7 +134,7 @@ public void testGetAllProjectIdsWhenNotAllowed() { public void testGetProjectIdsWithHeader() { var projects = createProjects(); - var expectedProject = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + var expectedProject = ProjectMetadata.builder(randomUniqueProjectId()).build(); projects.put(expectedProject.id(), expectedProject); var state = ClusterState.builder(ClusterName.DEFAULT).metadata(Metadata.builder().projectMetadata(projects).build()).build(); threadPool.getThreadContext().putHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER, expectedProject.id().id()); @@ -144,8 +144,8 @@ public void testGetProjectIdsWithHeader() { } public void testExecuteOnProject() { - final ProjectId projectId1 = new ProjectId("1-" + randomAlphaOfLength(4)); - final ProjectId projectId2 = new ProjectId("2-" + randomAlphaOfLength(4)); + final ProjectId projectId1 = randomUniqueProjectId(); + final ProjectId projectId2 = randomUniqueProjectId(); final Map projects = createProjects(); projects.put(projectId1, ProjectMetadata.builder(projectId1).build()); @@ -214,7 +214,7 @@ public void testShouldSupportsMultipleProjects() { private static Map createProjects() { return randomMap(0, 5, () -> { - var id = new ProjectId(randomUUID()); + var id = randomUniqueProjectId(); return Tuple.tuple(id, ProjectMetadata.builder(id).build()); }); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTests.java index 84774e499e13e..fbaaf876116b8 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/GlobalRoutingTableTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.DiffableTestUtils; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; @@ -208,7 +209,7 @@ public void testInitializeProjects() { Set addition = randomSet( 1, 5, - () -> randomValueOtherThanMany(table1.routingTables()::containsKey, () -> new ProjectId(randomUUID())) + () -> randomValueOtherThanMany(table1.routingTables()::containsKey, ESTestCase::randomUniqueProjectId) ); var table2 = table1.initializeProjects(addition); assertThat(table2, not(sameInstance(table1))); @@ -223,7 +224,7 @@ public void testInitializeProjects() { public void testBuilderFromEmpty() { final int numberOfProjects = randomIntBetween(1, 10); - final ProjectId[] projectIds = randomArray(numberOfProjects, numberOfProjects, ProjectId[]::new, () -> new ProjectId(randomUUID())); + final ProjectId[] projectIds = randomArray(numberOfProjects, numberOfProjects, ProjectId[]::new, ESTestCase::randomUniqueProjectId); final Integer[] projectIndexCount = randomArray(numberOfProjects, numberOfProjects, Integer[]::new, () -> randomIntBetween(0, 12)); 
final GlobalRoutingTable.Builder builder = GlobalRoutingTable.builder(); @@ -272,8 +273,8 @@ public void testBuilderFromExisting() { public void testRoutingNodesRoundtrip() { final ClusterState clusterState = buildClusterState( Map.ofEntries( - Map.entry(new ProjectId(randomAlphaOfLength(11) + "1"), Set.of("test-a", "test-b", "test-c")), - Map.entry(new ProjectId(randomAlphaOfLength(11) + "2"), Set.of("test-a", "test-z")) + Map.entry(ProjectId.fromId(randomAlphaOfLength(11) + "1"), Set.of("test-a", "test-b", "test-c")), + Map.entry(ProjectId.fromId(randomAlphaOfLength(11) + "2"), Set.of("test-a", "test-z")) ) ); @@ -285,8 +286,8 @@ public void testRoutingNodesRoundtrip() { } public void testRebuildAfterShardInitialized() { - final ProjectId project1 = new ProjectId(randomAlphaOfLength(11) + "1"); - final ProjectId project2 = new ProjectId(randomAlphaOfLength(11) + "2"); + final ProjectId project1 = ProjectId.fromId(randomAlphaOfLength(11) + "1"); + final ProjectId project2 = ProjectId.fromId(randomAlphaOfLength(11) + "2"); final ClusterState clusterState = buildClusterState( Map.ofEntries(Map.entry(project1, Set.of("test-a", "test-b", "test-c")), Map.entry(project2, Set.of("test-b", "test-z"))) ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 86171f467ccf0..1393be120f9ac 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.MetadataIndexStateService; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes.Builder; @@ -382,7 +381,7 @@ public void testValidations() { final RoutingTableGenerator.ShardCounter counter = new RoutingTableGenerator.ShardCounter(); final IndexRoutingTable indexRoutingTable = routingTableGenerator.genIndexRoutingTable(indexMetadata, counter); indexMetadata = updateActiveAllocations(indexRoutingTable, indexMetadata); - var projectId = new ProjectId(randomUUID()); + var projectId = randomUniqueProjectId(); ProjectMetadata metadata = ProjectMetadata.builder(projectId).put(indexMetadata, true).build(); // test no validation errors assertTrue(indexRoutingTable.validate(metadata)); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationServiceTests.java index 2f96197b402f8..3e341413eb77f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationServiceTests.java @@ -123,7 +123,9 @@ public void testAssignsPrimariesInPriorityOrderThenReplicas() { final int numberOfProjects = randomIntBetween(1, 5); final List projects = numberOfProjects == 1 ? 
List.of(ProjectMetadata.builder(Metadata.DEFAULT_PROJECT_ID)) - : IntStream.range(0, numberOfProjects).mapToObj(n -> ProjectMetadata.builder(new ProjectId(randomUUID() + "-" + n))).toList(); + : IntStream.range(0, numberOfProjects) + .mapToObj(n -> ProjectMetadata.builder(ProjectId.fromId(randomUUID() + "-" + n))) + .toList(); // throttle (incoming) recoveries in order to observe the order of operations, but do not throttle outgoing recoveries since // the effects of that depend on the earlier (random) allocations @@ -330,7 +332,7 @@ public void testExplainsNonAllocationOfShardWithUnknownAllocator() { public void testHealthStatusWithMultipleProjects() { final Supplier buildProject = () -> { - final ProjectMetadata.Builder builder = ProjectMetadata.builder(new ProjectId(randomUUID())); + final ProjectMetadata.Builder builder = ProjectMetadata.builder(randomUniqueProjectId()); final Set indices = randomSet(1, 8, () -> randomAlphaOfLengthBetween(3, 12)); indices.forEach( indexName -> builder.put( @@ -391,9 +393,9 @@ public void testAutoExpandReplicas() throws Exception { TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY ); - final ProjectId project1 = new ProjectId(randomUUID()); - final var project2 = new ProjectId(randomUUID()); - final var project3 = new ProjectId(randomUUID()); + final ProjectId project1 = randomUniqueProjectId(); + final var project2 = randomUniqueProjectId(); + final var project3 = randomUniqueProjectId(); // return same cluster state when there are no changes ClusterState state = ClusterState.builder(ClusterName.DEFAULT) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdaterTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdaterTests.java index 4aa32531fcb6e..2d3b03f5161d4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdaterTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdaterTests.java @@ -40,9 +40,9 @@ public class IndexMetadataUpdaterTests extends ESTestCase { public void testApplyChangesAcrossMultipleProjects() { final IndexMetadataUpdater updater = new IndexMetadataUpdater(); - final ProjectId project1 = new ProjectId(randomUUID()); - final ProjectId project2 = new ProjectId(randomUUID()); - final ProjectId project3 = new ProjectId(randomUUID()); + final ProjectId project1 = randomUniqueProjectId(); + final ProjectId project2 = randomUniqueProjectId(); + final ProjectId project3 = randomUniqueProjectId(); final DiscoveryNode node1 = DiscoveryNodeUtils.create("n1"); final DiscoveryNode node2 = DiscoveryNodeUtils.create("n2"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java index 13e3cbf57c467..cdf47ee1dc6cc 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java @@ -70,7 +70,7 @@ public void setUp() throws Exception { private ClusterState createInitialClusterState(boolean startShards) { Metadata.Builder metaBuilder = Metadata.builder(); - projectId = new ProjectId(randomUUID()); + projectId = randomUniqueProjectId(); metaBuilder.put( ProjectMetadata.builder(projectId) .put( @@ -308,7 +308,7 @@ public void testSourcePrimaryActive() { public void 
testGetForcedInitialShardAllocationToNodes() { final int additionalProjects = randomIntBetween(0, 5); - projectId = additionalProjects == 0 ? Metadata.DEFAULT_PROJECT_ID : new ProjectId(randomUUID()); + projectId = additionalProjects == 0 ? Metadata.DEFAULT_PROJECT_ID : randomUniqueProjectId(); var source = IndexMetadata.builder("source") .settings(indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.SETTING_INDEX_UUID, "uuid-1")) .build(); @@ -367,7 +367,7 @@ public void testGetForcedInitialShardAllocationToNodes() { private static void includeAdditionalProjects(int projectCount, Metadata.Builder metadataBuilder) { for (int i = 0; i < projectCount; i++) { - final ProjectMetadata.Builder project = ProjectMetadata.builder(new ProjectId(randomUUID())); + final ProjectMetadata.Builder project = ProjectMetadata.builder(randomUniqueProjectId()); for (String index : randomSubsetOf(List.of("source", "target", "index-" + i))) { final Settings.Builder indexSettings = indexSettings(IndexVersion.current(), randomIntBetween(1, 5), randomIntBetween(0, 2)) .put(IndexMetadata.SETTING_INDEX_UUID, randomUUID()); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeSourceIndexSettingsUpdaterTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeSourceIndexSettingsUpdaterTests.java index 7d1cd60b92828..9f6d13a2ed142 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeSourceIndexSettingsUpdaterTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeSourceIndexSettingsUpdaterTests.java @@ -61,7 +61,7 @@ public void testResizeIndexSettingsRemovedAfterStart() { final String targetIndex = "target"; final ProjectMetadata sourceProject = ProjectMetadata.builder( - additionalProjects == 0 ? Metadata.DEFAULT_PROJECT_ID : new ProjectId(randomUUID()) + additionalProjects == 0 ? 
Metadata.DEFAULT_PROJECT_ID : randomUniqueProjectId() ) .put( IndexMetadata.builder(sourceIndex) @@ -79,7 +79,7 @@ public void testResizeIndexSettingsRemovedAfterStart() { final Metadata.Builder mBuilder = Metadata.builder().put(sourceProject); for (int i = 0; i < additionalProjects; i++) { - final ProjectMetadata.Builder project = ProjectMetadata.builder(new ProjectId(randomUUID())); + final ProjectMetadata.Builder project = ProjectMetadata.builder(randomUniqueProjectId()); final int indexCount = randomIntBetween(0, 5); for (int j = 0; j < indexCount; j++) { project.put( @@ -226,9 +226,9 @@ public void testResizeIndexSettingsRemovedAfterStart() { public void testHandleChangesAcrossMultipleProjects() { final ResizeSourceIndexSettingsUpdater updater = new ResizeSourceIndexSettingsUpdater(); final Metadata metadata = Metadata.builder() - .put(ProjectMetadata.builder(new ProjectId("p1"))) + .put(ProjectMetadata.builder(ProjectId.fromId("p1"))) .put( - ProjectMetadata.builder(new ProjectId("p2")) + ProjectMetadata.builder(ProjectId.fromId("p2")) .put( IndexMetadata.builder("index-a") .settings( @@ -243,7 +243,7 @@ public void testHandleChangesAcrossMultipleProjects() { ) ) .put( - ProjectMetadata.builder(new ProjectId("p3")) + ProjectMetadata.builder(ProjectId.fromId("p3")) .put( IndexMetadata.builder("index-a") .settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, randomUUID())) @@ -263,7 +263,7 @@ public void testHandleChangesAcrossMultipleProjects() { ) ) .put( - ProjectMetadata.builder(new ProjectId("p4")) + ProjectMetadata.builder(ProjectId.fromId("p4")) .put( IndexMetadata.builder("index-a") .settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, randomUUID())) @@ -278,7 +278,7 @@ public void testHandleChangesAcrossMultipleProjects() { ) ) .put( - ProjectMetadata.builder(new ProjectId("p5")) + ProjectMetadata.builder(ProjectId.fromId("p5")) .put( IndexMetadata.builder("index-a") .settings( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java index aec5261efb24a..a053b5c6d515f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RoutingNodesTests.java @@ -427,7 +427,7 @@ public void testBuildRoutingNodesForMultipleProjects() { final int numberOfProjects = randomIntBetween(2, 10); int shardCount = 0; for (int i = 1; i <= numberOfProjects; i++) { - var projectId = new ProjectId("p" + i); + var projectId = ProjectId.fromId("p" + i); mb.put( ProjectMetadata.builder(projectId) .put( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java index ba64ffa80111d..b32621e62ab58 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocatorTests.java @@ -142,7 +142,7 @@ public void testDecideShardAllocationWhenThereAreMultipleProjects() { // Create some projects with some assigned indices for (int i = 1; i <= numberOfProjects; i++) { - var projectId = new ProjectId(Strings.format("proj_%02d", i)); + var projectId = 
ProjectId.fromId(Strings.format("proj_%02d", i)); String[] indices = { // 2 indices that are unique to this project "index_proj_" + i + "a", diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index f2ca22937c80c..4eed552d5f1af 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -229,9 +229,9 @@ private static void doTestUnassignedPrimariesBeforeUnassignedReplicas(boolean mu final ProjectId project0, project1; if (multiProject) { - project0 = new ProjectId(randomUUID()); + project0 = randomUniqueProjectId(); metadataBuilder.put(ProjectMetadata.builder(project0).put(indexMetadata0, true)); - project1 = new ProjectId(randomUUID()); + project1 = randomUniqueProjectId(); metadataBuilder.put(ProjectMetadata.builder(project1).put(indexMetadata1, true)); } else { project0 = Metadata.DEFAULT_PROJECT_ID; diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java index 4799c63a13fef..c4345d50c394e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/OrderedShardsIteratorTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GlobalRoutingTable; @@ -185,9 +184,9 @@ public void testShouldOrderShardByPriorityAcrossMultipleProjects() { // "lookup" index in project 1, data streams in project 2 var metadata = Metadata.builder() - .put(ProjectMetadata.builder(new ProjectId(randomUUID())).put(lookup, false)) + .put(ProjectMetadata.builder(randomUniqueProjectId()).put(lookup, false)) .put( - ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata.builder(randomUniqueProjectId()) .put(ds1, false) .put(ds2, false) .put(DataStream.builder("data-stream", List.of(ds1.getIndex(), ds2.getIndex())).build()) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 9c27ab0e3ca6e..9522629de8f0d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -663,8 +663,8 @@ private void doTestShardRelocationsTakenIntoAccount(boolean testMaxHeadroom, boo var metadataBuilder = Metadata.builder(); final ProjectId projectId1, projectId2; if (multipleProjects) { - projectId1 = new ProjectId(randomUUID()); - projectId2 = new ProjectId(randomUUID()); + projectId1 = randomUniqueProjectId(); + projectId2 = randomUniqueProjectId(); 
metadataBuilder.put(ProjectMetadata.builder(projectId1).put(indexMetadata1, false)); metadataBuilder.put(ProjectMetadata.builder(projectId2).put(indexMetadata2, false)); } else { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index a89db019fa579..e7459c2edfb31 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.TestShardRoutingRoleStrategies; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -357,7 +356,7 @@ public void testWithMultipleProjects() { .attributes(Map.ofEntries(Map.entry("can_allocate", "false"))) .build(); - ProjectMetadata project1 = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project1 = ProjectMetadata.builder(randomUniqueProjectId()) .put( IndexMetadata.builder("index-a") .settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, randomUUID())) @@ -370,7 +369,7 @@ public void testWithMultipleProjects() { ) ) .build(); - ProjectMetadata project2 = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project2 = ProjectMetadata.builder(randomUniqueProjectId()) .put( IndexMetadata.builder("index-a") .settings( diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDeciderTests.java index f82bdd68511b3..0129b18fe7bba 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDeciderTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.TestShardRoutingRoleStrategies; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GlobalRoutingTable; @@ -74,7 +73,7 @@ public void testAllowRebalanceForMultipleIndicesAcrossMultipleProjects() { final Iterator nodeItr = Iterators.cycling(nodeIds); for (int p = 1; p <= numberOfProjects; p++) { final int numberOfIndices = randomIntBetween(1, 3); - var project = ProjectMetadata.builder(new ProjectId(randomUUID())); + var project = ProjectMetadata.builder(randomUniqueProjectId()); var rt = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); for (int i = 1; i <= numberOfIndices; i++) { final int numberOfShards = randomIntBetween(1, 5); diff --git a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java index 
e76dcc82364c6..87230f9f8a9b0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java @@ -359,7 +359,7 @@ public void testShardsAndPreferNodeRouting() { Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build() ); - ProjectId projectId = new ProjectId(randomUUID()); + ProjectId projectId = randomUniqueProjectId(); Metadata metadata = Metadata.builder() .put( ProjectMetadata.builder(projectId) diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index 9f5ca0e559126..40c5efaa8dec9 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -1028,7 +1028,7 @@ public void testPersistsAndReloadsIndexMetadataForMultipleIndicesInMultipleProje final PersistedClusterStateService persistedClusterStateService = newPersistedClusterStateService(nodeEnvironment); final long term = randomLongBetween(1L, Long.MAX_VALUE); - final List projectIds = randomList(1, 5, () -> new ProjectId(randomUUID())); + final List projectIds = randomList(1, 5, ESTestCase::randomUniqueProjectId); try (Writer writer = persistedClusterStateService.createWriter()) { final ClusterState clusterState = loadPersistedClusterState(persistedClusterStateService); @@ -1597,7 +1597,7 @@ public void testOldestIndexVersionIsCorrectlySerialized() throws IOException { IndexVersion.fromId(IndexVersion.current().id() + 1) }; int lastIndexNum = randomIntBetween(9, 50); Metadata.Builder b = Metadata.builder(); - List projects = randomList(1, 3, () -> ProjectMetadata.builder(new ProjectId(randomUUID()))); + List projects = randomList(1, 3, () -> ProjectMetadata.builder(randomUniqueProjectId())); projects.forEach(b::put); for (IndexVersion indexVersion : indexVersions) { String indexUUID = UUIDs.randomBase64UUID(random()); diff --git a/server/src/test/java/org/elasticsearch/indices/TimestampFieldMapperServiceTests.java b/server/src/test/java/org/elasticsearch/indices/TimestampFieldMapperServiceTests.java index 6cf0d66630edb..7ef1c56f27a4a 100644 --- a/server/src/test/java/org/elasticsearch/indices/TimestampFieldMapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/TimestampFieldMapperServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; @@ -85,7 +84,7 @@ public void testApplyingClusterStateWithMultipleProjects() { private static ClusterState initialClusterState() { final var projects = randomMap(1, 5, () -> { - ProjectMetadata.Builder builder = ProjectMetadata.builder(new ProjectId(randomUUID())); + ProjectMetadata.Builder builder = ProjectMetadata.builder(randomUniqueProjectId()); randomList(5, () -> createIndex(randomBoolean())).forEach(index -> builder.put(index, false)); return Tuple.tuple(builder.getId(), builder.build()); }); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java 
b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 86f5a4dae1ade..36ce4292c7d6d 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -2595,7 +2595,7 @@ public void testResolveRequiredOrDefaultPipelineDefaultPipeline() { .numberOfShards(1) .numberOfReplicas(0) .putAlias(AliasMetadata.builder("alias").writeIndex(true).build()); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); // index name matches with IDM: IndexRequest indexRequest = new IndexRequest("idx"); @@ -2615,7 +2615,7 @@ public void testResolveRequiredOrDefaultPipelineDefaultPipeline() { IndexTemplateMetadata.Builder templateBuilder = IndexTemplateMetadata.builder("name1") .patterns(List.of("id*")) .settings(settings(IndexVersion.current()).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")); - projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(templateBuilder).build(); + projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(templateBuilder).build(); indexRequest = new IndexRequest("idx"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -2629,7 +2629,7 @@ public void testResolveFinalPipeline() { .numberOfShards(1) .numberOfReplicas(0) .putAlias(AliasMetadata.builder("alias").writeIndex(true).build()); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); // index name matches with IDM: IndexRequest indexRequest = new IndexRequest("idx"); @@ -2651,7 +2651,7 @@ public void testResolveFinalPipeline() { IndexTemplateMetadata.Builder templateBuilder = IndexTemplateMetadata.builder("name1") .patterns(List.of("id*")) .settings(settings(IndexVersion.current()).put(IndexSettings.FINAL_PIPELINE.getKey(), "final-pipeline")); - projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(templateBuilder).build(); + projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(templateBuilder).build(); indexRequest = new IndexRequest("idx"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -2681,7 +2681,7 @@ public void testResolveFinalPipelineWithDateMathExpression() { public void testResolveRequestOrDefaultPipelineAndFinalPipeline() { // no pipeline: { - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexRequest indexRequest = new IndexRequest("idx"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertFalse(hasPipeline(indexRequest)); @@ -2691,7 +2691,7 @@ public void testResolveRequestOrDefaultPipelineAndFinalPipeline() { // request pipeline: { - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); 
IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -2705,7 +2705,7 @@ public void testResolveRequestOrDefaultPipelineAndFinalPipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default-pipeline")) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -2719,7 +2719,7 @@ public void testResolveRequestOrDefaultPipelineAndFinalPipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.FINAL_PIPELINE.getKey(), "final-pipeline")) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline("request-pipeline"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -3099,7 +3099,7 @@ public void testPutPipelineWithVersionedUpdateIncrementsVersion() throws Excepti public void testResolvePipelinesWithNonePipeline() { // _none request pipeline: { - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline(NOOP_PIPELINE_NAME); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertFalse(hasPipeline(indexRequest)); @@ -3113,7 +3113,7 @@ public void testResolvePipelinesWithNonePipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.DEFAULT_PIPELINE.getKey(), NOOP_PIPELINE_NAME)) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertFalse(hasPipeline(indexRequest)); @@ -3127,7 +3127,7 @@ public void testResolvePipelinesWithNonePipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.DEFAULT_PIPELINE.getKey(), NOOP_PIPELINE_NAME)) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline("pipeline1"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -3141,7 +3141,7 @@ public void testResolvePipelinesWithNonePipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.DEFAULT_PIPELINE.getKey(), 
"default-pipeline")) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline(NOOP_PIPELINE_NAME); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertFalse(hasPipeline(indexRequest)); @@ -3155,7 +3155,7 @@ public void testResolvePipelinesWithNonePipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.FINAL_PIPELINE.getKey(), "final-pipeline")) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline(NOOP_PIPELINE_NAME); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); @@ -3170,7 +3170,7 @@ public void testResolvePipelinesWithNonePipeline() { .settings(settings(IndexVersion.current()).put(IndexSettings.FINAL_PIPELINE.getKey(), NOOP_PIPELINE_NAME)) .numberOfShards(1) .numberOfReplicas(0); - ProjectMetadata projectMetadata = ProjectMetadata.builder(new ProjectId(randomUUID())).put(builder).build(); + ProjectMetadata projectMetadata = ProjectMetadata.builder(randomUniqueProjectId()).put(builder).build(); IndexRequest indexRequest = new IndexRequest("idx").setPipeline("pipeline1"); IngestService.resolvePipelinesAndUpdateIndexRequest(indexRequest, indexRequest, projectMetadata); assertTrue(hasPipeline(indexRequest)); diff --git a/test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityRolesMultiProjectIT.java b/test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityRolesMultiProjectIT.java index f6e910a48081d..5f4d7eb19bfe2 100644 --- a/test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityRolesMultiProjectIT.java +++ b/test/external-modules/multi-project/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityRolesMultiProjectIT.java @@ -60,8 +60,8 @@ protected Settings restClientSettings() { } public void testUsersWithSameRoleNamesInDifferentProjects() throws Exception { - var project1 = new ProjectId(randomIdentifier()); - var project2 = new ProjectId(randomIdentifier()); + var project1 = randomUniqueProjectId(); + var project2 = randomUniqueProjectId(); createProject(project1.id()); createProject(project2.id()); @@ -79,8 +79,8 @@ public void testUsersWithSameRoleNamesInDifferentProjects() throws Exception { } public void testInvalidateRoleInSingleProjectOnly() throws Exception { - var project1 = new ProjectId(randomIdentifier()); - var project2 = new ProjectId(randomIdentifier()); + var project1 = randomUniqueProjectId(); + var project2 = randomUniqueProjectId(); createProject(project1.id()); createProject(project2.id()); @@ -117,8 +117,8 @@ public void testUpdatingFileBasedRoleAffectsAllProjects() throws Exception { - monitor """, roleName))); - var project1 = new ProjectId(randomIdentifier()); - var project2 = new ProjectId(randomIdentifier()); + var project1 = randomUniqueProjectId(); + var project2 = randomUniqueProjectId(); createProject(project1.id()); 
createProject(project2.id()); diff --git a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/DeleteProjectAction.java b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/DeleteProjectAction.java index 1f0c207149601..56a6378c9f461 100644 --- a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/DeleteProjectAction.java +++ b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/DeleteProjectAction.java @@ -147,7 +147,7 @@ public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, ProjectId proj public Request(StreamInput in) throws IOException { super(in); - this.projectId = new ProjectId(in); + this.projectId = ProjectId.readFrom(in); } @Override diff --git a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/PutProjectAction.java b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/PutProjectAction.java index 8fd829b7812cf..b6a799d4ce0e7 100644 --- a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/PutProjectAction.java +++ b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/PutProjectAction.java @@ -121,7 +121,7 @@ public ClusterState execute(BatchExecutionContext batchExecution public static class Request extends AcknowledgedRequest { - private static final Pattern VALID_PROJECT_ID_PATTERN = Pattern.compile("[a-z0-9]+"); + private static final Pattern VALID_PROJECT_ID_PATTERN = Pattern.compile("[-_a-zA-Z0-9]+"); private final ProjectId projectId; @@ -132,7 +132,7 @@ public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, ProjectId proj public Request(StreamInput in) throws IOException { super(in); - this.projectId = new ProjectId(in); + this.projectId = ProjectId.readFrom(in); } @Override diff --git a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestDeleteProjectAction.java b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestDeleteProjectAction.java index 264c53a360a51..0ce85ca2460c7 100644 --- a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestDeleteProjectAction.java +++ b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestDeleteProjectAction.java @@ -41,7 +41,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient final DeleteProjectAction.Request deleteProjectRequest = new DeleteProjectAction.Request( getMasterNodeTimeout(restRequest), getAckTimeout(restRequest), - new ProjectId(restRequest.param("id")) + ProjectId.fromId(restRequest.param("id")) ); return channel -> client.execute(DeleteProjectAction.INSTANCE, deleteProjectRequest, new RestToXContentListener<>(channel)); } diff --git a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestPutProjectAction.java b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestPutProjectAction.java index 170bbd6a62654..6d16735e69d06 100644 --- a/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestPutProjectAction.java +++ b/test/external-modules/multi-project/src/main/java/org/elasticsearch/multiproject/action/RestPutProjectAction.java @@ -41,7 +41,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, 
NodeClient final PutProjectAction.Request putProjectRequest = new PutProjectAction.Request( getMasterNodeTimeout(restRequest), getAckTimeout(restRequest), - new ProjectId(restRequest.param("id")) + ProjectId.fromId(restRequest.param("id")) ); return channel -> client.execute(PutProjectAction.INSTANCE, putProjectRequest, new RestToXContentListener<>(channel)); } diff --git a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java index c4cf82560d460..1fb11acb550f7 100644 --- a/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java +++ b/test/external-modules/multi-project/src/test/java/org/elasticsearch/multiproject/action/DeleteProjectActionTests.java @@ -37,7 +37,7 @@ public void init() { } public void testSimpleDelete() throws Exception { - var projects = randomList(1, 5, () -> new ProjectId(randomUUID())); + var projects = randomList(1, 5, ESTestCase::randomUniqueProjectId); var deletedProjects = randomSubsetOf(projects); var state = buildState(projects); var tasks = deletedProjects.stream().map(this::createTask).toList(); @@ -56,13 +56,13 @@ public void testSimpleDelete() throws Exception { } public void testDeleteNonExisting() throws Exception { - var projects = randomList(1, 5, () -> new ProjectId(randomUUID())); + var projects = randomList(1, 5, ESTestCase::randomUniqueProjectId); var deletedProjects = randomSubsetOf(projects); var state = buildState(projects); var listener = ActionListener.assertAtLeastOnce( ActionTestUtils.assertNoSuccessListener(e -> assertTrue(e instanceof IllegalArgumentException)) ); - var nonExistingTask = createTask(new ProjectId(randomUUID()), listener); + var nonExistingTask = createTask(randomUniqueProjectId(), listener); var tasks = Stream.concat(Stream.of(nonExistingTask), deletedProjects.stream().map(this::createTask)).toList(); var result = ClusterStateTaskExecutorUtils.executeIgnoringFailures(state, executor, tasks); for (ProjectId deletedProject : deletedProjects) { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/project/TestProjectResolvers.java b/test/framework/src/main/java/org/elasticsearch/cluster/project/TestProjectResolvers.java index abf4620cc8167..951b5da7cfab9 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/project/TestProjectResolvers.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/project/TestProjectResolvers.java @@ -165,7 +165,7 @@ public ProjectMetadata getProjectMetadata(Metadata metadata) { @Override public ProjectId getProjectId() { String headerValue = threadContext.getHeader(Task.X_ELASTIC_PROJECT_ID_HTTP_HEADER); - return headerValue != null ? new ProjectId(headerValue) : Metadata.DEFAULT_PROJECT_ID; + return headerValue != null ? ProjectId.fromId(headerValue) : Metadata.DEFAULT_PROJECT_ID; } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 2ed86cf701378..cc44fba22a196 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -1284,14 +1284,14 @@ public static String randomIdentifier() { * Returns a project id. This may be {@link Metadata#DEFAULT_PROJECT_ID}, or it may be a randomly-generated id. 
     */
    public static ProjectId randomProjectIdOrDefault() {
-        return randomBoolean() ? Metadata.DEFAULT_PROJECT_ID : new ProjectId(randomUUID());
+        return randomBoolean() ? Metadata.DEFAULT_PROJECT_ID : randomUniqueProjectId();
    }

    /**
     * Returns a new randomly-generated project id
     */
    public static ProjectId randomUniqueProjectId() {
-        return new ProjectId(randomUUID());
+        return ProjectId.fromId(randomUUID());
    }

    public static String randomUUID() {
diff --git a/test/framework/src/test/java/org/elasticsearch/cluster/project/TestProjectResolversTests.java b/test/framework/src/test/java/org/elasticsearch/cluster/project/TestProjectResolversTests.java
index 6c05508ee8757..39a1ef7f48d72 100644
--- a/test/framework/src/test/java/org/elasticsearch/cluster/project/TestProjectResolversTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/cluster/project/TestProjectResolversTests.java
@@ -35,7 +35,7 @@ public void testAllProjects() {
    }

    public void testSingleProject() {
-        final ProjectId projectId = new ProjectId(randomUUID());
+        final ProjectId projectId = randomUniqueProjectId();
        final ProjectResolver projectResolver = TestProjectResolvers.singleProject(projectId);

        assertThat(projectResolver.getProjectId(), equalTo(projectId));
@@ -44,7 +44,7 @@ public void testSingleProject() {
    }

    public void testSingleProjectOnly_getProjectIdAndMetadata() {
-        final ProjectId projectId = new ProjectId(randomUUID());
+        final ProjectId projectId = randomUniqueProjectId();
        final ClusterState state = buildClusterState(projectId);

        final ProjectResolver projectResolver = TestProjectResolvers.singleProjectOnly();
@@ -60,19 +60,19 @@ public void testSingleProjectOnly_getProjectIdAndMetadata() {
    public void testSingleProjectOnly_getProjectIds() {
        {
            final ProjectResolver projectResolver = TestProjectResolvers.singleProjectOnly();
-            final ProjectId projectId = new ProjectId(randomUUID());
+            final ProjectId projectId = randomUniqueProjectId();
            ClusterState state = buildClusterState(projectId);
            assertThat(state.metadata().projects().values(), hasSize(1));

            expectThrows(UnsupportedOperationException.class, () -> projectResolver.getProjectIds(state));
            projectResolver.executeOnProject(projectId, () -> assertThat(projectResolver.getProjectIds(state), contains(projectId)));
-            projectResolver.executeOnProject(new ProjectId(randomUUID()), () -> {
+            projectResolver.executeOnProject(randomUniqueProjectId(), () -> {
                expectThrows(IllegalArgumentException.class, () -> projectResolver.getProjectIds(state));
            });
        }
        {
            final ProjectResolver projectResolver = TestProjectResolvers.singleProjectOnly();
-            final ProjectId projectId = new ProjectId(randomUUID());
+            final ProjectId projectId = randomUniqueProjectId();
            ClusterState state = buildClusterState(projectId, randomIntBetween(1, 10));
            assertThat(state.metadata().projects().values().size(), greaterThan(1));

@@ -95,7 +95,7 @@ private ClusterState buildClusterState(ProjectId projectId, int numberOfExtraPro
        Metadata.Builder metadata = Metadata.builder();
        metadata.put(ProjectMetadata.builder(projectId).build());
        for (int i = 0; i < numberOfExtraProjects; i++) {
-            metadata.put(ProjectMetadata.builder(new ProjectId("p" + i + "_" + randomAlphaOfLength(8))).build());
+            metadata.put(ProjectMetadata.builder(ProjectId.fromId("p" + i + "_" + randomAlphaOfLength(8))).build());
        }
        return ClusterState.builder(new ClusterName(randomAlphaOfLengthBetween(4, 8))).metadata(metadata).build();
    }

@@ -103,7 +103,7 @@ private ClusterState buildClusterState(ProjectId projectId, int numberOfExtraPro
    private ClusterState
buildClusterState(int numberOfProjects) { Metadata.Builder metadata = Metadata.builder(); for (int i = 0; i < numberOfProjects; i++) { - metadata.put(ProjectMetadata.builder(new ProjectId("p" + i + "_" + randomAlphaOfLength(8))).build()); + metadata.put(ProjectMetadata.builder(ProjectId.fromId("p" + i + "_" + randomAlphaOfLength(8))).build()); } return ClusterState.builder(new ClusterName(randomAlphaOfLengthBetween(4, 8))).metadata(metadata).build(); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/allocation/CcrPrimaryFollowerAllocationDeciderTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/allocation/CcrPrimaryFollowerAllocationDeciderTests.java index 3642db2f5cb93..b039e93093331 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/allocation/CcrPrimaryFollowerAllocationDeciderTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/allocation/CcrPrimaryFollowerAllocationDeciderTests.java @@ -58,7 +58,7 @@ public void setupProject() { if (extraProjectCount == 0) { projectId = Metadata.DEFAULT_PROJECT_ID; } else { - projectId = new ProjectId(randomUUID()); + projectId = randomUniqueProjectId(); } } @@ -117,7 +117,7 @@ private Metadata createMetadata(IndexMetadata.Builder indexMetadata) { final Metadata.Builder metadataBuilder = Metadata.builder(); metadataBuilder.put(projectMetadata); for (int i = 0; i < extraProjectCount; i++) { - metadataBuilder.put(ProjectMetadata.builder(new ProjectId(randomUUID())).build()); + metadataBuilder.put(ProjectMetadata.builder(randomUniqueProjectId()).build()); } return metadataBuilder.build(); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskMaintenanceServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskMaintenanceServiceTests.java index 5ded307a8def3..491d2077039d8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskMaintenanceServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskMaintenanceServiceTests.java @@ -70,18 +70,18 @@ public void testStartStopDuringClusterChanges() { final Metadata.Builder metadataBuilder = Metadata.builder(); final GlobalRoutingTable.Builder grtBuilder = GlobalRoutingTable.builder(); - final ProjectId p1 = new ProjectId("p1"); + final ProjectId p1 = ProjectId.fromId("p1"); metadataBuilder.put(ProjectMetadata.builder(p1).put(getIndexMetadata(), false)); grtBuilder.put(p1, buildRoutingTableWithIndex(localNodeId)); - final ProjectId p2 = new ProjectId("p2"); + final ProjectId p2 = ProjectId.fromId("p2"); metadataBuilder.put(ProjectMetadata.builder(p2).put(getIndexMetadata(), false)); grtBuilder.put(p2, buildRoutingTableWithIndex(alternateNodeId)); - final ProjectId p3 = new ProjectId("p3"); + final ProjectId p3 = ProjectId.fromId("p3"); grtBuilder.put(p3, RoutingTable.builder()); - final ProjectId p4 = new ProjectId("p4"); + final ProjectId p4 = ProjectId.fromId("p4"); metadataBuilder.put(ProjectMetadata.builder(p4).put(getIndexMetadata(), false)); grtBuilder.put(p4, buildRoutingTableWithIndex(localNodeId)); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDeciderTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDeciderTests.java index 0da93537a68e7..d2a534c241804 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDeciderTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDeciderTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -62,14 +61,14 @@ private static void checkCanAllocate(IndexVersion indexVersion, boolean validLic true, ShardRoutingState.STARTED ); - final ProjectMetadata.Builder projectbuilder = ProjectMetadata.builder(new ProjectId(randomUUID())); + final ProjectMetadata.Builder projectbuilder = ProjectMetadata.builder(randomUniqueProjectId()); addIndex(projectbuilder, indexName, indexUuid, indexVersion); addRandomIndices(projectbuilder, randomIntBetween(1, 5)); final Metadata.Builder metadataBuilder = Metadata.builder(); metadataBuilder.put(projectbuilder); for (int p = randomIntBetween(0, 5); p > 0; p--) { - metadataBuilder.put(addRandomIndices(ProjectMetadata.builder(new ProjectId(randomUUID())), randomIntBetween(0, 10))); + metadataBuilder.put(addRandomIndices(ProjectMetadata.builder(randomUniqueProjectId()), randomIntBetween(0, 10))); } final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadataBuilder).build(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java index 5e8e5795739ef..547a3ce79ced1 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetJobsActionRequestTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.rollup.action; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.metadata.ProjectId; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; @@ -43,7 +42,7 @@ protected Writeable.Reader instanceReader() { public void testStateCheckNoPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("foo"); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, Collections.emptyMap())) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); @@ -52,7 +51,7 @@ public void testStateCheckNoPersistentTasks() { public void testStateCheckAllNoPersistentTasks() { GetRollupJobsAction.Request request = new GetRollupJobsAction.Request("_all"); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, Collections.emptyMap())) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); @@ -65,7 +64,7 @@ 
public void testStateCheckNoMatchingPersistentTasks() { "bar", new PersistentTasksCustomMetadata.PersistentTask<>("bar", "bar", null, 1, null) ); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks)) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); @@ -79,7 +78,7 @@ public void testStateCheckMatchingPersistentTasks() { "foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null) ); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks)) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); @@ -93,7 +92,7 @@ public void testStateCheckAllMatchingPersistentTasks() { "foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null) ); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks)) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); @@ -107,7 +106,7 @@ public void testStateCheckAllWithSeveralMatchingPersistentTasks() { Map> tasks = Maps.newMapWithExpectedSize(2); tasks.put("foo", new PersistentTasksCustomMetadata.PersistentTask<>("foo", RollupJob.NAME, job, 1, null)); tasks.put("bar", new PersistentTasksCustomMetadata.PersistentTask<>("bar", RollupJob.NAME, job2, 1, null)); - ProjectMetadata project = ProjectMetadata.builder(new ProjectId(randomUUID())) + ProjectMetadata project = ProjectMetadata.builder(randomUniqueProjectId()) .putCustom(PersistentTasksCustomMetadata.TYPE, new PersistentTasksCustomMetadata(0L, tasks)) .build(); boolean hasRollupJobs = TransportGetRollupJobAction.stateHasRollupJobs(request, project); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocatorTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocatorTests.java index b6658957f9a7d..33a110b36673b 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocatorTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/allocation/SearchableSnapshotAllocatorTests.java @@ -233,7 +233,7 @@ private static Metadata buildSingleShardIndexMetadata(ShardId shardId, UnaryOper builder.put(buildSingleShardIndexProject(shardId, extraSettings)); for (int i = 0; i < extraProjects; i++) { - builder.put(ProjectMetadata.builder(new ProjectId("project-" + i)).build()); + builder.put(ProjectMetadata.builder(ProjectId.fromId("project-" + i)).build()); } return builder.build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 9d4a1782891ca..e1250a7dd2081 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java
@@ -271,7 +271,7 @@ public class AuthorizationServiceTests extends ESTestCase {
    @SuppressWarnings("unchecked")
    @Before
    public void setup() {
-        projectId = new ProjectId(randomUUID());
+        projectId = randomUniqueProjectId();
        fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY);
        rolesStore = mock(CompositeRolesStore.class);
        clusterService = mock(ClusterService.class);
@@ -1208,7 +1208,7 @@ public void testSearchAgainstIndex() {
        metadataBuilder.put(ProjectMetadata.builder(projectId).put(createIndexMetadata(indexName), true));

        for (int p = 0; p < additionalProjects; p++) {
-            ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(new ProjectId(randomUUID()));
+            ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomUniqueProjectId());
            int indices = randomIntBetween(1, 3);
            for (int i = 0; i < indices; i++) {
                projectBuilder.put(createIndexMetadata(i == 0 ? indexName : randomAlphaOfLength(12)), true);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
index 15ab948791116..d230aeeb5666c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
@@ -163,7 +163,7 @@ public void setup() {
        IndexMetadata dataStreamFailureStore1 = DataStreamTestHelper.createFailureStore(dataStreamName, 1).build();
        IndexMetadata dataStreamFailureStore2 = DataStreamTestHelper.createFailureStore(dataStreamName, 2).build();
        IndexMetadata dataStreamIndex3 = DataStreamTestHelper.createBackingIndex(otherDataStreamName, 1).build();
-        ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(new ProjectId(randomUUID()))
+        ProjectMetadata.Builder projectBuilder = ProjectMetadata.builder(randomUniqueProjectId())
            .put(
                indexBuilder("foo").putAlias(AliasMetadata.builder("foofoobar"))
                    .putAlias(AliasMetadata.builder("foounauthorized"))

From 792ab39637d1102191f7a46ac7ef773a7fa3896b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tim=20R=C3=BChsen?=
Date: Wed, 5 Mar 2025 09:09:59 +0100
Subject: [PATCH 22/54] [profiling] Take care of @UpdateForV9 (#123977)

* [profiling] Take care of @UpdateForV9

* Fix GetStackTracesResponseTests
---
 .../elasticsearch/xpack/profiling/action/CO2Calculator.java  | 5 +++--
 .../xpack/profiling/action/GetFlamegraphResponse.java        | 6 ++----
 .../xpack/profiling/action/GetStackTracesResponse.java       | 4 ----
 .../elasticsearch/xpack/profiling/action/HostMetadata.java   | 5 +++--
 .../elasticsearch/xpack/profiling/action/InstanceType.java   | 5 +++--
 .../org/elasticsearch/xpack/profiling/action/SubGroup.java   | 4 ++--
 .../xpack/profiling/action/GetStackTracesResponseTests.java  | 4 ++--
 7 files changed, 15 insertions(+), 18 deletions(-)

diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
index 0a05fc5930942..1155982fa810d 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/CO2Calculator.java
@@ -7,7 +7,7 @@
 package org.elasticsearch.xpack.profiling.action;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import java.util.Map;
@@ -54,7 +54,8 @@ public double getAnnualCO2Tons(String hostID, long samples) {
        return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host);
    }
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // only allow OTEL semantic conventions
+    @UpdateForV10(owner = UpdateForV10.Owner.PROFILING) // only allow OTEL semantic conventions
+    // still required for data that has been migrated from 8.x to 9.x
    private double getKiloWattsPerCore(HostMetadata host) {
        return switch (host.hostArchitecture) {
            // For the OTEL donation of the profiling agent, we switch to OTEL semantic conventions,
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
index 1c6adff5c63c8..cf0f06719222d 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetFlamegraphResponse.java
@@ -13,7 +13,7 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.xcontent.ToXContent;
 import java.io.IOException;
@@ -25,9 +25,7 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC
    private final int size;
    private final double samplingRate;
    private final long selfCPU;
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove this field - it is unused in Kibana
    private final long totalCPU;
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove this field - it is unused in Kibana
    private final long totalSamples;
    private final List> edges;
    private final List fileIds;
@@ -173,7 +171,7 @@ public long getTotalSamples() {
        return totalSamples;
    }
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well)
+    @UpdateForV10(owner = UpdateForV10.Owner.PROFILING) // change casing from Camel Case to Snake Case (requires updates in Kibana as well)
    @Override
    public Iterator toXContentChunked(ToXContent.Params params) {
        /*
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
index 6ec1f9a7aa7e1..2e61eaf72b2da 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponse.java
@@ -13,7 +13,6 @@
 import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.ToXContent;
 import java.util.Collections;
@@ -29,10 +28,8 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX
    private final Map stackFrames;
    @Nullable
    private final Map executables;
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove this field - it is unused in Kibana
    @Nullable
    private final Map stackTraceEvents;
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove this field - it is unused in Kibana
    private final int totalFrames;
    private final double samplingRate;
    private final long totalSamples;
@@ -101,7 +98,6 @@ public Iterator toXContentChunked(ToXContent.Params params
                stackTraceEvents,
                (n, v) -> ChunkedToXContentHelper.object(n, v, entry -> (b, p) -> b.field(entry.getKey(), entry.getValue().count))
            ),
-            Iterators.single((b, p) -> b.field("total_frames", totalFrames)),
            Iterators.single((b, p) -> b.field("sampling_rate", samplingRate)),
            // the following fields are intentionally not written to the XContent representation (only needed on the transport layer):
            //
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
index 65934626c9ec6..2b87f95227046 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/HostMetadata.java
@@ -7,7 +7,7 @@
 package org.elasticsearch.xpack.profiling.action;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -31,8 +31,9 @@ final class HostMetadata implements ToXContentObject {
        this.profilingNumCores = profilingNumCores != null ? profilingNumCores : DEFAULT_PROFILING_NUM_CORES;
    }
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING)
+    @UpdateForV10(owner = UpdateForV10.Owner.PROFILING)
    // remove fallback to the "profiling.host.machine" field and remove it from the component template "profiling-hosts".
+    // still required for data that has been migrated from 8.x to 9.x
    public static HostMetadata fromSource(Map source) {
        if (source != null) {
            String hostID = (String) source.get("host.id");
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
index 427cf07af7bef..f2a409c8845f7 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/InstanceType.java
@@ -7,7 +7,7 @@
 package org.elasticsearch.xpack.profiling.action;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -74,7 +74,8 @@ public static InstanceType fromHostSource(Map source) {
        return new InstanceType(provider, region, null);
    }
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove this method
+    @UpdateForV10(owner = UpdateForV10.Owner.PROFILING) // remove this method
+    // still required for data that has been migrated from 8.x to 9.x
    private static InstanceType fromObsoleteHostSource(Map source) {
        // Check and handle AWS.
        String region = (String) source.get("ec2.placement.region");
diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/SubGroup.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/SubGroup.java
index 9c953eea04967..eae8b9f16c0ae 100644
--- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/SubGroup.java
+++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/SubGroup.java
@@ -7,7 +7,7 @@
 package org.elasticsearch.xpack.profiling.action;
-import org.elasticsearch.core.UpdateForV9;
+import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.xcontent.ToXContentFragment;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -19,7 +19,7 @@ public class SubGroup implements ToXContentFragment {
    private final String name;
    private Long count;
-    @UpdateForV9(owner = UpdateForV9.Owner.PROFILING) // remove legacy XContent rendering
+    @UpdateForV10(owner = UpdateForV10.Owner.PROFILING) // remove legacy XContent rendering
    private final Map subgroups;

    public static SubGroup root(String name) {
diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java
index 973f9ce3df820..a202df1e72288 100644
--- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java
+++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/GetStackTracesResponseTests.java
@@ -56,8 +56,8 @@ private GetStackTracesResponse createTestInstance() {
    public void testChunking() {
        AbstractChunkedSerializingTestCase.assertChunkCount(createTestInstance(), instance -> {
-            // start, end, total_frames, samplingrate
-            int chunks = 4;
+            // start, end, samplingrate
+            int chunks = 3;
            chunks += size(instance.getExecutables());
            chunks += size(instance.getStackFrames());
            chunks += size(instance.getStackTraces());

From 6925bc98dbc6e7d19c2aef54f2c1c928e85bcb40 Mon Sep 17 00:00:00 2001
From: Simon Cooper
Date: Wed, 5 Mar 2025 08:35:26 +0000
Subject: [PATCH 23/54] Remove code to handle pre-7.6 TCP headers (#123899)

And 7.6 transport version
---
 .../org/elasticsearch/TransportVersions.java  |  1 -
 .../transport/InboundDecoder.java             | 22 +++++----------
 .../transport/OutboundMessage.java            | 14 +++-------
 .../elasticsearch/transport/TcpHeader.java    | 28 +++----------------
 .../transport/TransportLogger.java            |  7 +----
 .../transport/InboundDecoderTests.java        | 19 ++++---------
 .../transport/InboundHandlerTests.java        |  5 ++--
 .../transport/InboundPipelineTests.java       |  5 ++--
 .../SecurityNetty4HeaderSizeLimitTests.java   | 10 +++----
 9 files changed, 30 insertions(+), 81 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 06e8d3177a4ca..764ca018490f5 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -57,7 +57,6 @@ static TransportVersion def(int id) {
    public static final TransportVersion V_7_3_0 = def(7_03_00_99);
    public static final TransportVersion V_7_3_2 = def(7_03_02_99);
    public static final TransportVersion V_7_4_0 = def(7_04_00_99);
-    public static final TransportVersion V_7_6_0 = def(7_06_00_99);
    public static final TransportVersion V_7_8_0 = def(7_08_00_99);
    public static final TransportVersion V_7_8_1 = def(7_08_01_99);
    public static final TransportVersion V_7_9_0 = def(7_09_00_99);
diff --git a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java
index fdcbd6912bc4c..a22b0d0229ed0 100644
--- a/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java
+++ b/server/src/main/java/org/elasticsearch/transport/InboundDecoder.java
@@ -167,23 +167,17 @@ private static int headerBytesToRead(BytesReference reference, ByteSizeValue max
            return 0;
        }
-        TransportVersion remoteVersion = TransportVersion.fromId(reference.getInt(TcpHeader.VERSION_POSITION));
-        int fixedHeaderSize = TcpHeader.headerSize(remoteVersion);
-        if (fixedHeaderSize > reference.length()) {
+        if (reference.length() <= TcpHeader.HEADER_SIZE) {
            return 0;
-        } else if (remoteVersion.before(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
-            return fixedHeaderSize;
        } else {
            int variableHeaderSize = reference.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
            if (variableHeaderSize < 0) {
                throw new StreamCorruptedException("invalid negative variable header size: " + variableHeaderSize);
            }
-            if (variableHeaderSize > maxHeaderSize.getBytes() - fixedHeaderSize) {
-                throw new StreamCorruptedException(
-                    "header size [" + (fixedHeaderSize + variableHeaderSize) + "] exceeds limit of [" + maxHeaderSize + "]"
-                );
+            int totalHeaderSize = TcpHeader.HEADER_SIZE + variableHeaderSize;
+            if (totalHeaderSize > maxHeaderSize.getBytes()) {
+                throw new StreamCorruptedException("header size [" + totalHeaderSize + "] exceeds limit of [" + maxHeaderSize + "]");
            }
-            int totalHeaderSize = fixedHeaderSize + variableHeaderSize;
            if (totalHeaderSize > reference.length()) {
                return 0;
            } else {
@@ -211,11 +205,9 @@ private static Header readHeader(int networkMessageSize, BytesReference bytesRef
            checkVersionCompatibility(header.getVersion());
        }
-        if (header.getVersion().onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
-            // Skip since we already have ensured enough data available
-            streamInput.readInt();
-            header.finishParsingHeader(streamInput);
-        }
+        // Skip since we already have ensured enough data available
+        streamInput.readInt();
+        header.finishParsingHeader(streamInput);
        return header;
    }
 }
diff --git a/server/src/main/java/org/elasticsearch/transport/OutboundMessage.java b/server/src/main/java/org/elasticsearch/transport/OutboundMessage.java
index c3b405aebe2a2..798385edefd6f 100644
--- a/server/src/main/java/org/elasticsearch/transport/OutboundMessage.java
+++ b/server/src/main/java/org/elasticsearch/transport/OutboundMessage.java
@@ -43,26 +43,20 @@ abstract class OutboundMessage extends NetworkMessage {
    BytesReference serialize(RecyclerBytesStreamOutput bytesStream) throws IOException {
        bytesStream.setTransportVersion(version);
-        bytesStream.skip(TcpHeader.headerSize(version));
+        bytesStream.skip(TcpHeader.HEADER_SIZE);
        // The compressible bytes stream will not close the underlying bytes stream
        BytesReference reference;
-        int variableHeaderLength = -1;
        final long preHeaderPosition = bytesStream.position();
-        if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
-            writeVariableHeader(bytesStream);
-            variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
-        }
+        writeVariableHeader(bytesStream);
+        int variableHeaderLength = Math.toIntExact(bytesStream.position() - preHeaderPosition);
        final boolean compress = TransportStatus.isCompress(status);
        final StreamOutput stream = compress ? wrapCompressed(bytesStream) : bytesStream;
        final ReleasableBytesReference zeroCopyBuffer;
        try {
            stream.setTransportVersion(version);
-            if (variableHeaderLength == -1) {
-                writeVariableHeader(stream);
-            }
            if (message instanceof BytesTransportRequest bRequest) {
                bRequest.writeThin(stream);
                zeroCopyBuffer = bRequest.bytes;
@@ -89,7 +83,7 @@ BytesReference serialize(RecyclerBytesStreamOutput bytesStream) throws IOExcepti
        }
        bytesStream.seek(0);
-        final int contentSize = reference.length() - TcpHeader.headerSize(version);
+        final int contentSize = reference.length() - TcpHeader.HEADER_SIZE;
        TcpHeader.writeHeader(bytesStream, requestId, status, version, contentSize, variableHeaderLength);
        return reference;
    }
diff --git a/server/src/main/java/org/elasticsearch/transport/TcpHeader.java b/server/src/main/java/org/elasticsearch/transport/TcpHeader.java
index 22f5a69aa08f6..d6948b50790da 100644
--- a/server/src/main/java/org/elasticsearch/transport/TcpHeader.java
+++ b/server/src/main/java/org/elasticsearch/transport/TcpHeader.java
@@ -10,15 +10,12 @@
 package org.elasticsearch.transport;
 import org.elasticsearch.TransportVersion;
-import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import java.io.IOException;
 public class TcpHeader {
-    public static final TransportVersion VERSION_WITH_HEADER_SIZE = TransportVersions.V_7_6_0;
-
    public static final int MARKER_BYTES_SIZE = 2;

    public static final int MESSAGE_LENGTH_SIZE = 4;
@@ -37,19 +34,9 @@ public class TcpHeader {
    public static final int VARIABLE_HEADER_SIZE_POSITION = VERSION_POSITION + VERSION_ID_SIZE;
-    private static final int PRE_76_HEADER_SIZE = VERSION_POSITION + VERSION_ID_SIZE;
-
-    public static final int BYTES_REQUIRED_FOR_VERSION = PRE_76_HEADER_SIZE;
+    public static final int BYTES_REQUIRED_FOR_VERSION = VERSION_POSITION + VERSION_ID_SIZE;
-    private static final int HEADER_SIZE = PRE_76_HEADER_SIZE + VARIABLE_HEADER_SIZE;
-
-    public static int headerSize(TransportVersion version) {
-        if (version.onOrAfter(VERSION_WITH_HEADER_SIZE)) {
-            return HEADER_SIZE;
-        } else {
-            return PRE_76_HEADER_SIZE;
-        }
-    }
+    public static final int HEADER_SIZE = BYTES_REQUIRED_FOR_VERSION + VARIABLE_HEADER_SIZE;

    private static final byte[] PREFIX = { (byte) 'E', (byte) 'S' };
@@ -63,17 +50,10 @@ public static void writeHeader(
    ) throws IOException {
        output.writeBytes(PREFIX);
        // write the size, the size indicates the remaining message size, not including the size int
-        if (version.onOrAfter(VERSION_WITH_HEADER_SIZE)) {
-            output.writeInt(contentSize + REQUEST_ID_SIZE + STATUS_SIZE + VERSION_ID_SIZE + VARIABLE_HEADER_SIZE);
-        } else {
-            output.writeInt(contentSize + REQUEST_ID_SIZE + STATUS_SIZE + VERSION_ID_SIZE);
-        }
+        output.writeInt(contentSize + REQUEST_ID_SIZE + STATUS_SIZE + VERSION_ID_SIZE + VARIABLE_HEADER_SIZE);
        output.writeLong(requestId);
        output.writeByte(status);
        output.writeInt(version.id());
-        if (version.onOrAfter(VERSION_WITH_HEADER_SIZE)) {
-            assert variableHeaderSize != -1 : "Variable header size not set";
-            output.writeInt(variableHeaderSize);
-        }
+        output.writeInt(variableHeaderSize);
    }
 }
diff --git a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java
index 00cfd1f7f63d4..6cad97acab7ba 100644
--- a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java
+++ b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java
@@ -85,12 +85,7 @@ private static String format(TcpChannel channel, BytesReference message, String
                sb.append(", type: ").append(type);
                sb.append(", version: ").append(version);
-                if (version.onOrAfter(TcpHeader.VERSION_WITH_HEADER_SIZE)) {
-                    sb.append(", header size: ").append(streamInput.readInt()).append('B');
-                } else {
-                    streamInput = decompressingStream(status, streamInput);
-                    assert InboundHandler.assertRemoteVersion(streamInput, version);
-                }
+                sb.append(", header size: ").append(streamInput.readInt()).append('B');
                // read and discard headers
                ThreadContext.readHeadersFromStream(streamInput);
diff --git a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java
index 97ca7d2ecd98b..be51cecc2cf9a 100644
--- a/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/InboundDecoderTests.java
@@ -78,9 +78,7 @@ public void testDecode() throws IOException {
        try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) {
            final BytesReference totalBytes = message.serialize(os);
-            int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + totalBytes.getInt(
-                TcpHeader.VARIABLE_HEADER_SIZE_POSITION
-            );
+            int totalHeaderSize = TcpHeader.HEADER_SIZE + totalBytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
            final BytesReference messageBytes = totalBytes.slice(totalHeaderSize, totalBytes.length() - totalHeaderSize);

            InboundDecoder decoder = new InboundDecoder(recycler);
@@ -151,13 +149,12 @@ private void doHandshakeCompatibilityTest(TransportVersion transportVersion, Com
        try (RecyclerBytesStreamOutput os = new RecyclerBytesStreamOutput(recycler)) {
            final BytesReference bytes = message.serialize(os);
-            int totalHeaderSize = TcpHeader.headerSize(transportVersion);

            InboundDecoder decoder = new InboundDecoder(recycler);
            final ArrayList fragments = new ArrayList<>();
            final ReleasableBytesReference releasable1 = wrapAsReleasable(bytes);
            int bytesConsumed = decoder.decode(releasable1, fragments::add);
-            assertThat(bytesConsumed, greaterThan(totalHeaderSize));
+            assertThat(bytesConsumed, greaterThan(TcpHeader.HEADER_SIZE));
            assertTrue(releasable1.hasReferences());

            final Header header = (Header) fragments.get(0);
@@ -213,9 +210,7 @@ public void testClientChannelTypeFailsDecodingRequests() throws Exception {
        try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.SERVER, ChannelType.MIX))) {
            final ArrayList fragments = new ArrayList<>();
            int bytesConsumed = decoder.decode(wrapAsReleasable(bytes), fragments::add);
-            int totalHeaderSize = TcpHeader.headerSize(version) + (version.onOrAfter(TransportVersions.V_7_6_0)
-                ? bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION)
-                : 0);
+            int totalHeaderSize = TcpHeader.HEADER_SIZE + bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
            assertEquals(totalHeaderSize, bytesConsumed);
            final Header header = (Header) fragments.get(0);
            assertEquals(requestId, header.getRequestId());
@@ -259,9 +254,7 @@ public void testServerChannelTypeFailsDecodingResponses() throws Exception {
        try (InboundDecoder decoder = new InboundDecoder(recycler, randomFrom(ChannelType.CLIENT, ChannelType.MIX))) {
            final ArrayList fragments = new ArrayList<>();
            int bytesConsumed = decoder.decode(wrapAsReleasable(bytes), fragments::add);
-            int totalHeaderSize = TcpHeader.headerSize(version) + (version.onOrAfter(TransportVersions.V_7_6_0)
-                ? bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION)
-                : 0);
+            int totalHeaderSize = TcpHeader.HEADER_SIZE + bytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);
            assertEquals(totalHeaderSize, bytesConsumed);
            final Header header = (Header) fragments.get(0);
            assertEquals(requestId, header.getRequestId());
@@ -304,9 +297,7 @@ public void testCompressedDecode() throws IOException {
            final BytesStreamOutput out = new BytesStreamOutput();
            transportMessage.writeTo(out);
            final BytesReference uncompressedBytes = out.bytes();
-            int totalHeaderSize = TcpHeader.headerSize(TransportVersion.current()) + totalBytes.getInt(
-                TcpHeader.VARIABLE_HEADER_SIZE_POSITION
-            );
+            int totalHeaderSize = TcpHeader.HEADER_SIZE + totalBytes.getInt(TcpHeader.VARIABLE_HEADER_SIZE_POSITION);

            InboundDecoder decoder = new InboundDecoder(recycler);
            final ArrayList fragments = new ArrayList<>();
diff --git a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
index 3ec248e0d8d9a..0dc72cb9ce252 100644
--- a/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/InboundHandlerTests.java
@@ -128,7 +128,6 @@ public void testPing() throws Exception {
    public void testRequestAndResponse() throws Exception {
        String action = "test-request";
-        int headerSize = TcpHeader.headerSize(TransportVersion.current());
        boolean isError = randomBoolean();
        AtomicReference requestCaptor = new AtomicReference<>();
        AtomicReference responseCaptor = new AtomicReference<>();
@@ -183,7 +182,7 @@ public TestResponse read(StreamInput in) throws IOException {
        BytesRefRecycler recycler = new BytesRefRecycler(PageCacheRecycler.NON_RECYCLING_INSTANCE);
        BytesReference fullRequestBytes = request.serialize(new RecyclerBytesStreamOutput(recycler));
-        BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize);
+        BytesReference requestContent = fullRequestBytes.slice(TcpHeader.HEADER_SIZE, fullRequestBytes.length() - TcpHeader.HEADER_SIZE);
        Header requestHeader = new Header(
            fullRequestBytes.length() - 6,
            requestId,
@@ -208,7 +207,7 @@ public TestResponse read(StreamInput in) throws IOException {
        }

        BytesReference fullResponseBytes = channel.getMessageCaptor().get();
-        BytesReference responseContent = fullResponseBytes.slice(headerSize, fullResponseBytes.length() - headerSize);
+        BytesReference responseContent = fullResponseBytes.slice(TcpHeader.HEADER_SIZE, fullResponseBytes.length() - TcpHeader.HEADER_SIZE);
        Header responseHeader = new Header(fullRequestBytes.length() - 6, requestId, responseStatus, TransportVersion.current());
        InboundMessage responseMessage = new InboundMessage(responseHeader, ReleasableBytesReference.wrap(responseContent), () -> {});
        responseHeader.finishParsingHeader(responseMessage.openOrGetStreamInput());
diff --git a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java
index d0c6cd8b00ff5..b21299219f2bd 100644
--- a/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/InboundPipelineTests.java
@@ -274,9 +274,8 @@ public void testEnsureBodyIsNotPrematurelyReleased() throws IOException {
        }

        final BytesReference reference = message.serialize(streamOutput);
-        final int fixedHeaderSize = TcpHeader.headerSize(TransportVersion.current());
-        final int variableHeaderSize = reference.getInt(fixedHeaderSize - 4);
-        final int totalHeaderSize = fixedHeaderSize + variableHeaderSize;
+        final int variableHeaderSize = reference.getInt(TcpHeader.HEADER_SIZE - 4);
+        final int totalHeaderSize = TcpHeader.HEADER_SIZE + variableHeaderSize;
        final AtomicBoolean bodyReleased = new AtomicBoolean(false);
        for (int i = 0; i < totalHeaderSize - 1; ++i) {
            try (ReleasableBytesReference slice = ReleasableBytesReference.wrap(reference.slice(i, 1))) {
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java
index ba7c2e3844521..3cd6fbfe3c413 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HeaderSizeLimitTests.java
@@ -138,7 +138,7 @@ public void terminateThreadPool() {
    public void testThatAcceptableHeaderSizeGoesThroughTheRemoteClusterPort() throws Exception {
        int messageLength = randomIntBetween(128, 256);
        long requestId = randomLongBetween(1L, 1000L);
-        int acceptableHeaderSize = randomIntBetween(0, maxHeaderSize - TcpHeader.headerSize(TransportVersion.current()));
+        int acceptableHeaderSize = randomIntBetween(0, maxHeaderSize - TcpHeader.HEADER_SIZE);
        try (
            ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(
                messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE,
@@ -163,8 +163,8 @@ public void testThatLargerHeaderSizeClosesTheRemoteClusterPort() throws Exceptio
        int messageLength = randomIntBetween(128, 256);
        long requestId = randomLongBetween(1L, 1000L);
        int largeHeaderSize = randomIntBetween(
-            maxHeaderSize - TcpHeader.headerSize(TransportVersion.current()) + 1,
-            messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE - TcpHeader.headerSize(TransportVersion.current())
+            maxHeaderSize - TcpHeader.HEADER_SIZE + 1,
+            messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE - TcpHeader.HEADER_SIZE
        );
        try (
            ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(
@@ -190,8 +190,8 @@ public void testThatLargerHeaderSizeIsAcceptableForDefaultTransportPort() throws
        int messageLength = randomIntBetween(128, 256);
        long requestId = randomLongBetween(1L, 1000L);
        int largeHeaderSize = randomIntBetween(
-            maxHeaderSize - TcpHeader.headerSize(TransportVersion.current()) + 1,
-            messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE - TcpHeader.headerSize(TransportVersion.current())
+            maxHeaderSize - TcpHeader.HEADER_SIZE + 1,
+            messageLength + TcpHeader.BYTES_REQUIRED_FOR_MESSAGE_SIZE - TcpHeader.HEADER_SIZE
        );
        try (
            ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(

From badfc935968b33ca9551d7a654e915d95dc8b013 Mon Sep 17 00:00:00 2001
From: Liam Thompson <32779855+leemthompo@users.noreply.github.com>
Date: Wed, 5 Mar 2025 10:22:34 +0100
Subject: [PATCH 24/54] [DOCS] Update API ref link in docs README (#124069)

https://github.com/elastic/elasticsearch-specification/pull/3873/files
---
 docs/README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/README.md b/docs/README.md
index caec3afb0916a..c95cdd5e8af59 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -17,7 +17,7 @@ Docs live in **three places**:
 1. **Reference content** lives in this repo.
This covers low-level stuff like settings and configuration information that is tightly coupled to code. - 👩🏽‍💻 **Engineers** own the bulk of this content. -2. **API reference docs** live in the [Elasticsearch specification](https://github.com/elastic/elasticsearch-specification) +2. **API reference docs** live in the [Elasticsearch specification](https://github.com/elastic/elasticsearch-specification/blob/main/README.md#how-to-generate-the-openapi-representation) - This is where you need to update API docs published in the [new API docs system](https://www.elastic.co/docs/api/doc/elasticsearch/v8/) - 👩🏽‍💻 **Engineers** own this content. 3. **Narrative, overview, and conceptual content** mostly lives in the [`docs-content`](https://github.com/elastic/docs-content/) repo. @@ -106,4 +106,4 @@ This allows slightly more expressive testing of the snippets. Since that syntax % TEST[s/\n$/\nstartyaml\n - compare_analyzers: {index: thai_example, first: thai, second: rebuilt_thai}\nendyaml\n/] ``` -Any place you can use json you can use elements like `$body.path.to.thing` which is replaced on the fly with the contents of the thing at `path.to.thing` in the last response. \ No newline at end of file +Any place you can use json you can use elements like `$body.path.to.thing` which is replaced on the fly with the contents of the thing at `path.to.thing` in the last response. From cfa98f30e52126b6710d0595f3a24258e8d9c90c Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 5 Mar 2025 11:06:31 +0100 Subject: [PATCH 25/54] Fix configuration cache compatibility issues (#124073) - for running :plugins:discovery-ec2:check - checking if in idea in build scan background action --- .../main/groovy/elasticsearch.build-scan.gradle | 2 +- plugins/discovery-ec2/build.gradle | 15 +++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index 4113e1c1c9d20..ebb33dbe06884 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -133,7 +133,7 @@ develocity { } } else { tag 'LOCAL' - if (providers.systemProperty('idea.active').present) { + if (System.getProperty('idea.active') == 'true') { tag 'IDEA' } } diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 6cf5997c24a8a..9bfae881d708b 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -46,13 +46,12 @@ esplugin.bundleSpec.from('config/discovery-ec2') { } tasks.register("writeTestJavaPolicy") { + boolean inFips = buildParams.inFipsJvm + inputs.property("inFipsJvm", inFips) + final File javaPolicy = new File(layout.buildDirectory.asFile.get(), "tmp/java.policy") + outputs.file(javaPolicy) doLast { - final File tmp = file("${buildDir}/tmp") - if (tmp.exists() == false && tmp.mkdirs() == false) { - throw new GradleException("failed to create temporary directory [${tmp}]") - } - final File javaPolicy = file("${tmp}/java.policy") - if (buildParams.inFipsJvm) { + if (inFips) { javaPolicy.write( [ "grant {", @@ -95,9 +94,9 @@ tasks.withType(Test).configureEach { // this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property // it is better rather disable security manager at all with `systemProperty 'tests.security.manager', 'false'` if (buildParams.inFipsJvm){ - nonInputProperties.systemProperty 
'java.security.policy', "=file://${buildDir}/tmp/java.policy" + nonInputProperties.systemProperty 'java.security.policy', "=file://${layout.buildDirectory.asFile.get()}/tmp/java.policy" } else { - nonInputProperties.systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" + nonInputProperties.systemProperty 'java.security.policy', "file://${layout.buildDirectory.asFile.get()}/tmp/java.policy" } } From b1c75d1868f78f0bda2b18a552fb89c868b2620d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 5 Mar 2025 12:08:29 +0100 Subject: [PATCH 26/54] Move some security APIs to using promises in place of callbacks (#123812) We have some incredibly deep callstacks in security that seem to visibly raise context switch costs, make profiling more complicated and generally make the code rather hard to follow. Since the methods adjusted here return a result synchronously we can both save overhead and make things a little easier to follow by using promises as returns in place of consuming callbacks. --- .../example/CustomAuthorizationEngine.java | 10 +- .../action/support/SubscribableListener.java | 7 + .../security/authz/AuthorizationEngine.java | 14 +- .../ProfileCancellationIntegTests.java | 8 +- .../security/authz/AuthorizationService.java | 128 +++++++++--------- .../xpack/security/authz/RBACEngine.java | 37 +++-- .../BulkShardRequestInterceptor.java | 9 +- .../DlsFlsLicenseRequestInterceptor.java | 12 +- ...cumentLevelSecurityRequestInterceptor.java | 11 +- .../IndicesAliasesRequestInterceptor.java | 13 +- .../authz/interceptor/RequestInterceptor.java | 7 +- .../interceptor/ResizeRequestInterceptor.java | 13 +- ...earchRequestCacheDisablingInterceptor.java | 9 +- .../authz/AuthorizationServiceTests.java | 6 +- .../xpack/security/authz/RBACEngineTests.java | 5 +- ...IndicesAliasesRequestInterceptorTests.java | 6 +- .../ResizeRequestInterceptorTests.java | 6 +- ...RequestCacheDisablingInterceptorTests.java | 2 +- 18 files changed, 162 insertions(+), 141 deletions(-) diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java index b0db261d67e19..ea99880117f17 100644 --- a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java @@ -10,6 +10,7 @@ package org.elasticsearch.example; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; @@ -86,14 +87,14 @@ public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo au } @Override - public void authorizeIndexAction( + SubscribableListener void authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - ProjectMetadata project, - ActionListener listener + ProjectMetadata project ) { if (isSuperuser(requestInfo.getAuthentication().getEffectiveSubject().getUser())) { + ActionListener listener = new SubscribableListener<>(); indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { Map indexAccessControlMap = new HashMap<>(); 
for (String name : resolvedIndices.getLocal()) { @@ -103,8 +104,9 @@ public void authorizeIndexAction( new IndicesAccessControl(true, Collections.unmodifiableMap(indexAccessControlMap)); listener.onResponse(new IndexAuthorizationResult(indicesAccessControl)); }, listener::onFailure)); + return listener; } else { - listener.onResponse(new IndexAuthorizationResult(IndicesAccessControl.DENIED)); + return SubscribableListener.succcess(new IndexAuthorizationResult(IndicesAccessControl.DENIED)); } } diff --git a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java index c6c240e3b6759..3056f7cda6429 100644 --- a/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java +++ b/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java @@ -266,6 +266,13 @@ public final boolean isDone() { return isDone(state); } + /** + * @return return {@code true} if and only if this listener is done and has been completed successfully + */ + public final boolean isSuccess() { + return state instanceof SuccessResult; + } + /** * @return the result with which this listener completed successfully, or throw the exception with which it failed. * diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java index c36a5c350658e..9f18e7915a725 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.bytes.BytesReference; @@ -75,7 +76,7 @@ * can actually impersonate the user running the request. *
  • {@link #authorizeClusterAction(RequestInfo, AuthorizationInfo, ActionListener)} if the * request is a cluster level operation.
  • - *
  • {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, ProjectMetadata, ActionListener)} if + *
  • {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, ProjectMetadata)} if * the request is a an index action. This method may be called multiple times for a single * request as the request may be made up of sub-requests that also need to be authorized. The async supplier * for resolved indices will invoke the @@ -85,7 +86,7 @@ *

    * NOTE: the {@link #loadAuthorizedIndices(RequestInfo, AuthorizationInfo, Map, ActionListener)} * method may be called prior to - * {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, ProjectMetadata, ActionListener)} + * {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, ProjectMetadata)} * in cases where wildcards need to be expanded. *
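// --- Editor's sketch (not part of the patch): the promise-style contract in miniature. ---
// An engine method now returns a SubscribableListener instead of consuming an ActionListener:
// results that are known synchronously come back already completed via newSucceeded/newFailed,
// while genuinely asynchronous results are delivered by completing a freshly created listener
// later. The helper someAsyncCheck() below is hypothetical and imports are omitted.
SubscribableListener<IndexAuthorizationResult> authorizeSketch(boolean knownImmediately) {
    if (knownImmediately) {
        // no callback hop and no extra stack frame for the common synchronous case
        return SubscribableListener.newSucceeded(IndexAuthorizationResult.EMPTY);
    }
    SubscribableListener<IndexAuthorizationResult> promise = new SubscribableListener<>();
    someAsyncCheck(ActionListener.wrap(
        allowed -> promise.onResponse(allowed ? IndexAuthorizationResult.EMPTY : IndexAuthorizationResult.DENIED),
        promise::onFailure
    ));
    return promise;
}
// Callers subscribe to the returned promise rather than passing a callback in, e.g.:
//     engine.authorizeIndexAction(requestInfo, authzInfo, supplier, metadata).addListener(listener);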


    * Authorization engines can be called from various threads including network threads that should @@ -161,14 +162,13 @@ public interface AuthorizationEngine { * attempting to operate on * @param metadata a map of a string name to the cluster metadata specific to that * alias or index - * @param listener the listener to be notified of the authorization result + * @return a listener to be notified of the authorization result */ - void authorizeIndexAction( + SubscribableListener authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - ProjectMetadata metadata, - ActionListener listener + ProjectMetadata metadata ); /** @@ -766,6 +766,6 @@ interface AsyncSupplier { * Asynchronously retrieves the value that is being supplied and notifies the listener upon * completion. */ - void getAsync(ActionListener listener); + SubscribableListener getAsync(); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java index 23807a318981c..0f832b83a177f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.Cancellable; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -406,14 +407,13 @@ public void authorizeClusterAction( } @Override - public void authorizeIndexAction( + public SubscribableListener authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - ProjectMetadata metadata, - ActionListener listener + ProjectMetadata metadata ) { - listener.onResponse(IndexAuthorizationResult.ALLOW_NO_INDICES); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.ALLOW_NO_INDICES); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 137237457d816..6cd937cb2f8c7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -482,16 +483,16 @@ private void authorizeAction( } else if (isIndexAction(action)) { final ProjectMetadata projectMetadata = projectResolver.getProjectMetadata(clusterService.state()); assert 
projectMetadata != null; - final AsyncSupplier resolvedIndicesAsyncSupplier = new CachingAsyncSupplier<>(resolvedIndicesListener -> { + final AsyncSupplier resolvedIndicesAsyncSupplier = new CachingAsyncSupplier<>(() -> { if (request instanceof SearchRequest searchRequest && searchRequest.pointInTimeBuilder() != null) { var resolvedIndices = indicesAndAliasesResolver.resolvePITIndices(searchRequest); - resolvedIndicesListener.onResponse(resolvedIndices); - return; + return SubscribableListener.newSucceeded(resolvedIndices); } final ResolvedIndices resolvedIndices = indicesAndAliasesResolver.tryResolveWithoutWildcards(action, request); if (resolvedIndices != null) { - resolvedIndicesListener.onResponse(resolvedIndices); + return SubscribableListener.newSucceeded(resolvedIndices); } else { + final SubscribableListener resolvedIndicesListener = new SubscribableListener<>(); authzEngine.loadAuthorizedIndices( requestInfo, authzInfo, @@ -510,33 +511,31 @@ private void authorizeAction( } ) ); + return resolvedIndicesListener; } }); - authzEngine.authorizeIndexAction( - requestInfo, - authzInfo, - resolvedIndicesAsyncSupplier, - projectMetadata, - wrapPreservingContext( - new AuthorizationResultListener<>( - result -> handleIndexActionAuthorizationResult( - result, + authzEngine.authorizeIndexAction(requestInfo, authzInfo, resolvedIndicesAsyncSupplier, projectMetadata) + .addListener( + wrapPreservingContext( + new AuthorizationResultListener<>( + result -> handleIndexActionAuthorizationResult( + result, + requestInfo, + requestId, + authzInfo, + authzEngine, + resolvedIndicesAsyncSupplier, + projectMetadata, + listener + ), + listener::onFailure, requestInfo, requestId, - authzInfo, - authzEngine, - resolvedIndicesAsyncSupplier, - projectMetadata, - listener + authzInfo ), - listener::onFailure, - requestInfo, - requestId, - authzInfo - ), - threadContext - ) - ); + threadContext + ) + ); } else { logger.warn("denying access for [{}] as action [{}] is not an index or cluster action", authentication, action); auditTrail.accessDenied(requestId, authentication, action, request, authzInfo); @@ -580,29 +579,30 @@ private void handleIndexActionAuthorizationResult( TransportIndicesAliasesAction.NAME, authzContext ); - authzEngine.authorizeIndexAction( - aliasesRequestInfo, - authzInfo, - ril -> resolvedIndicesAsyncSupplier.getAsync(ril.delegateFailureAndWrap((l, resolvedIndices) -> { + authzEngine.authorizeIndexAction(aliasesRequestInfo, authzInfo, () -> { + SubscribableListener ril = new SubscribableListener<>(); + resolvedIndicesAsyncSupplier.getAsync().addListener(ril.delegateFailureAndWrap((l, resolvedIndices) -> { List aliasesAndIndices = new ArrayList<>(resolvedIndices.getLocal()); for (Alias alias : aliases) { aliasesAndIndices.add(alias.name()); } ResolvedIndices withAliases = new ResolvedIndices(aliasesAndIndices, Collections.emptyList()); l.onResponse(withAliases); - })), - projectMetadata, - wrapPreservingContext( - new AuthorizationResultListener<>( - authorizationResult -> runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener), - listener::onFailure, - aliasesRequestInfo, - requestId, - authzInfo - ), - threadContext - ) - ); + })); + return ril; + }, projectMetadata) + .addListener( + wrapPreservingContext( + new AuthorizationResultListener<>( + authorizationResult -> runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener), + listener::onFailure, + aliasesRequestInfo, + requestId, + authzInfo + ), + threadContext + ) + ); } } else if 
(action.equals(TransportShardBulkAction.ACTION_NAME)) { // if this is performing multiple actions on the index, then check each of those actions. @@ -631,22 +631,26 @@ private void runRequestInterceptors( AuthorizationEngine authorizationEngine, ActionListener listener ) { - if (requestInterceptors.isEmpty()) { - listener.onResponse(null); - } else { - final Iterator requestInterceptorIterator = requestInterceptors.iterator(); - requestInterceptorIterator.next() - .intercept(requestInfo, authorizationEngine, authorizationInfo, new DelegatingActionListener<>(listener) { + final Iterator requestInterceptorIterator = requestInterceptors.iterator(); + while (requestInterceptorIterator.hasNext()) { + var res = requestInterceptorIterator.next().intercept(requestInfo, authorizationEngine, authorizationInfo); + if (res.isSuccess() == false) { + res.addListener(new DelegatingActionListener<>(listener) { @Override public void onResponse(Void unused) { if (requestInterceptorIterator.hasNext()) { - requestInterceptorIterator.next().intercept(requestInfo, authorizationEngine, authorizationInfo, this); + requestInterceptorIterator.next() + .intercept(requestInfo, authorizationEngine, authorizationInfo) + .addListener(this); } else { - listener.onResponse(null); + delegate.onResponse(null); } } }); + return; + } } + listener.onResponse(null); } // pkg-private for testing @@ -776,7 +780,7 @@ private void authorizeBulkItems( final Map> actionToIndicesMap = new HashMap<>(4); final AuditTrail auditTrail = auditTrailService.get(); - resolvedIndicesAsyncSupplier.getAsync(ActionListener.wrap(overallResolvedIndices -> { + resolvedIndicesAsyncSupplier.getAsync().addListener(ActionListener.wrap(overallResolvedIndices -> { final Set localIndices = new HashSet<>(overallResolvedIndices.getLocal()); for (BulkItemRequest item : request.items()) { final String itemAction = getAction(item); @@ -871,12 +875,14 @@ private void authorizeBulkItems( authzEngine.authorizeIndexAction( bulkItemInfo, authzInfo, - ril -> ril.onResponse(new ResolvedIndices(new ArrayList<>(indices), Collections.emptyList())), - projectMetadata, - groupedActionListener.delegateFailureAndWrap( - (l, indexAuthorizationResult) -> l.onResponse(new Tuple<>(bulkItemAction, indexAuthorizationResult)) - ) - ); + () -> SubscribableListener.newSucceeded(new ResolvedIndices(new ArrayList<>(indices), Collections.emptyList())), + projectMetadata + ) + .addListener( + groupedActionListener.delegateFailureAndWrap( + (l, indexAuthorizationResult) -> l.onResponse(new Tuple<>(bulkItemAction, indexAuthorizationResult)) + ) + ); }); }, listener::onFailure)); } @@ -1068,7 +1074,7 @@ private CachingAsyncSupplier(AsyncSupplier supplier) { } @Override - public void getAsync(ActionListener listener) { + public SubscribableListener getAsync() { if (valueFuture == null) { boolean firstInvocation = false; synchronized (this) { @@ -1078,10 +1084,10 @@ public void getAsync(ActionListener listener) { } } if (firstInvocation) { - asyncSupplier.getAsync(valueFuture); + asyncSupplier.getAsync().addListener(valueFuture); } } - valueFuture.addListener(listener); + return valueFuture; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 24fc7480eda42..771cc4185bbed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.search.TransportClosePointInTimeAction; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -314,12 +315,11 @@ private static boolean shouldAuthorizeIndexActionNameOnly(String action, Transpo } @Override - public void authorizeIndexAction( + public SubscribableListener authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - ProjectMetadata metadata, - ActionListener listener + ProjectMetadata metadata ) { final String action = requestInfo.getAction(); final TransportRequest request = requestInfo.getRequest(); @@ -327,13 +327,14 @@ public void authorizeIndexAction( try { role = ensureRBAC(authorizationInfo).getRole(); } catch (Exception e) { - listener.onFailure(e); - return; + return SubscribableListener.newFailed(e); } if (TransportActionProxy.isProxyAction(action) || shouldAuthorizeIndexActionNameOnly(action, request)) { // we've already validated that the request is a proxy request so we can skip that but we still // need to validate that the action is allowed and then move on - listener.onResponse(role.checkIndicesAction(action) ? IndexAuthorizationResult.EMPTY : IndexAuthorizationResult.DENIED); + return SubscribableListener.newSucceeded( + role.checkIndicesAction(action) ? IndexAuthorizationResult.EMPTY : IndexAuthorizationResult.DENIED + ); } else if (request instanceof IndicesRequest == false) { if (SCROLL_RELATED_ACTIONS.contains(action)) { // scroll is special @@ -351,6 +352,7 @@ public void authorizeIndexAction( // index and if they cannot, we can fail the request early before we allow the execution of the action and in // turn the shard actions if (TransportSearchScrollAction.TYPE.name().equals(action)) { + final SubscribableListener listener = new SubscribableListener<>(); ActionRunnable.supply(listener.delegateFailureAndWrap((l, parsedScrollId) -> { if (parsedScrollId.hasLocalIndices()) { l.onResponse( @@ -360,6 +362,7 @@ public void authorizeIndexAction( l.onResponse(IndexAuthorizationResult.EMPTY); } }), ((SearchScrollRequest) request)::parseScrollId).run(); + return listener; } else { // RBACEngine simply authorizes scroll related actions without filling in any DLS/FLS permissions. // Scroll related actions have special security logic, where the security context of the initial search @@ -369,26 +372,26 @@ public void authorizeIndexAction( // The DLS/FLS permissions are used inside the {@code DirectoryReader} that {@code SecurityIndexReaderWrapper} // built while handling the initial search request. In addition, for consistency, the DLS/FLS permissions from // the originating search request are attached to the thread context upon validating the scroll. 
- listener.onResponse(IndexAuthorizationResult.EMPTY); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.EMPTY); } } else if (isAsyncRelatedAction(action)) { if (SubmitAsyncSearchAction.NAME.equals(action)) { // authorize submit async search but don't fill in the DLS/FLS permissions // the `null` IndicesAccessControl parameter indicates that this action has *not* determined // which DLS/FLS controls should be applied to this action - listener.onResponse(IndexAuthorizationResult.EMPTY); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.EMPTY); } else { // async-search actions other than submit have a custom security layer that checks if the current user is // the same as the user that submitted the original request so no additional checks are needed here. - listener.onResponse(IndexAuthorizationResult.ALLOW_NO_INDICES); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.ALLOW_NO_INDICES); } } else if (action.equals(TransportClosePointInTimeAction.TYPE.name())) { - listener.onResponse(IndexAuthorizationResult.ALLOW_NO_INDICES); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.ALLOW_NO_INDICES); } else { assert false : "only scroll and async-search related requests are known indices api that don't " + "support retrieving the indices they relate to"; - listener.onFailure( + return SubscribableListener.newFailed( new IllegalStateException( "only scroll and async-search related requests are known indices " + "api that don't support retrieving the indices they relate to" @@ -396,13 +399,16 @@ public void authorizeIndexAction( ); } } else if (isChildActionAuthorizedByParentOnLocalNode(requestInfo, authorizationInfo)) { - listener.onResponse(new IndexAuthorizationResult(requestInfo.getOriginatingAuthorizationContext().getIndicesAccessControl())); + return SubscribableListener.newSucceeded( + new IndexAuthorizationResult(requestInfo.getOriginatingAuthorizationContext().getIndicesAccessControl()) + ); } else if (PreAuthorizationUtils.shouldPreAuthorizeChildByParentAction(requestInfo, authorizationInfo)) { // We only pre-authorize child actions if DLS/FLS is not configured, // hence we can allow here access for all requested indices. - listener.onResponse(new IndexAuthorizationResult(IndicesAccessControl.allowAll())); + return SubscribableListener.newSucceeded(new IndexAuthorizationResult(IndicesAccessControl.allowAll())); } else if (allowsRemoteIndices(request) || role.checkIndicesAction(action)) { - indicesAsyncSupplier.getAsync(listener.delegateFailureAndWrap((delegateListener, resolvedIndices) -> { + final SubscribableListener listener = new SubscribableListener<>(); + indicesAsyncSupplier.getAsync().addListener(listener.delegateFailureAndWrap((delegateListener, resolvedIndices) -> { assert resolvedIndices.isEmpty() == false : "every indices request needs to have its indices set thus the resolved indices must not be empty"; // all wildcard expressions have been resolved and only the security plugin could have set '-*' here. 
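// --- Editor's sketch (not part of the patch): why returning a promise helps the caller. ---
// The AsyncSupplier consumed above now hands back a SubscribableListener from getAsync(), which
// makes the "resolve once, share with every consumer" behaviour of the caching supplier in
// AuthorizationService straightforward. This is a simplified illustration of that idea
// (imports omitted), not the actual CachingAsyncSupplier implementation.
final class MemoizingAsyncSupplier<V> {
    private final Supplier<SubscribableListener<V>> delegate;
    private SubscribableListener<V> cached;

    MemoizingAsyncSupplier(Supplier<SubscribableListener<V>> delegate) {
        this.delegate = delegate;
    }

    synchronized SubscribableListener<V> getAsync() {
        if (cached == null) {
            cached = delegate.get(); // computed at most once; every caller subscribes to the same promise
        }
        return cached;
    }
}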
@@ -437,8 +443,9 @@ public void authorizeIndexAction( delegateListener.onResponse(result); } })); + return listener; } else { - listener.onResponse(IndexAuthorizationResult.DENIED); + return SubscribableListener.newSucceeded(IndexAuthorizationResult.DENIED); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java index 715cbdbf06752..fb9dc02e387e7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java @@ -9,9 +9,9 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; @@ -42,11 +42,10 @@ public BulkShardRequestInterceptor(ThreadPool threadPool, XPackLicenseState lice } @Override - public void intercept( + public SubscribableListener intercept( RequestInfo requestInfo, AuthorizationEngine authzEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ) { final boolean isDlsLicensed = DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); final boolean isFlsLicensed = FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); @@ -82,6 +81,6 @@ public void intercept( } } } - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/DlsFlsLicenseRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/DlsFlsLicenseRequestInterceptor.java index 73cb2bea3441d..9d739f6db54ad 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/DlsFlsLicenseRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/DlsFlsLicenseRequestInterceptor.java @@ -10,8 +10,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; @@ -40,11 +40,10 @@ public DlsFlsLicenseRequestInterceptor(ThreadContext threadContext, XPackLicense } @Override - public void intercept( + public SubscribableListener intercept( AuthorizationEngine.RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ) { if (requestInfo.getRequest() instanceof IndicesRequest && false == 
TransportActionProxy.isProxyAction(requestInfo.getAction())) { final Role role = RBACEngine.maybeGetRBACEngineRole(threadContext.getTransient(AUTHORIZATION_INFO_KEY)); @@ -96,13 +95,12 @@ public void intercept( "es.indices_with_dls_or_fls", indicesAccessControl.getIndicesWithFieldOrDocumentLevelSecurity() ); - listener.onFailure(licenseException); - return; + return SubscribableListener.newFailed(licenseException); } } } } } - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java index 83edb0f1115ac..93e07dabbdcf3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.TransportActionProxy; @@ -42,11 +43,10 @@ abstract class FieldAndDocumentLevelSecurityRequestInterceptor implements Reques } @Override - public void intercept( + public SubscribableListener intercept( RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ) { final boolean isDlsLicensed = DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); final boolean isFlsLicensed = FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); @@ -72,11 +72,12 @@ && supports(indicesRequest) } } if (false == accessControlByIndex.isEmpty()) { + final SubscribableListener listener = new SubscribableListener<>(); disableFeatures(indicesRequest, accessControlByIndex, listener); - return; + return listener; } } - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } abstract void disableFeatures( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java index 01cc1e9dd4cdf..93f06efa32e93 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.license.XPackLicenseState; @@ -53,11 +54,10 @@ public IndicesAliasesRequestInterceptor( } @Override - public void intercept( + public 
SubscribableListener intercept( RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ) { if (requestInfo.getRequest() instanceof IndicesAliasesRequest request) { final AuditTrail auditTrail = auditTrailService.get(); @@ -72,14 +72,13 @@ public void intercept( if (indexAccessControl != null && (indexAccessControl.getFieldPermissions().hasFieldLevelSecurity() || indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions())) { - listener.onFailure( + return SubscribableListener.newFailed( new ElasticsearchSecurityException( "Alias requests are not allowed for " + "users who have field or document level security enabled on one of the indices", RestStatus.BAD_REQUEST ) ); - return; } } } @@ -99,6 +98,7 @@ public void intercept( list.addAll(toMerge); return list; })); + final SubscribableListener listener = new SubscribableListener<>(); authorizationEngine.validateIndexPermissionsAreSubset( requestInfo, authorizationInfo, @@ -123,8 +123,9 @@ public void intercept( } }, listener::onFailure), threadContext) ); + return listener; } else { - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java index ba36cd2b78bb0..8f968f747bb74 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.security.authz.interceptor; -import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; @@ -20,10 +20,9 @@ public interface RequestInterceptor { * This interceptor will introspect the request and potentially modify it. If the interceptor does not apply * to the request then the request will not be modified. 
*/ - void intercept( + SubscribableListener intercept( RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java index c2bea70613b46..e3f13fe1e10cf 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; @@ -49,11 +50,10 @@ public ResizeRequestInterceptor( } @Override - public void intercept( + public SubscribableListener intercept( RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationInfo authorizationInfo ) { if (requestInfo.getRequest() instanceof ResizeRequest request) { final AuditTrail auditTrail = auditTrailService.get(); @@ -67,17 +67,17 @@ public void intercept( if (indexAccessControl != null && (indexAccessControl.getFieldPermissions().hasFieldLevelSecurity() || indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions())) { - listener.onFailure( + return SubscribableListener.newFailed( new ElasticsearchSecurityException( "Resize requests are not allowed for users when " + "field or document level security is enabled on the source index", RestStatus.BAD_REQUEST ) ); - return; } } + final SubscribableListener listener = new SubscribableListener<>(); authorizationEngine.validateIndexPermissionsAreSubset( requestInfo, authorizationInfo, @@ -101,8 +101,9 @@ public void intercept( } }, listener::onFailure), threadContext) ); + return listener; } else { - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java index d8ec078507bfe..830ea90e3beed 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptor.java @@ -6,8 +6,8 @@ */ package org.elasticsearch.xpack.security.authz.interceptor; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; @@ -33,11 +33,10 @@ public SearchRequestCacheDisablingInterceptor(ThreadPool threadPool, XPackLicens } 
@Override - public void intercept( + public SubscribableListener intercept( AuthorizationEngine.RequestInfo requestInfo, AuthorizationEngine authorizationEngine, - AuthorizationEngine.AuthorizationInfo authorizationInfo, - ActionListener listener + AuthorizationEngine.AuthorizationInfo authorizationInfo ) { final boolean isDlsLicensed = DOCUMENT_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); final boolean isFlsLicensed = FIELD_LEVEL_SECURITY_FEATURE.checkWithoutTracking(licenseState); @@ -50,7 +49,7 @@ && hasRemoteIndices(searchRequest) searchRequest.requestCache(false); } } - listener.onResponse(null); + return SubscribableListener.newSucceeded(null); } // package private for test diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index e1250a7dd2081..904c9af18c7e2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -76,6 +76,7 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.replication.TransportReplicationAction; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; @@ -3456,12 +3457,11 @@ public void authorizeClusterAction( } @Override - public void authorizeIndexAction( + public SubscribableListener authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - ProjectMetadata metadata, - ActionListener listener + ProjectMetadata metadata ) { throw new UnsupportedOperationException("not implemented"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index 4ce74f942a228..59fa2e417432c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.cluster.metadata.DataStream; @@ -1985,7 +1986,7 @@ private void authorizeIndicesAction( final ResolvedIndices resolvedIndices = new ResolvedIndices(List.of(indices), List.of()); final TransportRequest searchRequest = new SearchRequest(indices); final RequestInfo requestInfo = createRequestInfo(searchRequest, action, parentAuthorization); - final AsyncSupplier indicesAsyncSupplier = s -> s.onResponse(resolvedIndices); + final AsyncSupplier indicesAsyncSupplier = () -> SubscribableListener.newSucceeded(resolvedIndices); Metadata.Builder metadata = Metadata.builder(); Stream.of(indices) @@ -1996,7 +1997,7 @@ private void 
authorizeIndicesAction( ) ); - engine.authorizeIndexAction(requestInfo, authzInfo, indicesAsyncSupplier, metadata.build().getProject(), listener); + engine.authorizeIndexAction(requestInfo, authzInfo, indicesAsyncSupplier, metadata.build().getProject()).addListener(listener); } private static RequestInfo createRequestInfo(TransportRequest request, String action, ParentActionAuthorization parentAuthorization) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java index 0dacf6b37e2df..500a1aadf9f28 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java @@ -128,7 +128,7 @@ public void checkInterceptorWithDlsFlsConfigured(boolean dlsFlsFeatureEnabled, S }).when(mockEngine) .validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), anyMap(), anyActionListener()); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> { - interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); }); assertEquals(expectedErrorMessage, securityException.getMessage()); @@ -184,7 +184,7 @@ public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Except anyActionListener() ); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> { - interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); }); assertEquals( @@ -217,7 +217,7 @@ public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Except any(Map.class), anyActionListener() ); - interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java index 68c86a561025a..e3b402d96d416 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java @@ -120,7 +120,7 @@ public void checkResizeWithDlsFlsConfigured(boolean dlsFlsFeatureEnabled, String }).when(mockEngine) .validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), anyMap(), anyActionListener()); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> { - resizeRequestInterceptor.intercept(requestInfo, mockEngine, 
EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); }); assertEquals(expectedErrorMessage, securityException.getMessage()); @@ -160,7 +160,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() anyActionListener() ); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> { - resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); }); assertEquals( @@ -184,7 +184,7 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() any(Map.class), anyActionListener() ); - resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE).addListener(plainActionFuture); plainActionFuture.actionGet(); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java index b09527061f0d5..f75080f5f9792 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestCacheDisablingInterceptorTests.java @@ -103,7 +103,7 @@ public void testRequestCacheWillBeDisabledWhenSearchRemoteIndices() { threadPool.getThreadContext().putTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, indicesAccessControl); final PlainActionFuture future = new PlainActionFuture<>(); - interceptor.intercept(requestInfo, mock(AuthorizationEngine.class), mock(AuthorizationInfo.class), future); + interceptor.intercept(requestInfo, mock(AuthorizationEngine.class), mock(AuthorizationInfo.class)).addListener(future); future.actionGet(); if (remoteIndices.length > 0) { From a6e47ae85b5fa30f62d10d4b28974409aab943a3 Mon Sep 17 00:00:00 2001 From: Gal Lalouche Date: Wed, 5 Mar 2025 14:09:36 +0200 Subject: [PATCH 27/54] Refactor FieldCapabilities creation by adding a proper builder object (#121310) Reduce boilerplate associated with creating `FieldCapabilities` instances. Since it's a class with a huge number of fields, it makes sense to define a builder object, as that can also help with all the Boolean and null blindness going on. Note while there is a static Builder class in `FieldCapabilities`, it is not a proper builder object (no setters, still need to pass a lot of otherwise default parameters) and also package-private. To avoid changing that, I defined a new `FieldCapabilitiesBuilder` class. I also went over the code and refactored places which used the old constructor. 
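To make the diff below easier to skim, here is a hedged before/after sketch of the boilerplate the builder removes. The argument labels in the comments are inferred from how the old positional constructor is used in the updated tests; treat them as illustrative rather than authoritative.

```java
// Before: every call site spells out all positional arguments, even the defaults.
FieldCapabilities fooRank = new FieldCapabilities(
    "fooRank", "rank_feature",
    false,                   // isMetadataField
    true,                    // isSearchable
    false,                   // isAggregatable
    null, null, null,        // indices / nonSearchableIndices / nonAggregatableIndices
    Collections.emptyMap()   // meta
);

// After: only the non-default pieces are stated.
FieldCapabilities fooRankViaBuilder = new FieldCapabilitiesBuilder("fooRank", "rank_feature")
    .isAggregatable(false)
    .build();
```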
--- .../extras/FieldCapsRankFeatureTests.java | 26 ++-- .../search/fieldcaps/FieldCapabilitiesIT.java | 52 ++------ .../fieldcaps/FieldCapsHasValueTests.java | 89 +++---------- .../MergedFieldCapabilitiesResponseTests.java | 46 ++----- .../fieldcaps/FieldCapabilitiesBuilder.java | 125 ++++++++++++++++++ .../analyses/ClassificationTests.java | 3 +- .../xpack/eql/analysis/CancellationTests.java | 18 +-- .../telemetry/PlanExecutorMetricsTests.java | 5 +- .../ml/dataframe/DestinationIndexTests.java | 3 +- .../ExtractedFieldsDetectorTests.java | 3 +- .../xpack/sql/analysis/CancellationTests.java | 18 +-- .../analysis/index/IndexResolverTests.java | 28 ++-- .../common/DocumentConversionUtilsTests.java | 16 +-- .../AggregationSchemaAndResultTests.java | 6 +- .../transforms/pivot/SchemaUtilTests.java | 17 +-- 15 files changed, 213 insertions(+), 242 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesBuilder.java diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/FieldCapsRankFeatureTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/FieldCapsRankFeatureTests.java index 3289bec339126..adf4d9fc57b20 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/FieldCapsRankFeatureTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/FieldCapsRankFeatureTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.index.mapper.extras; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.plugins.Plugin; @@ -19,7 +20,6 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -59,10 +59,7 @@ public void testRankFeatureInIndex() { Map fooRankField = response.getField("fooRank"); assertEquals(1, fooRankField.size()); assertThat(fooRankField, Matchers.hasKey("rank_feature")); - assertEquals( - new FieldCapabilities("fooRank", "rank_feature", false, true, false, null, null, null, Collections.emptyMap()), - fooRankField.get("rank_feature") - ); + assertEquals(fieldCapabilities("fooRank"), fooRankField.get("rank_feature")); } public void testRankFeatureInIndexAfterRestart() throws Exception { @@ -79,10 +76,7 @@ public void testRankFeatureInIndexAfterRestart() throws Exception { Map fooRankField = response.getField("fooRank"); assertEquals(1, fooRankField.size()); assertThat(fooRankField, Matchers.hasKey("rank_feature")); - assertEquals( - new FieldCapabilities("fooRank", "rank_feature", false, true, false, null, null, null, Collections.emptyMap()), - fooRankField.get("rank_feature") - ); + assertEquals(fieldCapabilities("fooRank"), fooRankField.get("rank_feature")); } public void testAllRankFeatureReturnedIfOneIsPresent() { @@ -98,18 +92,16 @@ public void testAllRankFeatureReturnedIfOneIsPresent() { Map fooRankField = response.getField("fooRank"); assertEquals(1, fooRankField.size()); assertThat(fooRankField, Matchers.hasKey("rank_feature")); - assertEquals( - new FieldCapabilities("fooRank", "rank_feature", false, true, false, null, null, null, Collections.emptyMap()), - fooRankField.get("rank_feature") - ); + assertEquals(fieldCapabilities("fooRank"), 
fooRankField.get("rank_feature")); assertThat(response.get(), Matchers.hasKey("barRank")); // Check the capabilities for the 'barRank' field. Map barRankField = response.getField("barRank"); assertEquals(1, barRankField.size()); assertThat(barRankField, Matchers.hasKey("rank_feature")); - assertEquals( - new FieldCapabilities("barRank", "rank_feature", false, true, false, null, null, null, Collections.emptyMap()), - barRankField.get("rank_feature") - ); + assertEquals(fieldCapabilities("barRank"), barRankField.get("rank_feature")); + } + + private static FieldCapabilities fieldCapabilities(String fieldName) { + return new FieldCapabilitiesBuilder(fieldName, "rank_feature").isAggregatable(false).build(); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index 610928be6fb8c..3db6a8edc0425 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -219,24 +220,11 @@ public void testFieldAlias() { assertEquals(2, distance.size()); assertTrue(distance.containsKey("double")); - assertEquals( - new FieldCapabilities( - "distance", - "double", - false, - true, - true, - new String[] { "old_index" }, - null, - null, - Collections.emptyMap() - ), - distance.get("double") - ); + assertEquals(new FieldCapabilitiesBuilder("distance", "double").indices("old_index").build(), distance.get("double")); assertTrue(distance.containsKey("text")); assertEquals( - new FieldCapabilities("distance", "text", false, true, false, new String[] { "new_index" }, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("distance", "text").isAggregatable(false).indices("new_index").build(), distance.get("text") ); @@ -245,10 +233,7 @@ public void testFieldAlias() { assertEquals(1, routeLength.size()); assertTrue(routeLength.containsKey("double")); - assertEquals( - new FieldCapabilities("route_length_miles", "double", false, true, true, null, null, null, Collections.emptyMap()), - routeLength.get("double") - ); + assertEquals(new FieldCapabilitiesBuilder("route_length_miles", "double").build(), routeLength.get("double")); } public void testFieldAliasWithWildcard() { @@ -284,24 +269,11 @@ public void testWithUnmapped() { assertEquals(2, oldField.size()); assertTrue(oldField.containsKey("long")); - assertEquals( - new FieldCapabilities("old_field", "long", false, true, true, new String[] { "old_index" }, null, null, Collections.emptyMap()), - oldField.get("long") - ); + assertEquals(new FieldCapabilitiesBuilder("old_field", "long").indices("old_index").build(), oldField.get("long")); assertTrue(oldField.containsKey("unmapped")); assertEquals( - new FieldCapabilities( - "old_field", - "unmapped", - false, - false, - false, - new String[] { "new_index" }, - null, - null, - Collections.emptyMap() - ), + new 
FieldCapabilitiesBuilder("old_field", "unmapped").isSearchable(false).isAggregatable(false).indices("new_index").build(), oldField.get("unmapped") ); @@ -309,10 +281,7 @@ public void testWithUnmapped() { assertEquals(1, newField.size()); assertTrue(newField.containsKey("long")); - assertEquals( - new FieldCapabilities("new_field", "long", false, true, true, null, null, null, Collections.emptyMap()), - newField.get("long") - ); + assertEquals(new FieldCapabilitiesBuilder("new_field", "long").build(), newField.get("long")); } public void testWithIndexAlias() { @@ -431,7 +400,7 @@ public void testMetadataFields() { assertTrue(idField.containsKey("_id")); assertEquals( - new FieldCapabilities("_id", "_id", true, true, false, null, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("_id", "_id").isMetadataField(true).isAggregatable(false).build(), idField.get("_id") ); @@ -439,10 +408,7 @@ public void testMetadataFields() { assertEquals(1, testField.size()); assertTrue(testField.containsKey("keyword")); - assertEquals( - new FieldCapabilities("_test", "keyword", true, true, true, null, null, null, Collections.emptyMap()), - testField.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("_test", "keyword").isMetadataField(true).build(), testField.get("keyword")); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapsHasValueTests.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapsHasValueTests.java index e7c5f7d97c765..ca281a30b4577 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapsHasValueTests.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapsHasValueTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; @@ -76,10 +77,7 @@ public void testOnlyFieldsWithValueInIndex() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testOnlyFieldsWithValueInAlias() { @@ -94,10 +92,7 @@ public void testOnlyFieldsWithValueInAlias() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testOnlyFieldsWithValueInSpecifiedIndex() { @@ -112,10 +107,7 @@ public void testOnlyFieldsWithValueInSpecifiedIndex() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new 
FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testOnlyFieldsWithValueInSpecifiedAlias() { @@ -130,10 +122,7 @@ public void testOnlyFieldsWithValueInSpecifiedAlias() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testFieldsWithValueAfterUpdate() { @@ -150,18 +139,12 @@ public void testFieldsWithValueAfterUpdate() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); // Check the capabilities for the 'bar' field. Map barField = response.getField("bar"); assertEquals(1, barField.size()); assertThat(barField, Matchers.hasKey("keyword")); - assertEquals( - new FieldCapabilities("bar", "keyword", false, true, true, null, null, null, Collections.emptyMap()), - barField.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("bar", "keyword").build(), barField.get("keyword")); } public void testOnlyFieldsWithValueAfterNodesRestart() throws Exception { @@ -177,10 +160,7 @@ public void testOnlyFieldsWithValueAfterNodesRestart() throws Exception { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testFieldsAndAliasWithValue() { @@ -198,26 +178,17 @@ public void testFieldsAndAliasWithValue() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); // Check the capabilities for the 'bar' field. Map barField = response.getField("bar"); assertEquals(1, barField.size()); assertThat(barField, Matchers.hasKey("keyword")); - assertEquals( - new FieldCapabilities("bar", "keyword", false, true, true, null, null, null, Collections.emptyMap()), - barField.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("bar", "keyword").build(), barField.get("keyword")); // Check the capabilities for the 'bar-alias' field. 
Map barAlias = response.getField("bar-alias"); assertEquals(1, barAlias.size()); assertThat(barAlias, Matchers.hasKey("keyword")); - assertEquals( - new FieldCapabilities("bar-alias", "keyword", false, true, true, null, null, null, Collections.emptyMap()), - barAlias.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("bar-alias", "keyword").build(), barAlias.get("keyword")); } public void testUnmappedFieldsWithValueAfterRestart() throws Exception { @@ -238,7 +209,7 @@ public void testUnmappedFieldsWithValueAfterRestart() throws Exception { assertEquals(2, unmappedField.size()); assertThat(unmappedField, Matchers.hasKey("text")); assertEquals( - new FieldCapabilities("unmapped", "text", false, true, false, new String[] { INDEX1 }, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("unmapped", "text").isAggregatable(false).indices(INDEX1).build(), unmappedField.get("text") ); } @@ -257,18 +228,12 @@ public void testTwoFieldsNameTwoIndices() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); // Check the capabilities for the 'bar' field. Map barField = response.getField("bar"); assertEquals(1, barField.size()); assertThat(barField, Matchers.hasKey("date")); - assertEquals( - new FieldCapabilities("bar", "date", false, true, true, null, null, null, Collections.emptyMap()), - barField.get("date") - ); + assertEquals(new FieldCapabilitiesBuilder("bar", "date").build(), barField.get("date")); } public void testSameFieldNameTwoIndices() { @@ -284,15 +249,9 @@ public void testSameFieldNameTwoIndices() { Map barField = response.getField("bar"); assertEquals(2, barField.size()); assertThat(barField, Matchers.hasKey("keyword")); - assertEquals( - new FieldCapabilities("bar", "keyword", false, true, true, new String[] { INDEX1 }, null, null, Collections.emptyMap()), - barField.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("bar", "keyword").indices(INDEX1).build(), barField.get("keyword")); assertThat(barField, Matchers.hasKey("date")); - assertEquals( - new FieldCapabilities("bar", "date", false, true, true, new String[] { INDEX2 }, null, null, Collections.emptyMap()), - barField.get("date") - ); + assertEquals(new FieldCapabilitiesBuilder("bar", "date").indices(INDEX2).build(), barField.get("date")); } public void testDeletedDocsReturned() { @@ -311,10 +270,7 @@ public void testDeletedDocsReturned() { Map fooField = response.getField("foo"); assertEquals(1, fooField.size()); assertThat(fooField, Matchers.hasKey("text")); - assertEquals( - new FieldCapabilities("foo", "text", false, true, false, null, null, null, Collections.emptyMap()), - fooField.get("text") - ); + assertEquals(new FieldCapabilitiesBuilder("foo", "text").isAggregatable(false).build(), fooField.get("text")); } public void testNoNestedFieldsInEmptyIndex() { @@ -339,7 +295,7 @@ public void testNestedFields() { assertEquals(1, nestedTypeField.size()); assertThat(nestedTypeField, Matchers.hasKey("nested")); assertEquals( - new FieldCapabilities("nested_type", "nested", false, false, false, null, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("nested_type", "nested").isSearchable(false).isAggregatable(false).build(), nestedTypeField.get("nested") 
); // Check the capabilities for the 'nested_type.nested_field' field. @@ -347,7 +303,7 @@ public void testNestedFields() { assertEquals(1, nestedTypeNestedField.size()); assertThat(nestedTypeNestedField, Matchers.hasKey("text")); assertEquals( - new FieldCapabilities("nested_type.nested_field", "text", false, true, false, null, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("nested_type.nested_field", "text").isAggregatable(false).build(), nestedTypeNestedField.get("text") ); } @@ -374,17 +330,14 @@ public void testObjectFields() { assertEquals(1, objectTypeField.size()); assertThat(objectTypeField, Matchers.hasKey("object")); assertEquals( - new FieldCapabilities("object", "object", false, false, false, null, null, null, Collections.emptyMap()), + new FieldCapabilitiesBuilder("object", "object").isSearchable(false).isAggregatable(false).build(), objectTypeField.get("object") ); // Check the capabilities for the 'object.sub_field' field. Map objectSubfield = response.getField("object.sub_field"); assertEquals(1, objectSubfield.size()); assertThat(objectSubfield, Matchers.hasKey("keyword")); - assertEquals( - new FieldCapabilities("object.sub_field", "keyword", false, true, true, null, null, null, Collections.emptyMap()), - objectSubfield.get("keyword") - ); + assertEquals(new FieldCapabilitiesBuilder("object.sub_field", "keyword").build(), objectSubfield.get("keyword")); } public void testWithIndexFilter() throws InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java index 2059e9dd78b04..425336f2bb3c6 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/MergedFieldCapabilitiesResponseTests.java @@ -152,47 +152,25 @@ public void testToXContent() throws IOException { private static FieldCapabilitiesResponse createSimpleResponse() { Map titleCapabilities = new HashMap<>(); - titleCapabilities.put( - "text", - new FieldCapabilities("title", "text", false, true, false, false, null, null, null, null, null, null, Collections.emptyMap()) - ); + titleCapabilities.put("text", new FieldCapabilitiesBuilder("title", "text").isAggregatable(false).build()); Map ratingCapabilities = new HashMap<>(); ratingCapabilities.put( "long", - new FieldCapabilities( - "rating", - "long", - false, - true, - false, - false, - TimeSeriesParams.MetricType.COUNTER, - new String[] { "index1", "index2" }, - null, - new String[] { "index1" }, - new String[] { "index4" }, - null, - Collections.emptyMap() - ) + new FieldCapabilitiesBuilder("rating", "long").isAggregatable(false) + .metricType(TimeSeriesParams.MetricType.COUNTER) + .indices("index1", "index2") + .nonAggregatableIndices("index1") + .nonDimensionIndices("index4") + .build() ); ratingCapabilities.put( "keyword", - new FieldCapabilities( - "rating", - "keyword", - false, - false, - true, - true, - null, - new String[] { "index3", "index4" }, - new String[] { "index4" }, - null, - null, - null, - Collections.emptyMap() - ) + new FieldCapabilitiesBuilder("rating", "keyword").isSearchable(false) + .isDimension(true) + .indices("index3", "index4") + .nonSearchableIndices("index4") + .build() ); Map> responses = new HashMap<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesBuilder.java 
b/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesBuilder.java new file mode 100644 index 0000000000000..3f19012cc00ae --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesBuilder.java @@ -0,0 +1,125 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.TimeSeriesParams; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; + +public class FieldCapabilitiesBuilder { + private final String name; + private final String type; + + private boolean isMetadataField; + private boolean isSearchable; + private boolean isAggregatable; + private boolean isDimension; + private @Nullable TimeSeriesParams.MetricType metricType; + + private @Nullable String[] indices; + private @Nullable String[] nonSearchableIndices; + private @Nullable String[] nonAggregatableIndices; + private @Nullable String[] nonDimensionIndices; + private @Nullable String[] metricConflictsIndices; + + private Map> meta; + + public FieldCapabilitiesBuilder(String name, String type) { + this.name = name; + this.type = type; + + this.isSearchable = true; + this.isAggregatable = true; + + this.meta = Collections.emptyMap(); + } + + public FieldCapabilitiesBuilder isMetadataField(boolean isMetadataField) { + this.isMetadataField = isMetadataField; + return this; + } + + public FieldCapabilitiesBuilder isSearchable(boolean isSearchable) { + this.isSearchable = isSearchable; + return this; + } + + public FieldCapabilitiesBuilder isAggregatable(boolean isAggregatable) { + this.isAggregatable = isAggregatable; + return this; + } + + public FieldCapabilitiesBuilder isDimension(boolean isDimension) { + this.isDimension = isDimension; + return this; + } + + public FieldCapabilitiesBuilder metricType(TimeSeriesParams.MetricType metricType) { + this.metricType = metricType; + return this; + } + + public FieldCapabilitiesBuilder indices(String... indices) { + this.indices = copyStringArray(indices); + return this; + } + + public FieldCapabilitiesBuilder nonSearchableIndices(String... nonSearchableIndices) { + this.nonSearchableIndices = copyStringArray(nonSearchableIndices); + return this; + } + + public FieldCapabilitiesBuilder nonAggregatableIndices(String... nonAggregatableIndices) { + this.nonAggregatableIndices = copyStringArray(nonAggregatableIndices); + return this; + } + + public FieldCapabilitiesBuilder nonDimensionIndices(String... nonDimensionIndices) { + this.nonDimensionIndices = copyStringArray(nonDimensionIndices); + return this; + } + + public FieldCapabilitiesBuilder metricConflictsIndices(String... metricConflictsIndices) { + this.metricConflictsIndices = copyStringArray(metricConflictsIndices); + return this; + } + + private static String[] copyStringArray(@Nullable String[] strings) { + return strings != null ? 
Arrays.copyOf(strings, strings.length) : null; + } + + public FieldCapabilitiesBuilder meta(Map> meta) { + this.meta = meta != null ? new TreeMap<>(meta) : null; + return this; + } + + public FieldCapabilities build() { + return new FieldCapabilities( + name, + type, + isMetadataField, + isSearchable, + isAggregatable, + isDimension, + metricType, + indices, + nonSearchableIndices, + nonAggregatableIndices, + nonDimensionIndices, + metricConflictsIndices, + meta + ); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/ClassificationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/ClassificationTests.java index f12c754d105ac..d14bd8c8c8196 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/ClassificationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/analyses/ClassificationTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -582,6 +583,6 @@ public Long getCardinality(String field) { } private static FieldCapabilities createFieldCapabilities(String field, String type) { - return new FieldCapabilities(field, type, false, true, true, null, null, null, Collections.emptyMap()); + return new FieldCapabilitiesBuilder(field, type).build(); } } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java index af1f876906237..cb0f969216cec 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/analysis/CancellationTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -48,7 +49,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; @@ -118,19 +118,9 @@ public void onFailure(Exception e) { } private Map> fields(String[] indices) { - FieldCapabilities fooField = new FieldCapabilities("foo", "integer", false, true, true, indices, null, null, emptyMap()); - FieldCapabilities categoryField = new FieldCapabilities( - "event.category", - "keyword", - false, - true, - true, - indices, - null, - null, - emptyMap() - ); - FieldCapabilities timestampField = new FieldCapabilities("@timestamp", "date", false, true, true, indices, null, null, emptyMap()); + FieldCapabilities fooField = new FieldCapabilitiesBuilder("foo", "integer").indices(indices).build(); + FieldCapabilities categoryField = new 
FieldCapabilitiesBuilder("event.category", "keyword").indices(indices).build(); + FieldCapabilities timestampField = new FieldCapabilitiesBuilder("@timestamp", "date").indices(indices).build(); Map> fields = new HashMap<>(); fields.put(fooField.getName(), singletonMap(fooField.getName(), fooField)); fields.put(categoryField.getName(), singletonMap(categoryField.getName(), categoryField)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java index aa735e5cb6d86..6c3995302767f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/telemetry/PlanExecutorMetricsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.IndexFieldCapabilities; @@ -194,8 +195,8 @@ private List indexFieldCapabilities(String[] ind } private Map> fields(String[] indices) { - FieldCapabilities fooField = new FieldCapabilities("foo", "integer", false, true, true, indices, null, null, Map.of()); - FieldCapabilities barField = new FieldCapabilities("bar", "long", false, true, true, indices, null, null, Map.of()); + FieldCapabilities fooField = new FieldCapabilitiesBuilder("foo", "integer").indices(indices).build(); + FieldCapabilities barField = new FieldCapabilitiesBuilder("bar", "long").indices(indices).build(); Map> fields = new HashMap<>(); fields.put(fooField.getName(), Map.of(fooField.getName(), fooField)); fields.put(barField.getName(), Map.of(barField.getName(), barField)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java index f0f7dec448d99..b7646f430726a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; @@ -800,6 +801,6 @@ private static DataFrameAnalyticsConfig createConfig(DataFrameAnalysis analysis) } private static FieldCapabilities createFieldCapabilities(String field, String type) { - return new FieldCapabilities(field, type, false, true, true, null, null, null, Collections.emptyMap()); + return new FieldCapabilitiesBuilder(field, type).build(); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java index ee7bdcb51d60d..f028f39c6069f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/ExtractedFieldsDetectorTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Tuple; @@ -1715,7 +1716,7 @@ private MockFieldCapsResponseBuilder addField(String field, boolean isMetadataFi for (String type : types) { caps.put( type, - new FieldCapabilities(field, type, isMetadataField, true, isAggregatable, null, null, null, Collections.emptyMap()) + new FieldCapabilitiesBuilder(field, type).isMetadataField(isMetadataField).isAggregatable(isAggregatable).build() ); } fieldCaps.put(field, caps); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java index 3e1f910c9f72e..773bb4584d173 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.ClosePointInTimeResponse; @@ -50,7 +51,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; @@ -94,19 +94,9 @@ public void onFailure(Exception e) { } private Map> fields(String[] indices) { - FieldCapabilities fooField = new FieldCapabilities("foo", "integer", false, true, true, indices, null, null, emptyMap()); - FieldCapabilities categoryField = new FieldCapabilities( - "event.category", - "keyword", - false, - true, - true, - indices, - null, - null, - emptyMap() - ); - FieldCapabilities timestampField = new FieldCapabilities("@timestamp", "date", false, true, true, indices, null, null, emptyMap()); + FieldCapabilities fooField = new FieldCapabilitiesBuilder("foo", "integer").indices(indices).build(); + FieldCapabilities categoryField = new FieldCapabilitiesBuilder("event.category", "keyword").indices(indices).build(); + FieldCapabilities timestampField = new FieldCapabilitiesBuilder("@timestamp", "date").indices(indices).build(); Map> fields = new HashMap<>(); fields.put(fooField.getName(), singletonMap(fooField.getName(), fooField)); fields.put(categoryField.getName(), singletonMap(categoryField.getName(), categoryField)); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index 
8388dabe23592..0610721c04537 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.analysis.index; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.fieldcaps.FieldCapsUtils; import org.elasticsearch.common.bytes.BytesReference; @@ -282,24 +283,8 @@ public void testMergeIncompatibleCapabilitiesOfObjectFields() throws Exception { addFieldCaps(fieldCaps, fieldName + ".keyword", "keyword", true, true); Map multi = new HashMap<>(); - multi.put( - "long", - new FieldCapabilities(fieldName, "long", false, true, true, new String[] { "one-index" }, null, null, Collections.emptyMap()) - ); - multi.put( - "text", - new FieldCapabilities( - fieldName, - "text", - false, - true, - false, - new String[] { "another-index" }, - null, - null, - Collections.emptyMap() - ) - ); + multi.put("long", new FieldCapabilitiesBuilder(fieldName, "long").indices("one-index").build()); + multi.put("text", new FieldCapabilitiesBuilder(fieldName, "text").indices("another-index").isAggregatable(false).build()); fieldCaps.put(fieldName, multi); String wildcard = "*"; @@ -400,7 +385,7 @@ public void testIndexWithNoMapping() { "_version", singletonMap( "_index", - new FieldCapabilities("_version", "_version", true, false, false, null, null, null, Collections.emptyMap()) + new FieldCapabilitiesBuilder("_version", "_version").isMetadataField(true).isAggregatable(false).isSearchable(false).build() ) ); assertTrue(mergedMappings("*", new String[] { "empty" }, versionFC).isValid()); @@ -599,7 +584,10 @@ private void addFieldCaps( Map cap = new HashMap<>(); cap.put( type, - new FieldCapabilities(name, type, isMetadataField, isSearchable, isAggregatable, null, null, null, Collections.emptyMap()) + new FieldCapabilitiesBuilder(name, type).isMetadataField(isMetadataField) + .isSearchable(isSearchable) + .isAggregatable(isAggregatable) + .build() ); fieldCaps.put(name, cap); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/common/DocumentConversionUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/common/DocumentConversionUtilsTests.java index b4d38ab517bb7..18af78d704646 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/common/DocumentConversionUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/common/DocumentConversionUtilsTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.transform.transforms.common; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; @@ -104,16 +105,9 @@ public void testExtractFieldMappings() { } private static FieldCapabilities createFieldCapabilities(String name, String type) { - return new FieldCapabilities( - name, - type, - false, - true, - true, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - Collections.emptyMap() - ); + return new 
FieldCapabilitiesBuilder(name, type).indices(Strings.EMPTY_ARRAY) + .nonSearchableIndices(Strings.EMPTY_ARRAY) + .nonAggregatableIndices(Strings.EMPTY_ARRAY) + .build(); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java index 1eb86b813f260..ca096c26f30f8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -92,10 +93,7 @@ protected void String[] nameTypePair = Strings.split(field, "_"); String type = nameTypePair != null ? nameTypePair[0] : "long"; - fieldCaps.put( - field, - Collections.singletonMap(type, new FieldCapabilities(field, type, false, true, true, null, null, null, emptyMap())) - ); + fieldCaps.put(field, Collections.singletonMap(type, new FieldCapabilitiesBuilder(field, type).build())); } // FieldCapabilitiesResponse is package private, thats why we use a mock diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java index 212942a09e40e..d65428a3912de 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesBuilder; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -33,7 +34,6 @@ import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource; import java.math.BigInteger; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -297,17 +297,10 @@ protected void } private static FieldCapabilities createFieldCapabilities(String name, String type) { - return new FieldCapabilities( - name, - type, - false, - true, - true, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - Strings.EMPTY_ARRAY, - Collections.emptyMap() - ); + return new FieldCapabilitiesBuilder(name, type).indices(Strings.EMPTY_ARRAY) + .nonSearchableIndices(Strings.EMPTY_ARRAY) + .nonAggregatableIndices(Strings.EMPTY_ARRAY) + .build(); } private void assertAsync(Consumer> function, Consumer furtherTests) throws InterruptedException { From 20742b69460bef6d93510af4e6db8800e2f46921 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 5 Mar 2025 13:20:51 
+0100 Subject: [PATCH 28/54] Simplify Lucene60 and Lucene62 codec constructors (#124054) The only public constructor needed in the archive index codecs is the default one, as that's called by SPI. The other one will only ever be called by the default one, hence we can merge the two into one and simplify things a bit. --- .../bwc/codecs/lucene60/Lucene60Codec.java | 17 +++-------------- .../bwc/codecs/lucene62/Lucene62Codec.java | 12 +++++------- 2 files changed, 8 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60Codec.java index 9694c8bf34d67..0bfff648681ec 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/Lucene60Codec.java @@ -38,8 +38,6 @@ import org.elasticsearch.xpack.lucene.bwc.codecs.lucene50.Lucene50SegmentInfoFormat; import org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat; -import java.util.Objects; - /** * Implements the Lucene 6.0 index format. * @@ -71,21 +69,12 @@ protected PostingsFormat getPostingsFormat(String formatName) { }; /** - * Instantiates a new codec. + * Instantiates a new codec. Called by SPI. */ + @SuppressWarnings("unused") public Lucene60Codec() { - this(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); - } - - /** - * Instantiates a new codec, specifying the stored fields compression - * mode to use. - * @param mode stored fields compression mode to use for newly - * flushed/merged segments. - */ - public Lucene60Codec(Lucene50StoredFieldsFormat.Mode mode) { super("Lucene60"); - this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode)); + this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } @Override diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62Codec.java index 61579d33e41cb..6d4734f807260 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene62/Lucene62Codec.java @@ -38,8 +38,6 @@ import org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat; import org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60MetadataOnlyPointsFormat; -import java.util.Objects; - /** * Implements the Lucene 6.2 index format. * @@ -70,13 +68,13 @@ protected PostingsFormat getPostingsFormat(String formatName) { } }; + /** + * Instantiates a new codec. Called by SPI.
+ */ + @SuppressWarnings("unused") public Lucene62Codec() { - this(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); - } - - public Lucene62Codec(Lucene50StoredFieldsFormat.Mode mode) { super("Lucene62"); - this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode)); + this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } @Override From 025acaa603b432b7ce68e3c58f2aea909f3fcb75 Mon Sep 17 00:00:00 2001 From: Maxim Kholod Date: Wed, 5 Mar 2025 14:28:53 +0200 Subject: [PATCH 29/54] add wiz and aws security hub new full posture data streams to kibana_system role permissions (#124074) --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 2 ++ .../core/security/authz/store/ReservedRolesStoreTests.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index bf6d32bdd549c..d9822b8a9de33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -470,8 +470,10 @@ static RoleDescriptor kibanaSystem(String name) { .indices( "logs-wiz.vulnerability-*", "logs-wiz.cloud_configuration_finding-*", + "logs-wiz.cloud_configuration_finding_full_posture-*", "logs-google_scc.finding-*", "logs-aws.securityhub_findings-*", + "logs-aws.securityhub_findings_full_posture-*", "logs-aws.inspector-*", "logs-amazon_security_lake.findings-*", "logs-qualys_vmdr.asset_host_detection-*", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 0dec67f3939e2..5dfc95feb9de4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -1639,8 +1639,10 @@ public void testKibanaSystemRole() { Arrays.asList( "logs-wiz.vulnerability-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-wiz.cloud_configuration_finding-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-wiz.cloud_configuration_finding_full_posture-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-google_scc.finding-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-aws.securityhub_findings-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-aws.securityhub_findings_full_posture-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-aws.inspector-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-amazon_security_lake.findings-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-qualys_vmdr.asset_host_detection-" + randomAlphaOfLength(randomIntBetween(0, 13)), From 3b323dc5885dbdd645dd7e58624412a4f8e08b24 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 5 Mar 2025 12:29:29 +0000 Subject: [PATCH 30/54] Collapse 8.16.1 transport versions (#124003) --- server/src/main/java/org/elasticsearch/TransportVersions.java | 4 +--- .../action/admin/cluster/node/info/NodeInfo.java | 4 ++-- 
.../org/elasticsearch/search/builder/SearchSourceBuilder.java | 4 ++-- .../xpack/application/rules/QueryRulesetListItem.java | 4 ++-- .../ListQueryRulesetsActionResponseBWCSerializingTests.java | 3 +-- 5 files changed, 8 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 764ca018490f5..2e79775ab0562 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -98,9 +98,7 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_15_0 = def(8_702_0_02); public static final TransportVersion V_8_15_2 = def(8_702_0_03); public static final TransportVersion V_8_16_0 = def(8_772_0_01); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16 = def(8_772_0_02); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16 = def(8_772_0_03); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16 = def(8_772_0_04); + public static final TransportVersion V_8_16_1 = def(8_772_0_04); public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_5 = def(8_772_0_05); public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_6 = def(8_772_0_06); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index 08825706c09ef..bde7ae5971845 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -66,7 +66,7 @@ public NodeInfo(StreamInput in) throws IOException { super(in); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { version = in.readString(); - if (in.getTransportVersion().isPatchFrom(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) || in.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) { compatibilityVersions = CompatibilityVersions.readVersion(in); } else { @@ -252,7 +252,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { Version.writeVersion(Version.fromString(version), out); } - if (out.getTransportVersion().isPatchFrom(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO_BACKPORT_8_16) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) || out.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) { compatibilityVersions.writeTo(out); } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 6d47493e4d063..8e40a57447ea7 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -288,7 +288,7 @@ public SearchSourceBuilder(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { rankBuilder = 
in.readOptionalNamedWriteable(RankBuilder.class); } - if (in.getTransportVersion().isPatchFrom(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) || in.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) { skipInnerHits = in.readBoolean(); } else { @@ -383,7 +383,7 @@ public void writeTo(StreamOutput out) throws IOException { } else if (rankBuilder != null) { throw new IllegalArgumentException("cannot serialize [rank] to version [" + out.getTransportVersion().toReleaseVersion() + "]"); } - if (out.getTransportVersion().isPatchFrom(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE_BACKPORT_8_16) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) || out.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) { out.writeBoolean(skipInnerHits); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index d694b2681ee88..f43ab815090c6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -68,7 +68,7 @@ public QueryRulesetListItem(StreamInput in) throws IOException { this.criteriaTypeToCountMap = Map.of(); } TransportVersion streamTransportVersion = in.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1) || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { this.ruleTypeToCountMap = in.readMap(m -> in.readEnum(QueryRule.QueryRuleType.class), StreamInput::readInt); } else { @@ -103,7 +103,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(criteriaTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } TransportVersion streamTransportVersion = out.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) + if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1) || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { out.writeMap(ruleTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java index c822dd123d3f8..e1243b782f73f 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java @@ -59,8 +59,7 @@ protected ListQueryRulesetsAction.Response mutateInstanceForVersion( ListQueryRulesetsAction.Response instance, TransportVersion version ) { - if (version.isPatchFrom(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES_BACKPORT_8_16) - || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + if 
(version.isPatchFrom(TransportVersions.V_8_16_1) || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { return instance; } else if (version.onOrAfter(QueryRulesetListItem.EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) { List updatedResults = new ArrayList<>(); From 30a37c361b0961a55d4eec98ec1cde112c685579 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 5 Mar 2025 12:49:07 +0000 Subject: [PATCH 31/54] Re-remove min compatible version from SearchRequest (#123859) Re-apply transport changes from #114713 --- .../main/java/org/elasticsearch/TransportVersions.java | 1 + .../org/elasticsearch/action/search/SearchRequest.java | 10 +++------- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 2e79775ab0562..9d521ffc1503a 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -208,6 +208,7 @@ static TransportVersion def(int id) { public static final TransportVersion MULTI_PROJECT = def(9_018_0_00); public static final TransportVersion STORED_SCRIPT_CONTENT_LENGTH = def(9_019_0_00); public static final TransportVersion JINA_AI_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_020_0_00); + public static final TransportVersion RE_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(9_021_0_00); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 8b77ec7fb5463..113d4d78c4d32 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.Rewriteable; @@ -254,10 +253,8 @@ public SearchRequest(StreamInput in) throws IOException { finalReduce = true; } ccsMinimizeRoundtrips = in.readBoolean(); - if ((in.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) - || in.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) && in.readBoolean()) { - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // this can be removed (again) when the v9 transport version can diverge - Version v = Version.readVersion(in); // and drop on the floor + if (in.getTransportVersion().before(TransportVersions.RE_REMOVE_MIN_COMPATIBLE_SHARD_NODE) && in.readBoolean()) { + Version.readVersion(in); // and drop on the floor } waitForCheckpoints = in.readMap(StreamInput::readLongArray); waitForCheckpointsTimeout = in.readTimeValue(); @@ -293,8 +290,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(finalReduce); } out.writeBoolean(ccsMinimizeRoundtrips); - if (out.getTransportVersion().before(TransportVersions.REMOVE_MIN_COMPATIBLE_SHARD_NODE) - || out.getTransportVersion().onOrAfter(TransportVersions.REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { + if (out.getTransportVersion().before(TransportVersions.RE_REMOVE_MIN_COMPATIBLE_SHARD_NODE)) { out.writeBoolean(false); } out.writeMap(waitForCheckpoints, StreamOutput::writeLongArray); From 
5a62fd1b0ee0405044cc07f20a14ecd6a9800126 Mon Sep 17 00:00:00 2001 From: Gal Lalouche Date: Wed, 5 Mar 2025 15:09:18 +0200 Subject: [PATCH 32/54] ESQL: Fix ShapeGeometryFieldMapperTests (and rename) (#122871) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #122661. The issue was caused by RandomIndexWriter (randomly) reshuffling the document writing order. Since this test also ensures that the documents are read in the input order, I've opted to use a regular IndexWriter instead. I've also renamed the class to AbstractShapeGeometryFieldMapperTests since it was originally renamed due to a misunderstanding of muted tests (which caused it to be muted again! Busted 😅). --- muted-tests.yml | 6 ------ ...=> AbstractShapeGeometryFieldMapperTests.java} | 15 +++++++++------ 2 files changed, 9 insertions(+), 12 deletions(-) rename server/src/test/java/org/elasticsearch/index/mapper/{ShapeGeometryFieldMapperTests.java => AbstractShapeGeometryFieldMapperTests.java} (93%) diff --git a/muted-tests.yml b/muted-tests.yml index db95b63648393..fe9702984adbe 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -130,9 +130,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/118914 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryRunAsIT issue: https://github.com/elastic/elasticsearch/issues/115727 -- class: org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapperTests - method: testCartesianBoundsBlockLoader - issue: https://github.com/elastic/elasticsearch/issues/119201 - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_start_stop/Test start/stop/start transform} issue: https://github.com/elastic/elasticsearch/issues/119508 @@ -266,9 +263,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_488} issue: https://github.com/elastic/elasticsearch/issues/121611 -- class: org.elasticsearch.index.mapper.ShapeGeometryFieldMapperTests - method: testCartesianBoundsBlockLoader - issue: https://github.com/elastic/elasticsearch/issues/122661 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_408} issue: https://github.com/elastic/elasticsearch/issues/122681 diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ShapeGeometryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/index/mapper/ShapeGeometryFieldMapperTests.java rename to server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java index 0322286277b25..73d76ad48c955 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ShapeGeometryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapperTests.java @@ -12,9 +12,10 @@ import org.apache.lucene.document.Document; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReader; import org.apache.lucene.store.Directory; -import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.core.Strings; import 
org.elasticsearch.geo.GeometryTestUtils; @@ -39,14 +40,14 @@ import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; import static org.elasticsearch.common.geo.Orientation.RIGHT; -public class ShapeGeometryFieldMapperTests extends ESTestCase { +public class AbstractShapeGeometryFieldMapperTests extends ESTestCase { public void testCartesianBoundsBlockLoader() throws IOException { testBoundsBlockLoader( CoordinateEncoder.CARTESIAN, () -> ShapeTestUtils.randomGeometryWithoutCircle(0, false), CartesianShapeIndexer::new, SpatialEnvelopeVisitor::visitCartesian, - ShapeGeometryFieldMapperTests::makeCartesianRectangle + AbstractShapeGeometryFieldMapperTests::makeCartesianRectangle ); } @@ -58,7 +59,7 @@ public void ignoreTestGeoBoundsBlockLoader() throws IOException { () -> normalize(GeometryTestUtils.randomGeometryWithoutCircle(0, false)), field -> new GeoShapeIndexer(RIGHT, field), g -> SpatialEnvelopeVisitor.visitGeo(g, SpatialEnvelopeVisitor.WrapLongitude.WRAP), - ShapeGeometryFieldMapperTests::makeGeoRectangle + AbstractShapeGeometryFieldMapperTests::makeGeoRectangle ); } @@ -72,7 +73,7 @@ public void ignoreTestRectangleCrossingDateline() throws IOException { geometries, field -> new GeoShapeIndexer(RIGHT, field), g -> SpatialEnvelopeVisitor.visitGeo(g, SpatialEnvelopeVisitor.WrapLongitude.WRAP), - ShapeGeometryFieldMapperTests::makeGeoRectangle + AbstractShapeGeometryFieldMapperTests::makeGeoRectangle ); } @@ -100,7 +101,9 @@ private static void testBoundsBlockLoaderAux( ) throws IOException { var loader = new AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType.BoundsBlockLoader("field"); try (Directory directory = newDirectory()) { - try (var iw = new RandomIndexWriter(random(), directory)) { + // Since we also test that the documents are loaded in the correct order, we need to write them in order, so we can't use + // RandomIndexWriter here. + try (var iw = new IndexWriter(directory, new IndexWriterConfig(null /* analyzer */))) { for (Geometry geometry : geometries) { var shape = new BinaryShapeDocValuesField("field", encoder); shape.add(indexerFactory.apply("field").indexShape(geometry), geometry); From 2456cd375a70c0a2f4dac044829898fd6f029d84 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Wed, 5 Mar 2025 15:26:22 +0100 Subject: [PATCH 33/54] Add note to servicenow connector ref (#124101) --- .../search-connectors/es-connectors-servicenow.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/reference/ingestion-tools/search-connectors/es-connectors-servicenow.md b/docs/reference/ingestion-tools/search-connectors/es-connectors-servicenow.md index be90d97d2e151..3977cc1d983d1 100644 --- a/docs/reference/ingestion-tools/search-connectors/es-connectors-servicenow.md +++ b/docs/reference/ingestion-tools/search-connectors/es-connectors-servicenow.md @@ -205,9 +205,12 @@ The ServiceNow connector supports roles for access control lists (ACLs) to enabl For services other than these defaults, the connector iterates over access controls with `read` operations and finds the respective roles for those services. +:::{important} +The ServiceNow connector applies access control at the service (table) level. This means documents within a given ServiceNow table share the same access control settings. Users with permission to a table can access all documents from that table in Elasticsearch. +::: + ::::{note} The ServiceNow connector does not support scripted and conditional permissions. 
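Returning to the test change in #122871 above: a plain IndexWriter is needed because RandomIndexWriter can randomly reshuffle the order in which documents are written (as the commit message notes), so Lucene doc ids stop tracking insertion order, while the renamed test asserts on documents by position. The snippet below is a standalone illustration of the order-preserving behaviour the test relies on, written directly against the Lucene API; it is not code from the patch, and the field name, directory type and document count are arbitrary choices.

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;

// Standalone sketch: with a single plain IndexWriter, one segment and no index sorting,
// Lucene assigns doc ids in insertion order, which is what the order-sensitive test needs.
public class InsertionOrderSketch {
    public static void main(String[] args) throws Exception {
        try (var directory = new ByteBuffersDirectory();
             // a null analyzer is fine here because no tokenized text fields are added
             var writer = new IndexWriter(directory, new IndexWriterConfig(null))) {
            for (int i = 0; i < 5; i++) {
                var doc = new Document();
                doc.add(new StoredField("position", i)); // remember the insertion position
                writer.addDocument(doc);
            }
            writer.commit();
            try (var reader = DirectoryReader.open(directory)) {
                for (int docId = 0; docId < reader.maxDoc(); docId++) {
                    // prints 0..4 in order; RandomIndexWriter gives no such guarantee
                    System.out.println(reader.storedFields().document(docId).getField("position").numericValue());
                }
            }
        }
    }
}
```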
- :::: From 26de5343a21e85c17fcc8aee0c6c47d86cae66f9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 5 Mar 2025 15:43:33 +0100 Subject: [PATCH 34/54] Remove synthetic recovery source feature flag. (#122615) This feature flag controls whether synthetic recovery source is enabled by default when the source mode is synthetic. The synthetic recovery source feature itself is already available via the index.recovery.use_synthetic_source index setting and can be enabled by anyone using synthetic source. The default value of index.recovery.use_synthetic_source setting defaults to true when index.mapping.source.mode is enabled. The index.mapping.source.mode default to true if index.mode is logsdb or time_series. In other words, with this change synthetic recovery source will be enabled by default for logsdb and tsdb. Closes #116726 --- docs/changelog/122615.yaml | 8 +++ ...okeTestMultiNodeClientYamlTestSuiteIT.java | 1 - .../test/rest/ClientYamlTestSuiteIT.java | 1 - .../elasticsearch/index/IndexSettings.java | 6 +-- .../NativeArrayIntegrationTestCase.java | 23 ++------ .../index/mapper/SourceFieldMapperTests.java | 53 +++---------------- .../test/cluster/FeatureFlag.java | 5 -- .../xpack/logsdb/LogsdbTestSuiteIT.java | 1 - ...CoreWithSecurityClientYamlTestSuiteIT.java | 1 - 9 files changed, 18 insertions(+), 81 deletions(-) create mode 100644 docs/changelog/122615.yaml diff --git a/docs/changelog/122615.yaml b/docs/changelog/122615.yaml new file mode 100644 index 0000000000000..0070fea6a3b95 --- /dev/null +++ b/docs/changelog/122615.yaml @@ -0,0 +1,8 @@ +pr: 122615 +summary: Enable synthetic recovery source by default when synthetic source is enabled. + Using synthetic recovery source significantly improves indexing performance compared + to regular recovery source. 
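To make the default described in this commit message concrete: once the feature flag is gone, the derived default for index.recovery.use_synthetic_source depends only on the index version and on index.mapping.source.mode, and the source mode itself defaults to synthetic for logsdb and time_series indices. The sketch below restates that decision logic with stand-in enums and method names; it is illustrative only and not the real IndexSettings code, and an explicitly configured index.recovery.use_synthetic_source value still takes precedence over the derived default.

```java
// Illustrative stand-ins, not the real Elasticsearch classes.
enum SourceMode { STORED, SYNTHETIC, DISABLED }
enum IndexMode { STANDARD, TIME_SERIES, LOGSDB, LOOKUP }

final class RecoverySourceDefaults {

    // index.mapping.source.mode: defaults to synthetic for logsdb and time_series indices
    static SourceMode defaultSourceMode(IndexMode indexMode) {
        return (indexMode == IndexMode.LOGSDB || indexMode == IndexMode.TIME_SERIES)
            ? SourceMode.SYNTHETIC
            : SourceMode.STORED;
    }

    // index.recovery.use_synthetic_source: with the flag removed, the default depends only on
    // the index being new enough and the effective source mode being synthetic
    static boolean defaultUseSyntheticRecoverySource(boolean indexVersionSupportsIt, SourceMode sourceMode) {
        return indexVersionSupportsIt && sourceMode == SourceMode.SYNTHETIC;
    }

    public static void main(String[] args) {
        SourceMode logsdbMode = defaultSourceMode(IndexMode.LOGSDB);
        System.out.println(defaultUseSyntheticRecoverySource(true, logsdbMode));                            // true
        System.out.println(defaultUseSyntheticRecoverySource(true, defaultSourceMode(IndexMode.STANDARD))); // false
    }
}
```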
+area: Mapping +type: enhancement +issues: + - 116726 diff --git a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index f0afac879df7a..16b5b9186753f 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -36,7 +36,6 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe .node(0, n -> n.setting("node.roles", "[master,data,ml,remote_cluster_client,transform]")) .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) - .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .feature(FeatureFlag.DOC_VALUES_SKIPPER) .build(); diff --git a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java index 8879c185be943..289f9cbcdfd22 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java @@ -36,7 +36,6 @@ public class ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .module("data-streams") .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) - .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .feature(FeatureFlag.DOC_VALUES_SKIPPER) .build(); diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index 4c7bda1c52f17..e530415ed0f59 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -732,19 +732,15 @@ public Iterator> settings() { Setting.Property.ServerlessPublic ); - public static final FeatureFlag RECOVERY_USE_SYNTHETIC_SOURCE = new FeatureFlag("index_recovery_use_synthetic_source"); public static final Setting RECOVERY_USE_SYNTHETIC_SOURCE_SETTING = Setting.boolSetting( "index.recovery.use_synthetic_source", settings -> { - boolean isSyntheticSourceRecoveryFeatureFlagEnabled = RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled(); boolean isNewIndexVersion = SETTING_INDEX_VERSION_CREATED.get(settings) .onOrAfter(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT); boolean isIndexVersionInBackportRange = SETTING_INDEX_VERSION_CREATED.get(settings) .between(IndexVersions.USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT_BACKPORT, IndexVersions.UPGRADE_TO_LUCENE_10_0_0); - boolean useSyntheticRecoverySource = isSyntheticSourceRecoveryFeatureFlagEnabled - && (isNewIndexVersion || isIndexVersionInBackportRange); - + boolean useSyntheticRecoverySource = isNewIndexVersion || isIndexVersionInBackportRange; return String.valueOf( useSyntheticRecoverySource && Objects.equals(INDEX_MAPPER_SOURCE_MODE_SETTING.get(settings), SourceFieldMapper.Mode.SYNTHETIC) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java index 7283dc822e12f..4b44f2444f27e 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java @@ -20,7 +20,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -35,7 +34,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -136,14 +134,7 @@ protected void verifySyntheticArray(Object[][] arrays, XContentBuilder mapping, var document = reader.storedFields().document(i); // Verify that there is no ignored source: Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, contains(expectedStoredFields)); - } else { - var copyExpectedStoredFields = new String[expectedStoredFields.length + 1]; - System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length); - copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_recovery_source"; - assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields)); - } + assertThat(storedFieldNames, contains(expectedStoredFields)); } var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("field.offsets"); assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); @@ -208,11 +199,7 @@ protected void verifySyntheticObjectArray(List> documents) throws var document = reader.storedFields().document(i); // Verify that there is ignored source because of leaf array being wrapped by object array: List storedFieldNames = document.getFields().stream().map(IndexableField::name).toList(); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, contains("_id", "_ignored_source")); - } else { - assertThat(storedFieldNames, containsInAnyOrder("_id", "_ignored_source", "_recovery_source")); - } + assertThat(storedFieldNames, contains("_id", "_ignored_source")); // Verify that there is no offset field: LeafReader leafReader = reader.leaves().get(0).reader(); @@ -285,11 +272,7 @@ protected void verifySyntheticArrayInObject(List documents) throws IOE var document = reader.storedFields().document(i); // Verify that there is no ignored source: Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, contains("_id")); - } else { - assertThat(storedFieldNames, containsInAnyOrder("_id", "_recovery_source")); - } + assertThat(storedFieldNames, contains("_id")); } var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("object.field.offsets"); assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index 
70010084cdb96..0ff6ff5fecad0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -489,13 +489,7 @@ public void testRecoverySourceWithSyntheticSource() throws IOException { MapperService mapperService = createMapperService(settings, topMapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> b.field("field1", "value1"))); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { - // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat(doc.rootDoc().getField("_recovery_source").binaryValue(), equalTo(new BytesRef("{\"field1\":\"value1\"}"))); - } else { - assertNull(doc.rootDoc().getField("_recovery_source")); - } + assertNull(doc.rootDoc().getField("_recovery_source")); } { Settings settings = Settings.builder() @@ -526,16 +520,8 @@ public void testRecoverySourceWithLogs() throws IOException { MapperService mapperService = createMapperService(settings, mapping(b -> {})); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { - // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) - ); - } else { - assertNull(doc.rootDoc().getField("_recovery_source")); - } + assertNotNull(doc.rootDoc().getField("_recovery_source_size")); + assertThat(doc.rootDoc().getField("_recovery_source_size").numericValue(), equalTo(27L)); } { Settings settings = Settings.builder() @@ -728,16 +714,7 @@ public void testRecoverySourceWithLogsCustom() throws IOException { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source(b -> { b.field("@timestamp", "2012-02-13"); })); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { - // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\"}")) - ); - } else { - assertNull(doc.rootDoc().getField("_recovery_source")); - } + assertNull(doc.rootDoc().getField("_recovery_source")); } { Settings settings = Settings.builder() @@ -763,16 +740,7 @@ public void testRecoverySourceWithTimeSeries() throws IOException { })); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { - // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); 
- } else { - assertNull(doc.rootDoc().getField("_recovery_source")); - } + assertNull(doc.rootDoc().getField("_recovery_source")); } { Settings settings = Settings.builder() @@ -816,16 +784,7 @@ public void testRecoverySourceWithTimeSeriesCustom() throws IOException { MapperService mapperService = createMapperService(settings, mappings); DocumentMapper docMapper = mapperService.documentMapper(); ParsedDocument doc = docMapper.parse(source("123", b -> b.field("@timestamp", "2012-02-13").field("field", "value1"), null)); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled() == false) { - // TODO: remove this if branch when removing the 'index_recovery_use_synthetic_source' feature flag - assertNotNull(doc.rootDoc().getField("_recovery_source")); - assertThat( - doc.rootDoc().getField("_recovery_source").binaryValue(), - equalTo(new BytesRef("{\"@timestamp\":\"2012-02-13\",\"field\":\"value1\"}")) - ); - } else { - assertNull(doc.rootDoc().getField("_recovery_source")); - } + assertNull(doc.rootDoc().getField("_recovery_source")); } { Settings settings = Settings.builder() diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 811e5fcb62aca..cc80d6d8d9bb0 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,11 +19,6 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), SUB_OBJECTS_AUTO_ENABLED("es.sub_objects_auto_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INDEX_RECOVERY_USE_SYNTHETIC_SOURCE( - "es.index_recovery_use_synthetic_source_feature_flag_enabled=true", - Version.fromString("8.18.0"), - null - ), DOC_VALUES_SKIPPER("es.doc_values_skipper_feature_flag_enabled=true", Version.fromString("8.18.1"), null); public final String systemProperty; diff --git a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java index a25712e717e2a..a6325d16ec6ed 100644 --- a/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java +++ b/x-pack/plugin/logsdb/src/yamlRestTest/java/org/elasticsearch/xpack/logsdb/LogsdbTestSuiteIT.java @@ -24,7 +24,6 @@ public class LogsdbTestSuiteIT extends ESClientYamlSuiteTestCase { .distribution(DistributionType.DEFAULT) .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .feature(FeatureFlag.DOC_VALUES_SKIPPER) .build(); diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index cb0bfc84e33b1..e62775c965a40 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ 
b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -50,7 +50,6 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .user(USER, PASS) .feature(FeatureFlag.TIME_SERIES_MODE) .feature(FeatureFlag.SUB_OBJECTS_AUTO_ENABLED) - .feature(FeatureFlag.INDEX_RECOVERY_USE_SYNTHETIC_SOURCE) .feature(FeatureFlag.DOC_VALUES_SKIPPER) .build(); From 152d086c0f3c2bb8198bed111ca2a530a9ef661f Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 6 Mar 2025 02:02:59 +1100 Subject: [PATCH 35/54] Mute org.elasticsearch.test.apmintegration.MetricsApmIT testApmIntegration #124106 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fe9702984adbe..a33d6debab573 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -333,6 +333,9 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120814 +- class: org.elasticsearch.test.apmintegration.MetricsApmIT + method: testApmIntegration + issue: https://github.com/elastic/elasticsearch/issues/124106 # Examples: # From 22a7b5ea1292a822f4626ba7ee45bcccb8784f92 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 5 Mar 2025 15:18:01 +0000 Subject: [PATCH 36/54] Collapse transport versions for 8.17.0 (#124005) --- .../org/elasticsearch/TransportVersions.java | 26 +------------------ .../admin/cluster/node/info/NodeInfo.java | 6 ++--- .../admin/cluster/stats/MappingStats.java | 3 +-- .../indices/stats/IndicesStatsResponse.java | 20 ++------------ .../action/bulk/SimulateBulkRequest.java | 4 +-- .../action/index/IndexRequest.java | 4 +-- .../action/support/IndicesOptions.java | 4 +-- .../org/elasticsearch/index/IndexMode.java | 2 +- .../ingest/PipelineConfiguration.java | 4 +-- .../org/elasticsearch/monitor/os/OsStats.java | 8 +++--- .../search/builder/SearchSourceBuilder.java | 6 ++--- .../application/LogsDBFeatureSetUsage.java | 10 +++---- .../action/GetInferenceModelAction.java | 6 ++--- .../permission/RemoteClusterPermissions.java | 3 +-- .../security/authc/AuthenticationTests.java | 2 +- .../RemoteClusterPermissionsTests.java | 2 +- .../deprecation/DeprecationInfoAction.java | 6 ++--- .../rules/QueryRulesetListItem.java | 6 ++--- .../rules/retriever/RuleQueryRankDoc.java | 2 +- ...setsActionResponseBWCSerializingTests.java | 2 +- .../esql/core/expression/FieldAttribute.java | 4 +-- .../xpack/esql/action/EsqlExecutionInfo.java | 4 +-- .../esql/enrich/EnrichLookupService.java | 4 +-- .../esql/enrich/LookupFromIndexService.java | 4 +-- .../GoogleVertexAiEmbeddingsTaskSettings.java | 6 ++--- ...leVertexAiEmbeddingsTaskSettingsTests.java | 2 +- 26 files changed, 48 insertions(+), 102 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9d521ffc1503a..c5cbf22fcc76f 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -103,31 +103,7 @@ static TransportVersion def(int id) { public static final TransportVersion INITIAL_ELASTICSEARCH_8_16_6 = def(8_772_0_06); public static final TransportVersion REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_773_0_00); public static final 
TransportVersion REVERT_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(8_774_0_00); - public static final TransportVersion ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED = def(8_775_0_00); - public static final TransportVersion INFERENCE_DONT_PERSIST_ON_READ = def(8_776_0_00); - public static final TransportVersion SIMULATE_MAPPING_ADDITION = def(8_777_0_00); - public static final TransportVersion INTRODUCE_ALL_APPLICABLE_SELECTOR = def(8_778_0_00); - public static final TransportVersion INDEX_MODE_LOOKUP = def(8_779_0_00); - public static final TransportVersion INDEX_REQUEST_REMOVE_METERING = def(8_780_0_00); - public static final TransportVersion CPU_STAT_STRING_PARSING = def(8_781_0_00); - public static final TransportVersion QUERY_RULES_RETRIEVER = def(8_782_0_00); - public static final TransportVersion ESQL_CCS_EXEC_INFO_WITH_FAILURES = def(8_783_0_00); - public static final TransportVersion LOGSDB_TELEMETRY = def(8_784_0_00); - public static final TransportVersion LOGSDB_TELEMETRY_STATS = def(8_785_0_00); - public static final TransportVersion KQL_QUERY_ADDED = def(8_786_0_00); - public static final TransportVersion ROLE_MONITOR_STATS = def(8_787_0_00); - public static final TransportVersion DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK = def(8_788_0_00); - public static final TransportVersion ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO = def(8_789_0_00); - public static final TransportVersion VERTEX_AI_INPUT_TYPE_ADDED = def(8_790_0_00); - public static final TransportVersion SKIP_INNER_HITS_SEARCH_SOURCE = def(8_791_0_00); - public static final TransportVersion QUERY_RULES_LIST_INCLUDES_TYPES = def(8_792_0_00); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS = def(8_793_0_00); - public static final TransportVersion INDEX_STATS_ADDITIONAL_FIELDS_REVERT = def(8_794_0_00); - public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_0_00); - public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_0_00); - public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_0_00); - public static final TransportVersion LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17 = def(8_797_0_01); - public static final TransportVersion SOURCE_MODE_TELEMETRY_FIX_8_17 = def(8_797_0_02); + public static final TransportVersion V_8_17_0 = def(8_797_0_02); public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_3 = def(8_797_0_03); public static final TransportVersion INITIAL_ELASTICSEARCH_8_17_4 = def(8_797_0_04); public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_0_00); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java index bde7ae5971845..e52b29438999d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfo.java @@ -66,8 +66,7 @@ public NodeInfo(StreamInput in) throws IOException { super(in); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { version = in.readString(); - if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) - || in.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) { compatibilityVersions = CompatibilityVersions.readVersion(in); } else { compatibilityVersions = new 
CompatibilityVersions(TransportVersion.readVersion(in), Map.of()); // unknown mappings versions @@ -252,8 +251,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { Version.writeVersion(Version.fromString(version), out); } - if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) - || out.getTransportVersion().onOrAfter(TransportVersions.ADD_COMPATIBILITY_VERSIONS_TO_NODE_INFO)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) { compatibilityVersions.writeTo(out); } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { TransportVersion.writeVersion(compatibilityVersions.transportVersion(), out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java index 02b581ecbdda2..686d432b01a20 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/MappingStats.java @@ -291,8 +291,7 @@ public void writeTo(StreamOutput out) throws IOException { } private static boolean canReadOrWriteSourceModeTelemetry(TransportVersion version) { - return version.isPatchFrom(TransportVersions.SOURCE_MODE_TELEMETRY_FIX_8_17) - || version.onOrAfter(TransportVersions.SOURCE_MODE_TELEMETRY); + return version.isPatchFrom(TransportVersions.V_8_17_0) || version.onOrAfter(TransportVersions.SOURCE_MODE_TELEMETRY); } private static OptionalLong ofNullable(Long l) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 0b1cc98f07b42..43dcdf220e6d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -55,15 +55,7 @@ public class IndicesStatsResponse extends ChunkedBroadcastResponse { IndicesStatsResponse(StreamInput in) throws IOException { super(in); shards = in.readArray(ShardStats::new, ShardStats[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { - indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); - indexStateMap = in.readMap(IndexMetadata.State::readFrom); - } else if (in.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { - indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); - indexStateMap = in.readMap(IndexMetadata.State::readFrom); - in.readMap(StreamInput::readStringCollectionAsList); // unused, reverted - in.readMap(StreamInput::readLong); // unused, reverted - } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { // Between 8.1 and INDEX_STATS_ADDITIONAL_FIELDS, we had a different format for the response // where we only had health and state available. 
indexHealthMap = in.readMap(ClusterHealthStatus::readFrom); @@ -186,15 +178,7 @@ public CommonStats getPrimaries() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeArray(shards); - if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS_REVERT)) { - out.writeMap(indexHealthMap, StreamOutput::writeWriteable); - out.writeMap(indexStateMap, StreamOutput::writeWriteable); - } else if (out.getTransportVersion().onOrAfter(TransportVersions.INDEX_STATS_ADDITIONAL_FIELDS)) { - out.writeMap(indexHealthMap, StreamOutput::writeWriteable); - out.writeMap(indexStateMap, StreamOutput::writeWriteable); - out.writeMap(Map.of(), StreamOutput::writeStringCollection); - out.writeMap(Map.of(), StreamOutput::writeLong); - } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { out.writeMap(indexHealthMap, StreamOutput::writeWriteable); out.writeMap(indexStateMap, StreamOutput::writeWriteable); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java index 290d342e9dc12..60485047e70a1 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java @@ -142,7 +142,7 @@ public SimulateBulkRequest(StreamInput in) throws IOException { componentTemplateSubstitutions = Map.of(); indexTemplateSubstitutions = Map.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { this.mappingAddition = (Map) in.readGenericValue(); } else { mappingAddition = Map.of(); @@ -157,7 +157,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeGenericValue(componentTemplateSubstitutions); out.writeGenericValue(indexTemplateSubstitutions); } - if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_MAPPING_ADDITION)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeGenericValue(mappingAddition); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 4ed38ba16b60e..b2a02dce403a5 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -203,7 +203,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio requireDataStream = false; } - if (in.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) { + if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { in.readZLong(); // obsolete normalisedBytesParsed } @@ -803,7 +803,7 @@ private void writeBody(StreamOutput out) throws IOException { out.writeBoolean(requireDataStream); } - if (out.getTransportVersion().before(TransportVersions.INDEX_REQUEST_REMOVE_METERING)) { + if (out.getTransportVersion().before(TransportVersions.V_8_17_0)) { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeZLong(-1); // obsolete normalisedBytesParsed } diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java 
b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index b300fc647a18f..ae55c3f552502 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -911,7 +911,7 @@ public void writeIndicesOptions(StreamOutput out) throws IOException { } if (out.getTransportVersion() .between(TransportVersions.V_8_16_0, TransportVersions.REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX)) { - if (out.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) { + if (out.getTransportVersion().before(TransportVersions.V_8_17_0)) { out.writeVInt(1); // Enum set sized 1 out.writeVInt(0); // ordinal 0 (::data selector) } else { @@ -955,7 +955,7 @@ public static IndicesOptions readIndicesOptions(StreamInput in) throws IOExcepti if (in.getTransportVersion() .between(TransportVersions.V_8_16_0, TransportVersions.REPLACE_FAILURE_STORE_OPTIONS_WITH_SELECTOR_SYNTAX)) { // Reading from an older node, which will be sending either an enum set or a single byte that needs to be read out and ignored. - if (in.getTransportVersion().before(TransportVersions.INTRODUCE_ALL_APPLICABLE_SELECTOR)) { + if (in.getTransportVersion().before(TransportVersions.V_8_17_0)) { int size = in.readVInt(); for (int i = 0; i < size; i++) { in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index a4708f93335cc..2be1c6fc41d96 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -587,7 +587,7 @@ public static void writeTo(IndexMode indexMode, StreamOutput out) throws IOExcep case STANDARD -> 0; case TIME_SERIES -> 1; case LOGSDB -> 2; - case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.INDEX_MODE_LOOKUP) ? 3 : 0; + case LOOKUP -> out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0) ? 
3 : 0; }; out.writeByte((byte) code); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index f628130983a39..d258eeb9ed050 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -168,7 +168,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public static PipelineConfiguration readFrom(StreamInput in) throws IOException { final String id = in.readString(); final Map config; - if (in.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { config = in.readGenericMap(); } else { final BytesReference bytes = in.readSlicedBytesReference(); @@ -190,7 +190,7 @@ public String toString() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - if (out.getTransportVersion().onOrAfter(TransportVersions.INGEST_PIPELINE_CONFIGURATION_AS_MAP)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeGenericMap(config); } else { XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).prettyPrint(); diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java index 6c1ba2dfbe63a..fe08d30e0dd87 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -488,7 +488,7 @@ public Cgroup( Cgroup(final StreamInput in) throws IOException { cpuAcctControlGroup = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { cpuAcctUsageNanos = in.readBigInteger(); } else { cpuAcctUsageNanos = BigInteger.valueOf(in.readLong()); @@ -505,7 +505,7 @@ public Cgroup( @Override public void writeTo(final StreamOutput out) throws IOException { out.writeString(cpuAcctControlGroup); - if (out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeBigInteger(cpuAcctUsageNanos); } else { out.writeLong(cpuAcctUsageNanos.longValue()); @@ -605,7 +605,7 @@ public CpuStat( } CpuStat(final StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { numberOfElapsedPeriods = in.readBigInteger(); numberOfTimesThrottled = in.readBigInteger(); timeThrottledNanos = in.readBigInteger(); @@ -618,7 +618,7 @@ public CpuStat( @Override public void writeTo(final StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.CPU_STAT_STRING_PARSING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeBigInteger(numberOfElapsedPeriods); out.writeBigInteger(numberOfTimesThrottled); out.writeBigInteger(timeThrottledNanos); diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 8e40a57447ea7..d1fb8bf83af21 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -288,8 +288,7 @@ public SearchSourceBuilder(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { rankBuilder = in.readOptionalNamedWriteable(RankBuilder.class); } - if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) - || in.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) { skipInnerHits = in.readBoolean(); } else { skipInnerHits = false; @@ -383,8 +382,7 @@ public void writeTo(StreamOutput out) throws IOException { } else if (rankBuilder != null) { throw new IllegalArgumentException("cannot serialize [rank] to version [" + out.getTransportVersion().toReleaseVersion() + "]"); } - if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_1) - || out.getTransportVersion().onOrAfter(TransportVersions.SKIP_INNER_HITS_SEARCH_SOURCE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_1)) { out.writeBoolean(skipInnerHits); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java index b32e95c5fc9d8..f35dcfadf0beb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/LogsDBFeatureSetUsage.java @@ -28,7 +28,7 @@ public LogsDBFeatureSetUsage(StreamInput input) throws IOException { super(input); indicesCount = input.readVInt(); indicesWithSyntheticSource = input.readVInt(); - if (input.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) { + if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { numDocs = input.readVLong(); sizeInBytes = input.readVLong(); } else { @@ -36,7 +36,7 @@ public LogsDBFeatureSetUsage(StreamInput input) throws IOException { sizeInBytes = 0; } var transportVersion = input.getTransportVersion(); - if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17) + if (transportVersion.isPatchFrom(TransportVersions.V_8_17_0) || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) { hasCustomCutoffDate = input.readBoolean(); } else { @@ -49,12 +49,12 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVInt(indicesCount); out.writeVInt(indicesWithSyntheticSource); - if (out.getTransportVersion().onOrAfter(TransportVersions.LOGSDB_TELEMETRY_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeVLong(numDocs); out.writeVLong(sizeInBytes); } var transportVersion = out.getTransportVersion(); - if (transportVersion.isPatchFrom(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE_FIX_8_17) + if (transportVersion.isPatchFrom(TransportVersions.V_8_17_0) || transportVersion.onOrAfter(TransportVersions.LOGSDB_TELEMETRY_CUSTOM_CUTOFF_DATE)) { out.writeBoolean(hasCustomCutoffDate); } @@ -79,7 +79,7 @@ public LogsDBFeatureSetUsage( @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.LOGSDB_TELEMETRY; + return TransportVersions.V_8_17_0; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java index ba3d417d02672..00c73e4d8edc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceModelAction.java @@ -62,8 +62,7 @@ public Request(StreamInput in) throws IOException { super(in); this.inferenceEntityId = in.readString(); this.taskType = TaskType.fromStream(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) - || in.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { this.persistDefaultConfig = in.readBoolean(); } else { this.persistDefaultConfig = PERSIST_DEFAULT_CONFIGS; @@ -88,8 +87,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(inferenceEntityId); taskType.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.INFERENCE_DONT_PERSIST_ON_READ) - || out.getTransportVersion().isPatchFrom(TransportVersions.V_8_16_0)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeBoolean(this.persistDefaultConfig); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java index 1928cf117dde3..006015a9c8541 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissions.java @@ -32,8 +32,6 @@ import java.util.TreeSet; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS; - /** * Represents the set of permissions for remote clusters. This is intended to be the model for both the {@link RoleDescriptor} * and {@link Role}. 
This model is intended to be converted to local cluster permissions @@ -71,6 +69,7 @@ public class RemoteClusterPermissions implements NamedWriteable, ToXContentObject { public static final TransportVersion ROLE_REMOTE_CLUSTER_PRIVS = TransportVersions.V_8_15_0; + public static final TransportVersion ROLE_MONITOR_STATS = TransportVersions.V_8_17_0; public static final String NAME = "remote_cluster_permissions"; private static final Logger logger = LogManager.getLogger(RemoteClusterPermissions.class); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java index c999c970a76da..a485c495b5583 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/AuthenticationTests.java @@ -44,10 +44,10 @@ import java.util.stream.Collectors; import static java.util.Map.entry; -import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS; import static org.elasticsearch.xpack.core.security.authc.Authentication.VERSION_API_KEY_ROLES_AS_BYTES; import static org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper.randomCrossClusterAccessSubjectInfo; import static org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfoTests.randomRoleDescriptorsIntersection; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_MONITOR_STATS; import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java index a39aff3a6137f..fbf85d216fce5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionsTests.java @@ -31,7 +31,7 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.TransportVersions.ROLE_MONITOR_STATS; +import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_MONITOR_STATS; import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.ROLE_REMOTE_CLUSTER_PRIVS; import static org.elasticsearch.xpack.core.security.authz.permission.RemoteClusterPermissions.lastTransportVersionPermission; import static org.hamcrest.Matchers.containsString; diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 1fceb917ece53..cf88f21689edb 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -67,8 +67,7 @@ public Response(StreamInput in) throws IOException { if 
(in.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { mutableResourceDeprecations.put(IndexDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new)); } - if (in.getTransportVersion() - .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + if (in.getTransportVersion().between(TransportVersions.V_8_17_0, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { mutableResourceDeprecations.put(DataStreamDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new)); } if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) { @@ -140,8 +139,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().before(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { out.writeMap(getIndexSettingsIssues(), StreamOutput::writeCollection); } - if (out.getTransportVersion() - .between(TransportVersions.DATA_STREAM_INDEX_VERSION_DEPRECATION_CHECK, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { + if (out.getTransportVersion().between(TransportVersions.V_8_17_0, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { out.writeMap(getDataStreamDeprecationIssues(), StreamOutput::writeCollection); } if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index f43ab815090c6..88a61d50f8d52 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -68,8 +68,7 @@ public QueryRulesetListItem(StreamInput in) throws IOException { this.criteriaTypeToCountMap = Map.of(); } TransportVersion streamTransportVersion = in.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1) - || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + if (streamTransportVersion.onOrAfter(TransportVersions.V_8_16_1)) { this.ruleTypeToCountMap = in.readMap(m -> in.readEnum(QueryRule.QueryRuleType.class), StreamInput::readInt); } else { this.ruleTypeToCountMap = Map.of(); @@ -103,8 +102,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(criteriaTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } TransportVersion streamTransportVersion = out.getTransportVersion(); - if (streamTransportVersion.isPatchFrom(TransportVersions.V_8_16_1) - || streamTransportVersion.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + if (streamTransportVersion.onOrAfter(TransportVersions.V_8_16_1)) { out.writeMap(ruleTypeToCountMap, StreamOutput::writeEnum, StreamOutput::writeInt); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java index 59f3d9aed31ce..aa9d06a62b6c4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/retriever/RuleQueryRankDoc.java @@ -121,6 +121,6 @@ protected void doToXContent(XContentBuilder builder, Params params) throws IOExc 
@Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.QUERY_RULES_RETRIEVER; + return TransportVersions.V_8_17_0; } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java index e1243b782f73f..7f22866de4412 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/action/ListQueryRulesetsActionResponseBWCSerializingTests.java @@ -59,7 +59,7 @@ protected ListQueryRulesetsAction.Response mutateInstanceForVersion( ListQueryRulesetsAction.Response instance, TransportVersion version ) { - if (version.isPatchFrom(TransportVersions.V_8_16_1) || version.onOrAfter(TransportVersions.QUERY_RULES_LIST_INCLUDES_TYPES)) { + if (version.onOrAfter(TransportVersions.V_8_16_1)) { return instance; } else if (version.onOrAfter(QueryRulesetListItem.EXPANDED_RULESET_COUNT_TRANSPORT_VERSION)) { List updatedResults = new ArrayList<>(); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java index d7ae438bc3189..0f29c9b1da068 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/FieldAttribute.java @@ -148,7 +148,7 @@ public static FieldAttribute readFrom(StreamInput in) throws IOException { } private void writeParentName(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { ((PlanStreamOutput) out).writeOptionalCachedString(parentName); } else { // Previous versions only used the parent field attribute to retrieve the parent's name, so we can use just any @@ -159,7 +159,7 @@ private void writeParentName(StreamOutput out) throws IOException { } private static String readParentName(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_FIELD_ATTRIBUTE_PARENT_SIMPLIFIED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { return ((PlanStreamInput) in).readOptionalCachedString(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java index 76a790b25c8d2..ba986403a8350 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlExecutionInfo.java @@ -424,7 +424,7 @@ public Cluster(StreamInput in) throws IOException { this.failedShards = in.readOptionalInt(); this.took = in.readOptionalTimeValue(); this.skipUnavailable = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXEC_INFO_WITH_FAILURES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { this.failures = 
Collections.unmodifiableList(in.readCollectionAsList(ShardSearchFailure::readShardSearchFailure)); } else { this.failures = Collections.emptyList(); @@ -442,7 +442,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalInt(failedShards); out.writeOptionalTimeValue(took); out.writeBoolean(skipUnavailable); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CCS_EXEC_INFO_WITH_FAILURES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeCollection(failures); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index 1dc18c090c1dd..67783c2556cd6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -211,7 +211,7 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro PlanStreamInput planIn = new PlanStreamInput(in, in.namedWriteableRegistry(), null); List extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); var source = Source.EMPTY; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { source = Source.readFrom(planIn); } TransportRequest result = new TransportRequest( @@ -242,7 +242,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeWriteable(inputPage); PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { source.writeTo(planOut); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java index 62d9733a04581..b372570ed5026 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexService.java @@ -149,7 +149,7 @@ static TransportRequest readFrom(StreamInput in, BlockFactory blockFactory) thro List extractFields = planIn.readNamedWriteableCollectionAsList(NamedExpression.class); String matchField = in.readString(); var source = Source.EMPTY; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { source = Source.readFrom(planIn); } // Source.readFrom() requires the query from the Configuration passed to PlanStreamInput. 
@@ -182,7 +182,7 @@ public void writeTo(StreamOutput out) throws IOException { PlanStreamOutput planOut = new PlanStreamOutput(out, null); planOut.writeNamedWriteableCollection(extractFields); out.writeString(matchField); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_RUNTIME_WARNINGS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { source.writeTo(planOut); } if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_SOURCE_TEXT)) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java index 9b759a4661bce..e3c9c3524584c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettings.java @@ -107,9 +107,7 @@ public GoogleVertexAiEmbeddingsTaskSettings(@Nullable Boolean autoTruncate, @Nul public GoogleVertexAiEmbeddingsTaskSettings(StreamInput in) throws IOException { this.autoTruncate = in.readOptionalBoolean(); - var inputType = (in.getTransportVersion().onOrAfter(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED)) - ? in.readOptionalEnum(InputType.class) - : null; + var inputType = (in.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) ? in.readOptionalEnum(InputType.class) : null; validateInputType(inputType); this.inputType = inputType; @@ -150,7 +148,7 @@ public TransportVersion getMinimalSupportedVersion() { public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(this.autoTruncate); - if (out.getTransportVersion().onOrAfter(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_17_0)) { out.writeOptionalEnum(this.inputType); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java index 0a390b114702c..45af4a94c3f17 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/embeddings/GoogleVertexAiEmbeddingsTaskSettingsTests.java @@ -256,7 +256,7 @@ protected GoogleVertexAiEmbeddingsTaskSettings mutateInstanceForVersion( GoogleVertexAiEmbeddingsTaskSettings instance, TransportVersion version ) { - if (version.before(TransportVersions.VERTEX_AI_INPUT_TYPE_ADDED)) { + if (version.before(TransportVersions.V_8_17_0)) { // default to null input type if node is on a version before input type was introduced return new GoogleVertexAiEmbeddingsTaskSettings(instance.autoTruncate(), null); } From 106d7eaef5672c6742409a4c29dcf610648c2146 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Wed, 5 Mar 2025 15:18:27 +0000 Subject: [PATCH 37/54] Remove 7.11 and 7.12 transport versions (#124024) --- .../org/elasticsearch/TransportVersions.java | 2 - 
.../indices/create/CreateIndexRequest.java | 8 +-- .../cluster/metadata/RepositoryMetadata.java | 10 +--- .../plugins/PluginDescriptor.java | 23 +++----- .../snapshots/SnapshotsService.java | 2 - .../action/search/SearchRequestTests.java | 4 -- .../cluster/node/DiscoveryNodeTests.java | 19 ------- .../MultiTermsAggregationBuilder.java | 2 +- .../rate/RateAggregationBuilder.java | 18 +++--- .../AggregateMetricFeatureSetUsage.java | 2 +- .../core/ml/datafeed/DatafeedConfig.java | 7 +-- .../upgrade/SnapshotUpgradeTaskParams.java | 2 +- .../security/SecurityFeatureSetUsage.java | 8 +-- .../DelegatePkiAuthenticationResponse.java | 9 +-- .../OpenIdConnectAuthenticateResponse.java | 5 +- ...dConnectPrepareAuthenticationResponse.java | 5 +- .../action/saml/SamlAuthenticateResponse.java | 5 +- .../action/token/CreateTokenResponse.java | 9 +-- .../core/spatial/SpatialFeatureSetUsage.java | 10 +--- .../transform/transforms/TransformConfig.java | 15 +---- .../transforms/TransformConfigTests.java | 46 ---------------- .../deprecation/DeprecationInfoAction.java | 14 +---- .../task/OpenJobPersistentTasksExecutor.java | 15 +---- .../TransportStartDatafeedActionTests.java | 55 ------------------- .../GeoLineAggregationBuilder.java | 2 +- 25 files changed, 39 insertions(+), 258 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c5cbf22fcc76f..0dd856ab8c64e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -61,8 +61,6 @@ static TransportVersion def(int id) { public static final TransportVersion V_7_8_1 = def(7_08_01_99); public static final TransportVersion V_7_9_0 = def(7_09_00_99); public static final TransportVersion V_7_10_0 = def(7_10_00_99); - public static final TransportVersion V_7_11_0 = def(7_11_00_99); - public static final TransportVersion V_7_12_0 = def(7_12_00_99); public static final TransportVersion V_7_13_0 = def(7_13_00_99); public static final TransportVersion V_7_14_0 = def(7_14_00_99); public static final TransportVersion V_7_15_0 = def(7_15_00_99); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index cc96954c8a8e4..580da139aa0f1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -105,9 +105,7 @@ public CreateIndexRequest(StreamInput in) throws IOException { aliases.add(new Alias(in)); } waitForActiveShards = ActiveShardCount.readFrom(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - origin = in.readString(); - } + origin = in.readString(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); } else { @@ -518,9 +516,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeCollection(aliases); waitForActiveShards.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { - out.writeString(origin); - } + out.writeString(origin); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(this.requireDataStream); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java index 0b9c359006b23..0695c284c5cac 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/RepositoryMetadata.java @@ -143,11 +143,7 @@ public long pendingGeneration() { public RepositoryMetadata(StreamInput in) throws IOException { name = in.readString(); - if (in.getTransportVersion().onOrAfter(SnapshotsService.UUIDS_IN_REPO_DATA_TRANSPORT_VERSION)) { - uuid = in.readString(); - } else { - uuid = RepositoryData.MISSING_UUID; - } + uuid = in.readString(); type = in.readString(); settings = Settings.readSettingsFromStream(in); generation = in.readLong(); @@ -162,9 +158,7 @@ public RepositoryMetadata(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); - if (out.getTransportVersion().onOrAfter(SnapshotsService.UUIDS_IN_REPO_DATA_TRANSPORT_VERSION)) { - out.writeString(uuid); - } + out.writeString(uuid); out.writeString(type); settings.writeTo(out); out.writeLong(generation); diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java b/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java index 5e5c7b3e00421..ba40e9ad2bdd8 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginDescriptor.java @@ -48,7 +48,6 @@ public class PluginDescriptor implements Writeable, ToXContentObject { public static final String ES_PLUGIN_POLICY = "plugin-security.policy"; - private static final TransportVersion LICENSED_PLUGINS_SUPPORT = TransportVersions.V_7_11_0; private static final TransportVersion MODULE_NAME_SUPPORT = TransportVersions.V_8_3_0; private static final TransportVersion BOOTSTRAP_SUPPORT_REMOVED = TransportVersions.V_8_4_0; @@ -140,15 +139,11 @@ public PluginDescriptor(final StreamInput in) throws IOException { extendedPlugins = in.readStringCollectionAsList(); hasNativeController = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(LICENSED_PLUGINS_SUPPORT)) { - if (in.getTransportVersion().before(BOOTSTRAP_SUPPORT_REMOVED)) { - in.readString(); // plugin type - in.readOptionalString(); // java opts - } - isLicensed = in.readBoolean(); - } else { - isLicensed = false; + if (in.getTransportVersion().before(BOOTSTRAP_SUPPORT_REMOVED)) { + in.readString(); // plugin type + in.readOptionalString(); // java opts } + isLicensed = in.readBoolean(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { isModular = in.readBoolean(); @@ -183,13 +178,11 @@ public void writeTo(final StreamOutput out) throws IOException { out.writeStringCollection(extendedPlugins); out.writeBoolean(hasNativeController); - if (out.getTransportVersion().onOrAfter(LICENSED_PLUGINS_SUPPORT)) { - if (out.getTransportVersion().before(BOOTSTRAP_SUPPORT_REMOVED)) { - out.writeString("ISOLATED"); - out.writeOptionalString(null); - } - out.writeBoolean(isLicensed); + if (out.getTransportVersion().before(BOOTSTRAP_SUPPORT_REMOVED)) { + out.writeString("ISOLATED"); + out.writeOptionalString(null); } + out.writeBoolean(isLicensed); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) { out.writeBoolean(isModular); out.writeBoolean(isStable); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 
8bc7d3bcb840a..6ee1067d8149d 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -13,7 +13,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; @@ -146,7 +145,6 @@ public final class SnapshotsService extends AbstractLifecycleComponent implement public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_9_0; public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersions.V_7_12_0; - public static final TransportVersion UUIDS_IN_REPO_DATA_TRANSPORT_VERSION = TransportVersions.V_7_12_0; public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersions.V_7_16_0; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 08f717dacf47d..9d9132ecdffe8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -172,10 +172,6 @@ private static RescoreVectorBuilder randomRescoreVectorBuilder() { public void testRandomVersionSerialization() throws IOException { SearchRequest searchRequest = createSearchRequest(); TransportVersion version = TransportVersionUtils.randomVersion(random()); - if (version.before(TransportVersions.V_7_11_0) && searchRequest.source() != null) { - // Versions before 7.11.0 don't support runtime mappings - searchRequest.source().runtimeMappings(emptyMap()); - } if (version.before(TransportVersions.V_8_4_0)) { // Versions before 8.4.0 don't support force_synthetic_source searchRequest.setForceSyntheticSource(false); diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java index 744a12d5ab6e0..3a3e1414cf0e7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.cluster.node; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -113,24 +112,6 @@ public void testDiscoveryNodeRoleWithOldVersion() throws Exception { assertThat(role.roleNameAbbreviation(), equalTo("z")); assertTrue(role.canContainData()); } - - { - BytesStreamOutput streamOutput = new BytesStreamOutput(); - streamOutput.setTransportVersion(TransportVersions.V_7_11_0); - node.writeTo(streamOutput); - - StreamInput in = StreamInput.wrap(streamOutput.bytes().toBytesRef().bytes); - in.setTransportVersion(TransportVersions.V_7_11_0); - DiscoveryNode serialized = new DiscoveryNode(in); - final Set roles = serialized.getRoles(); - assertThat(roles, hasSize(1)); - @SuppressWarnings("OptionalGetWithoutIsPresent") - final DiscoveryNodeRole role = roles.stream().findFirst().get(); - assertThat(role.roleName(), equalTo("data_custom_role")); - assertThat(role.roleNameAbbreviation(), equalTo("z")); - assertTrue(role.canContainData()); 
- } - } public void testDiscoveryNodeIsRemoteClusterClientDefault() { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java index 6307cfa5b3674..bc1cd3fa05d52 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregationBuilder.java @@ -394,6 +394,6 @@ public boolean equals(Object obj) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_12_0; + return TransportVersions.ZERO; } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java index 11de2032e27d7..0b2d3606854a4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java @@ -88,10 +88,8 @@ public RateAggregationBuilder(StreamInput in) throws IOException { } else { rateUnit = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - if (in.readBoolean()) { - rateMode = in.readEnum(RateMode.class); - } + if (in.readBoolean()) { + rateMode = in.readEnum(RateMode.class); } } @@ -107,13 +105,11 @@ protected void innerWriteTo(StreamOutput out) throws IOException { } else { out.writeByte((byte) 0); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - if (rateMode != null) { - out.writeBoolean(true); - out.writeEnum(rateMode); - } else { - out.writeBoolean(false); - } + if (rateMode != null) { + out.writeBoolean(true); + out.writeEnum(rateMode); + } else { + out.writeBoolean(false); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java index 5505cf3271b8b..28f8b2013892e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/aggregatemetric/AggregateMetricFeatureSetUsage.java @@ -28,7 +28,7 @@ public AggregateMetricFeatureSetUsage(boolean available, boolean enabled) { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_11_0; + return TransportVersions.ZERO; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 742b538b6ce4b..44151f992f75e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.search.SearchRequest; import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.SimpleDiffable; @@ -86,8 +85,6 @@ */ public class DatafeedConfig implements SimpleDiffable, ToXContentObject { - private static final TransportVersion RUNTIME_MAPPINGS_INTRODUCED = TransportVersions.V_7_11_0; - public static final int DEFAULT_SCROLL_SIZE = 1000; private static final int SECONDS_IN_MINUTE = 60; @@ -341,9 +338,7 @@ public Integer getScrollSize() { } public Optional> minRequiredTransportVersion() { - return runtimeMappings.isEmpty() - ? Optional.empty() - : Optional.of(Tuple.tuple(RUNTIME_MAPPINGS_INTRODUCED, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName())); + return Optional.empty(); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/snapshot/upgrade/SnapshotUpgradeTaskParams.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/snapshot/upgrade/SnapshotUpgradeTaskParams.java index c159f997798a9..7a0a524f55920 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/snapshot/upgrade/SnapshotUpgradeTaskParams.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/snapshot/upgrade/SnapshotUpgradeTaskParams.java @@ -69,7 +69,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_11_0; + return TransportVersions.ZERO; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java index 3ebfad04a0f13..52ccc3c91985c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java @@ -64,9 +64,7 @@ public SecurityFeatureSetUsage(StreamInput in) throws IOException { anonymousUsage = in.readGenericMap(); roleMappingStoreUsage = in.readGenericMap(); fips140Usage = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - operatorPrivilegesUsage = in.readGenericMap(); - } + operatorPrivilegesUsage = in.readGenericMap(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { domainsUsage = in.readGenericMap(); } @@ -132,9 +130,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeGenericMap(anonymousUsage); out.writeGenericMap(roleMappingStoreUsage); out.writeGenericMap(fips140Usage); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - out.writeGenericMap(operatorPrivilegesUsage); - } + out.writeGenericMap(operatorPrivilegesUsage); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { out.writeGenericMap(domainsUsage); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java index a5c8e10496b3b..70d0986ac6dc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/DelegatePkiAuthenticationResponse.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.action; -import org.elasticsearch.TransportVersions; import 
org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -45,9 +44,7 @@ public DelegatePkiAuthenticationResponse(StreamInput input) throws IOException { super(input); accessToken = input.readString(); expiresIn = input.readTimeValue(); - if (input.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(input); - } + authentication = new Authentication(input); } public String getAccessToken() { @@ -66,9 +63,7 @@ public Authentication getAuthentication() { public void writeTo(StreamOutput out) throws IOException { out.writeString(accessToken); out.writeTimeValue(expiresIn); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication.writeTo(out); - } + authentication.writeTo(out); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java index 92b27826e8759..93cbdd40c10c9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectAuthenticateResponse.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.security.action.oidc; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; @@ -61,8 +60,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(accessTokenString); out.writeString(refreshTokenString); out.writeTimeValue(expiresIn); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication.writeTo(out); - } + authentication.writeTo(out); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java index 5dcfadd3dd01c..5013da1b4abdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/oidc/OpenIdConnectPrepareAuthenticationResponse.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.security.action.oidc; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContentObject; @@ -62,9 +61,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(authenticationRequestUrl); out.writeString(state); out.writeString(nonce); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - out.writeString(realmName); - } + out.writeString(realmName); } public String toString() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java index 71b5e93e60a2c..6b9f1632a159e 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateResponse.java @@ -69,9 +69,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(tokenString); out.writeString(refreshToken); out.writeTimeValue(expiresIn); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication.writeTo(out); - } + authentication.writeTo(out); } - } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java index 30522e3389a8a..a670482b7935c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenResponse.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.security.action.token; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -39,9 +38,7 @@ public CreateTokenResponse(StreamInput in) throws IOException { scope = in.readOptionalString(); refreshToken = in.readOptionalString(); kerberosAuthenticationResponseToken = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication = new Authentication(in); - } + authentication = new Authentication(in); } public CreateTokenResponse( @@ -91,9 +88,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(scope); out.writeOptionalString(refreshToken); out.writeOptionalString(kerberosAuthenticationResponseToken); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - authentication.writeTo(out); - } + authentication.writeTo(out); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java index f6aa8b884105f..215043a7b4b8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/spatial/SpatialFeatureSetUsage.java @@ -29,11 +29,7 @@ public SpatialFeatureSetUsage(SpatialStatsAction.Response statsResponse) { public SpatialFeatureSetUsage(StreamInput input) throws IOException { super(input); - if (input.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - this.statsResponse = new SpatialStatsAction.Response(input); - } else { - this.statsResponse = null; - } + this.statsResponse = new SpatialStatsAction.Response(input); } @Override @@ -48,9 +44,7 @@ SpatialStatsAction.Response statsResponse() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { - this.statsResponse.writeTo(out); - } + this.statsResponse.writeTo(out); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java index 
41279d83fdae1..9459605507c7b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfig.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.core.transform.transforms; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.SimpleDiffable; import org.elasticsearch.common.Strings; @@ -25,7 +23,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.common.time.TimeUtils; -import org.elasticsearch.xpack.core.common.validation.SourceDestValidator; import org.elasticsearch.xpack.core.common.validation.SourceDestValidator.SourceDestValidation; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue.Level; @@ -63,8 +60,6 @@ public final class TransformConfig implements SimpleDiffable, W public static final TransformConfigVersion CONFIG_VERSION_LAST_DEFAULTS_CHANGED = TransformConfigVersion.V_7_15_0; public static final String NAME = "data_frame_transform_config"; public static final ParseField HEADERS = new ParseField("headers"); - /** Version in which {@code FieldCapabilitiesRequest.runtime_fields} field was introduced. */ - private static final TransportVersion FIELD_CAPS_RUNTIME_MAPPINGS_INTRODUCED_TRANSPORT_VERSION = TransportVersions.V_7_12_0; /** Specifies all the possible transform functions. */ public enum Function { @@ -341,15 +336,7 @@ public RetentionPolicyConfig getRetentionPolicyConfig() { * @return version */ public List getAdditionalSourceDestValidations() { - if ((source.getRuntimeMappings() == null || source.getRuntimeMappings().isEmpty()) == false) { - SourceDestValidation validation = new SourceDestValidator.RemoteClusterMinimumVersionValidation( - FIELD_CAPS_RUNTIME_MAPPINGS_INTRODUCED_TRANSPORT_VERSION, - "source.runtime_mappings field was set" - ); - return Collections.singletonList(validation); - } else { - return Collections.emptyList(); - } + return List.of(); } public ActionRequestValidationException validate(ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index e6b9e7f75a87d..bd97544f5b3e9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.transform.transforms; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -23,8 +22,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.common.validation.SourceDestValidator.RemoteClusterMinimumVersionValidation; -import org.elasticsearch.xpack.core.common.validation.SourceDestValidator.SourceDestValidation; import 
org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue.Level; import org.elasticsearch.xpack.core.transform.AbstractSerializingTransformTestCase; @@ -50,8 +47,6 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class TransformConfigTests extends AbstractSerializingTransformTestCase { @@ -853,47 +848,6 @@ public void testGetAdditionalSourceDestValidations_WithNoRuntimeMappings() throw assertThat(transformConfig.getAdditionalSourceDestValidations(), is(empty())); } - public void testGetAdditionalSourceDestValidations_WithRuntimeMappings() throws IOException { - String json = """ - { - "id": "body_id", - "source": { - "index": "src", - "runtime_mappings": { - "some-field": "some-value" - } - }, - "dest": { - "index": "dest" - }, - "pivot": { - "group_by": { - "id": { - "terms": { - "field": "id" - } - } - }, - "aggs": { - "avg": { - "avg": { - "field": "points" - } - } - } - } - }"""; - - TransformConfig transformConfig = createTransformConfigFromString(json, "body_id", true); - List additiionalValidations = transformConfig.getAdditionalSourceDestValidations(); - assertThat(additiionalValidations, hasSize(1)); - assertThat(additiionalValidations.get(0), is(instanceOf(RemoteClusterMinimumVersionValidation.class))); - RemoteClusterMinimumVersionValidation remoteClusterMinimumVersionValidation = - (RemoteClusterMinimumVersionValidation) additiionalValidations.get(0); - assertThat(remoteClusterMinimumVersionValidation.getMinExpectedTransportVersion(), is(equalTo(TransportVersions.V_7_12_0))); - assertThat(remoteClusterMinimumVersionValidation.getReason(), is(equalTo("source.runtime_mappings field was set"))); - } - public void testGroupByStayInOrder() throws IOException { String json = Strings.format(""" { diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index cf88f21689edb..762283a1a5b82 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -70,13 +70,7 @@ public Response(StreamInput in) throws IOException { if (in.getTransportVersion().between(TransportVersions.V_8_17_0, TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { mutableResourceDeprecations.put(DataStreamDeprecationChecker.NAME, in.readMapOfLists(DeprecationIssue::new)); } - if (in.getTransportVersion().before(TransportVersions.V_7_11_0)) { - List mlIssues = in.readCollectionAsList(DeprecationIssue::new); - pluginSettingsIssues = new HashMap<>(); - pluginSettingsIssues.put("ml_settings", mlIssues); - } else { - pluginSettingsIssues = in.readMapOfLists(DeprecationIssue::new); - } + pluginSettingsIssues = in.readMapOfLists(DeprecationIssue::new); if (in.getTransportVersion().onOrAfter(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { resourceDeprecationIssues = in.readMap(in2 -> in2.readMapOfLists(DeprecationIssue::new)); } else { @@ -142,11 +136,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().between(TransportVersions.V_8_17_0, 
TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { out.writeMap(getDataStreamDeprecationIssues(), StreamOutput::writeCollection); } - if (out.getTransportVersion().before(TransportVersions.V_7_11_0)) { - out.writeCollection(pluginSettingsIssues.getOrDefault("ml_settings", Collections.emptyList())); - } else { - out.writeMap(pluginSettingsIssues, StreamOutput::writeCollection); - } + out.writeMap(pluginSettingsIssues, StreamOutput::writeCollection); if (out.getTransportVersion().onOrAfter(TransportVersions.RESOURCE_DEPRECATION_CHECKS)) { out.writeMap(resourceDeprecationIssues, (o, v) -> o.writeMap(v, StreamOutput::writeCollection)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java index 2ed0321e32c77..3b364721205cb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java @@ -12,8 +12,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.RetryableAction; import org.elasticsearch.client.internal.Client; @@ -82,10 +80,6 @@ public class OpenJobPersistentTasksExecutor extends AbstractJobPersistentTasksEx private static final Logger logger = LogManager.getLogger(OpenJobPersistentTasksExecutor.class); - // Resuming a job with a running datafeed from its current snapshot was added in 7.11 and - // can only be done if the master node is on or after that version. - private static final TransportVersion MIN_TRANSPORT_VERSION_FOR_REVERTING_TO_CURRENT_SNAPSHOT = TransportVersions.V_7_11_0; - public static String[] indicesOfInterest(String resultsIndex) { if (resultsIndex == null) { return new String[] { AnomalyDetectorsIndex.jobStateIndexPattern(), MlMetaIndex.indexName(), MlConfigIndex.indexName() }; @@ -308,14 +302,7 @@ private void runJob(JobTask jobTask, JobState jobState, OpenJobAction.JobParams } ActionListener getRunningDatafeedListener = ActionListener.wrap(runningDatafeedId -> { - if (runningDatafeedId != null - // If the minimum TransportVersion is on or above MIN_TRANSPORT_VERSION_FOR_REVERTING_TO_CURRENT_SNAPSHOT then so must be - // the version associated with the master node, which is what is required to perform this action - && TransportVersionUtils.isMinTransportVersionOnOrAfter( - clusterState, - MIN_TRANSPORT_VERSION_FOR_REVERTING_TO_CURRENT_SNAPSHOT - )) { - + if (runningDatafeedId != null) { // This job has a running datafeed attached to it. // In order to prevent gaps in the model we revert to the current snapshot deleting intervening results. 
RevertToCurrentSnapshotAction revertToCurrentSnapshotAction = new RevertToCurrentSnapshotAction( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java index 8fd1082e0df5c..1ee1d0a30fa63 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java @@ -8,8 +8,6 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.search.SearchModule; @@ -18,21 +16,17 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfigTests; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunner; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunnerTests; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; -import java.util.Arrays; import java.util.Collections; import java.util.Date; -import java.util.Map; import static org.elasticsearch.persistent.PersistentTasksCustomMetadata.INITIAL_ASSIGNMENT; import static org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutorTests.addJobTask; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; @@ -112,55 +106,6 @@ public void testNoDeprecationsLogged() { verify(auditor, never()).warning(any(), any()); } - public void testRemoteClusterVersionCheck() { - Map clusterVersions = Map.of( - "modern_cluster_1", - TransportVersion.current(), - "modern_cluster_2", - TransportVersion.current(), - "old_cluster_1", - TransportVersions.V_7_0_0 - ); - - Map field = Map.of("runtime_field_foo", Map.of("type", "keyword", "script", "")); - - DatafeedConfig config = new DatafeedConfig.Builder(DatafeedConfigTests.createRandomizedDatafeedConfig("foo")).setRuntimeMappings( - field - ).build(); - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> TransportStartDatafeedAction.checkRemoteConfigVersions( - config, - Arrays.asList("old_cluster_1", "modern_cluster_2"), - clusterVersions::get - ) - ); - assertThat( - ex.getMessage(), - containsString( - "remote clusters are expected to run at least version [7.11.0] (reason: [runtime_mappings]), " - + "but the following clusters were too old: [old_cluster_1]" - ) - ); - - // The rest should not throw - TransportStartDatafeedAction.checkRemoteConfigVersions( - config, - Arrays.asList("modern_cluster_1", "modern_cluster_2"), - clusterVersions::get - ); - - DatafeedConfig configWithoutRuntimeMappings = new DatafeedConfig.Builder().setId("foo-datafeed") - .setIndices(Collections.singletonList("bar")) - .setJobId("foo") - .build(); - TransportStartDatafeedAction.checkRemoteConfigVersions( - configWithoutRuntimeMappings, - 
Arrays.asList("old_cluster_1", "modern_cluster_2"), - clusterVersions::get - ); - } - public static TransportStartDatafeedAction.DatafeedTask createDatafeedTask( long id, String type, diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java index e232ec9e463d9..7c8b1545a3a94 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/GeoLineAggregationBuilder.java @@ -212,6 +212,6 @@ public String getType() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_11_0; + return TransportVersions.ZERO; } } From c3e7493d7ae01c1fb0a506991882c2093e4e4e35 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 5 Mar 2025 16:09:37 +0000 Subject: [PATCH 38/54] [ML] Remove deprecated routes for ml trained models APIs (#124019) The 7.x routes for ml trained models _ml/inference/ have been deprecated since 8 and replaced with _ml/trained_models. Also removes query parameters that are no longer supported. --- .../api/ml.get_trained_models.json | 7 -- .../api/ml.infer_trained_model.json | 17 --- .../rest-api-spec/api/ml.stop_datafeed.json | 6 - .../xpack/ml/integration/PyTorchModelIT.java | 9 -- .../xpack/ml/MachineLearning.java | 2 - .../TransportTrainedModelCacheInfoAction.java | 8 -- .../RestDeleteTrainedModelAction.java | 16 +-- .../inference/RestGetTrainedModelsAction.java | 41 +------ .../RestGetTrainedModelsStatsAction.java | 18 +-- ...RestInferTrainedModelDeploymentAction.java | 116 ------------------ .../inference/RestPutTrainedModelAction.java | 16 +-- .../test/ml/3rd_party_deployment.yml | 20 --- .../rest-api-spec/test/ml/inference_crud.yml | 12 +- ...MLModelDeploymentFullClusterRestartIT.java | 18 --- .../upgrades/MLModelDeploymentsUpgradeIT.java | 17 --- .../MlAssignmentPlannerUpgradeIT.java | 11 -- 16 files changed, 8 insertions(+), 326 deletions(-) delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json index 1581fe314246f..e5b635b02075c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_trained_models.json @@ -43,13 +43,6 @@ "required":false, "description":"A comma-separate list of fields to optionally include. Valid options are 'definition' and 'total_feature_importance'. Default is none." }, - "include_model_definition":{ - "type":"boolean", - "required":false, - "description":"Should the full model definition be included in the results. These definitions can be large. So be cautious when including them. 
Defaults to false.", - "default":false, - "deprecated": true - }, "decompress_definition":{ "type":"boolean", "required":false, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json index 6041155b1ea6e..95d389266302c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.infer_trained_model.json @@ -24,23 +24,6 @@ "required":true } } - }, - { - "path":"/_ml/trained_models/{model_id}/deployment/_infer", - "methods":[ - "POST" - ], - "parts":{ - "model_id":{ - "type":"string", - "description":"The unique identifier of the trained model.", - "required":true - } - }, - "deprecated": { - "version":"8.3.0", - "description": "/_ml/trained_models/{model_id}/deployment/_infer is deprecated. Use /_ml/trained_models/{model_id}/_infer instead" - } } ] }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json index f689a33d72c7d..6eb64661f6f41 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.stop_datafeed.json @@ -32,12 +32,6 @@ "required":false, "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)" }, - "allow_no_datafeeds":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)", - "deprecated":true - }, "force":{ "type":"boolean", "required":false, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java index 04f349d67d7fe..3ca24edf25ada 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java @@ -560,15 +560,6 @@ public void testInferWithMultipleDocs() throws IOException { assertArrayEquals(expectedEmbeddings.get(i).toArray(), embedding.toArray()); } } - { - // the deprecated deployment/_infer endpoint does not support multiple docs - Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/deployment/_infer"); - request.setJsonEntity(String.format(Locale.ROOT, """ - { "docs": [%s] } - """, docsBuilder)); - Exception ex = expectThrows(Exception.class, () -> client().performRequest(request)); - assertThat(ex.getMessage(), containsString("multiple documents are not supported")); - } } public void testGetPytorchModelWithDefinition() throws IOException { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index dcbe18a1f34d2..b4042d674a30f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -422,7 +422,6 @@ import org.elasticsearch.xpack.ml.rest.inference.RestGetTrainedModelsAction; 
import org.elasticsearch.xpack.ml.rest.inference.RestGetTrainedModelsStatsAction; import org.elasticsearch.xpack.ml.rest.inference.RestInferTrainedModelAction; -import org.elasticsearch.xpack.ml.rest.inference.RestInferTrainedModelDeploymentAction; import org.elasticsearch.xpack.ml.rest.inference.RestPutTrainedModelAction; import org.elasticsearch.xpack.ml.rest.inference.RestPutTrainedModelAliasAction; import org.elasticsearch.xpack.ml.rest.inference.RestPutTrainedModelDefinitionPartAction; @@ -1503,7 +1502,6 @@ public List getRestHandlers( if (machineLearningExtension.get().isNlpEnabled()) { restHandlers.add(new RestStartTrainedModelDeploymentAction(machineLearningExtension.get().disableInferenceProcessCache())); restHandlers.add(new RestStopTrainedModelDeploymentAction()); - restHandlers.add(new RestInferTrainedModelDeploymentAction()); restHandlers.add(new RestUpdateTrainedModelDeploymentAction()); restHandlers.add(new RestPutTrainedModelVocabularyAction()); restHandlers.add(new RestClearDeploymentCacheAction()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java index 7b52cf09f2204..c5e4584a80b34 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java @@ -13,8 +13,6 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; @@ -87,7 +85,6 @@ protected CacheInfo nodeOperation(NodeModelCacheInfoRequest nodeModelCacheInfoRe ); } - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) // this can be replaced with TransportRequest.Empty in v9 public static class NodeModelCacheInfoRequest extends TransportRequest { NodeModelCacheInfoRequest() {} @@ -100,10 +97,5 @@ public NodeModelCacheInfoRequest(StreamInput in) throws IOException { public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { return new CancellableTask(id, type, action, "", parentTaskId, headers); } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java index b04b5f2f45006..d1ca1ae230ec5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestDeleteTrainedModelAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -28,22 +27,9 @@ @ServerlessScope(Scope.PUBLIC) public class 
RestDeleteTrainedModelAction extends BaseRestHandler { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List routes() { - return List.of( - // Route.builder(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - // .replaces(DELETE, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) - // .build() - new Route(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), - Route.builder(DELETE, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") - .deprecateAndKeep("Use the trained_models API instead.") - .build() - ); + return List.of(new Route(DELETE, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}")); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java index 14f6858f51fba..509060c17274d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsAction.java @@ -9,9 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationCategory; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -38,7 +35,6 @@ import static java.util.Arrays.asList; import static org.elasticsearch.rest.RestRequest.Method.GET; -import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Includes.DEFINITION; import static org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request.ALLOW_NO_MATCH; import static org.elasticsearch.xpack.core.ml.utils.ToXContentParams.EXCLUDE_GENERATED; import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; @@ -46,27 +42,11 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetTrainedModelsAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetTrainedModelsAction.class); - private static final String INCLUDE_MODEL_DEFINITION = "include_model_definition"; - - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. 
@Override public List routes() { return List.of( - // Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - // .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) - // .build(), - // Route.builder(GET, BASE_PATH + "trained_models").replaces(GET, BASE_PATH + "inference", RestApiVersion.V_8).build() new Route(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), - Route.builder(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") - .deprecateAndKeep("Use the trained_models API instead.") - .build(), - new Route(GET, BASE_PATH + "trained_models"), - Route.builder(GET, BASE_PATH + "inference").deprecateAndKeep("Use the trained_models API instead.").build() + new Route(GET, BASE_PATH + "trained_models") ); } @@ -90,22 +70,9 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient Set includes = new HashSet<>( asList(restRequest.paramAsStringArray(GetTrainedModelsAction.Request.INCLUDE.getPreferredName(), Strings.EMPTY_ARRAY)) ); - final GetTrainedModelsAction.Request request; - if (restRequest.hasParam(INCLUDE_MODEL_DEFINITION)) { - deprecationLogger.warn( - DeprecationCategory.API, - INCLUDE_MODEL_DEFINITION, - "[{}] parameter is deprecated! Use [include=definition] instead.", - INCLUDE_MODEL_DEFINITION - ); - request = new GetTrainedModelsAction.Request( - modelId, - tags, - restRequest.paramAsBoolean(INCLUDE_MODEL_DEFINITION, false) ? Set.of(DEFINITION) : Set.of() - ); - } else { - request = new GetTrainedModelsAction.Request(modelId, tags, includes); - } + + final GetTrainedModelsAction.Request request = new GetTrainedModelsAction.Request(modelId, tags, includes); + if (restRequest.hasParam(PageParams.FROM.getPreferredName()) || restRequest.hasParam(PageParams.SIZE.getPreferredName())) { request.setPageParams( new PageParams( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java index 92a89c1365960..f4cc3726c6fdc 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestGetTrainedModelsStatsAction.java @@ -9,7 +9,6 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -30,26 +29,11 @@ @ServerlessScope(Scope.PUBLIC) public class RestGetTrainedModelsStatsAction extends BaseRestHandler { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // that route in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. 
@Override public List routes() { return List.of( - // Route.builder(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/_stats") - // .replaces(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}/_stats", RestApiVersion.V_8) - // .build(), - // Route.builder(GET, BASE_PATH + "trained_models/_stats") - // .replaces(GET, BASE_PATH + "inference/_stats", RestApiVersion.V_8) - // .build() new Route(GET, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}/_stats"), - Route.builder(GET, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}/_stats") - .deprecateAndKeep("Use the trained_models API instead.") - .build(), - new Route(GET, BASE_PATH + "trained_models/_stats"), - Route.builder(GET, BASE_PATH + "inference/_stats").deprecateAndKeep("Use the trained_models API instead.").build() + new Route(GET, BASE_PATH + "trained_models/_stats") ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java deleted file mode 100644 index e377d185e16df..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelDeploymentAction.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.rest.inference; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.RestCancellableNodeClient; -import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.core.ml.action.InferModelAction; -import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; -import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; -import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; - -public class RestInferTrainedModelDeploymentAction extends BaseRestHandler { - - static final String PATH = BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID.getPreferredName() + "}/deployment/_infer"; - - @Override - public String getName() { - return "xpack_ml_infer_trained_models_deployment_action"; - } - - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // these routes were ".deprecated" in RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // this API in v9. It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. 
- @Override - public List routes() { - return Collections.singletonList( - // Route.builder(POST, PATH) - // .deprecated( - // "[" - // + POST.name() - // + " " - // + PATH - // + "] is deprecated! Use [" - // + POST.name() - // + " " - // + RestInferTrainedModelAction.PATH - // + "] instead.", - // RestApiVersion.V_8 - // ) - // .build() - Route.builder(POST, PATH) - .deprecateAndKeep( - "[" - + POST.name() - + " " - + PATH - + "] is deprecated! Use [" - + POST.name() - + " " - + RestInferTrainedModelAction.PATH - + "] instead." - ) - .build() - ); - } - - @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String modelId = restRequest.param(TrainedModelConfig.MODEL_ID.getPreferredName()); - if (restRequest.hasContent() == false) { - throw ExceptionsHelper.badRequestException("requires body"); - } - InferModelAction.Request.Builder requestBuilder = InferModelAction.Request.parseRequest(modelId, restRequest.contentParser()); - - if (restRequest.hasParam(InferModelAction.Request.TIMEOUT.getPreferredName())) { - TimeValue inferTimeout = restRequest.paramAsTime( - InferModelAction.Request.TIMEOUT.getPreferredName(), - InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API - ); - requestBuilder.setInferenceTimeout(inferTimeout); - } - - // Unlike the _infer API, deployment/_infer only accepts a single document - var request = requestBuilder.build(); - if (request.getObjectsToInfer() != null && request.getObjectsToInfer().size() > 1) { - ValidationException ex = new ValidationException(); - ex.addValidationError("multiple documents are not supported"); - throw ex; - } - - return channel -> new RestCancellableNodeClient(client, restRequest.getHttpChannel()).execute( - InferModelAction.EXTERNAL_INSTANCE, - request, - // This API is deprecated but refactoring makes it simpler to call - // the new replacement API and swap in the old response. - ActionListener.wrap(response -> { - InferTrainedModelDeploymentAction.Response oldResponse = new InferTrainedModelDeploymentAction.Response( - response.getInferenceResults() - ); - new RestToXContentListener<>(channel).onResponse(oldResponse); - }, e -> new RestToXContentListener<>(channel).onFailure(e)) - - ); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java index 437861c5f9a00..523066f359b05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestPutTrainedModelAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.rest.inference; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -27,22 +26,9 @@ @ServerlessScope(Scope.PUBLIC) public class RestPutTrainedModelAction extends BaseRestHandler { - @UpdateForV9(owner = UpdateForV9.Owner.MACHINE_LEARNING) - // one or more routes use ".replaces" with RestApiVersion.V_8 which will require use of REST API compatibility headers to access - // that route in v9. 
It is unclear if this was intentional for v9, and the code has been updated to ".deprecateAndKeep" which will - // continue to emit deprecations warnings but will not require any special headers to access the API in v9. - // Please review and update the code and tests as needed. The original code remains commented out below for reference. @Override public List routes() { - return List.of( - // Route.builder(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}") - // .replaces(PUT, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}", RestApiVersion.V_8) - // .build() - new Route(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}"), - Route.builder(PUT, BASE_PATH + "inference/{" + TrainedModelConfig.MODEL_ID + "}") - .deprecateAndKeep("Use the trained_models API instead.") - .build() - ); + return List.of(new Route(PUT, BASE_PATH + "trained_models/{" + TrainedModelConfig.MODEL_ID + "}")); } @Override diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index 18332e14a4e34..6fbc5e346b0eb 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -271,8 +271,6 @@ setup: - match: { assignment.task_parameters.cache_size: 10kb } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -283,8 +281,6 @@ setup: } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -295,8 +291,6 @@ setup: } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -426,8 +420,6 @@ setup: - match: { assignment.task_parameters.cache_size: 10kb } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -438,8 +430,6 @@ setup: } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -450,8 +440,6 @@ setup: } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -475,8 +463,6 @@ setup: - match: { cleared: true } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model" body: > @@ -538,8 +524,6 @@ setup: model_id: "test_model" - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' 
ml.infer_trained_model: model_id: "pytorch" body: > @@ -610,8 +594,6 @@ setup: - match: { assignment.task_parameters.model_id: test_model } - match: { assignment.task_parameters.deployment_id: test_model_for_search } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model_for_search" body: > @@ -631,8 +613,6 @@ setup: - match: { assignment.task_parameters.model_id: test_model } - match: { assignment.task_parameters.deployment_id: test_model_for_ingest } - do: - allowed_warnings: - - '[POST /_ml/trained_models/{model_id}/deployment/_infer] is deprecated! Use [POST /_ml/trained_models/{model_id}/_infer] instead.' ml.infer_trained_model: model_id: "test_model_for_ingest" body: > diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml index a53e5be54e35b..18e3c73f98110 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/inference_crud.yml @@ -931,17 +931,7 @@ setup: - is_false: trained_model_configs.0.estimated_heap_memory_usage - is_false: trained_model_configs.0.estimated_operations - is_false: trained_model_configs.0.license_level ---- -"Test deprecation of include model definition param": - - skip: - features: "warnings" - - do: - warnings: - - "[include_model_definition] parameter is deprecated! Use [include=definition] instead." - ml.get_trained_models: - model_id: "a-regression-model-1" - include_model_definition: true - decompress_definition: false + --- "Test put model model aliases": diff --git a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java index dc9afb1bec237..7ef15a13a16c5 100644 --- a/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/restart/MLModelDeploymentFullClusterRestartIT.java @@ -109,7 +109,6 @@ public void testDeploymentSurvivesRestart() throws Exception { assertBusy(() -> { try { assertInfer(modelId); - assertNewInfer(modelId); } catch (ResponseException e) { // assertBusy only loops on AssertionErrors, so we have // to convert failure status exceptions to these @@ -139,11 +138,6 @@ private void waitForDeploymentStarted(String modelId) throws Exception { private void assertInfer(String modelId) throws IOException { Response inference = infer("my words", modelId); - assertThat(EntityUtils.toString(inference.getEntity()), equalTo("{\"predicted_value\":[[1.0,1.0]]}")); - } - - private void assertNewInfer(String modelId) throws IOException { - Response inference = newInfer("my words", modelId); assertThat(EntityUtils.toString(inference.getEntity()), equalTo("{\"inference_results\":[{\"predicted_value\":[[1.0,1.0]]}]}")); } @@ -236,18 +230,6 @@ private Response getTrainedModelStats(String modelId) throws IOException { } private Response infer(String input, String modelId) throws IOException { - Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/deployment/_infer"); - request.setJsonEntity(Strings.format(""" 
- { "docs": [{"input":"%s"}] } - """, input)); - - request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); - var response = client().performRequest(request); - assertOK(response); - return response; - } - - private Response newInfer(String input, String modelId) throws IOException { Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer"); request.setJsonEntity(Strings.format(""" { "docs": [{"input":"%s"}] } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index 8c051d03d5f04..adeaeae31be45 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -125,7 +125,6 @@ public void testTrainedModelDeployment() throws Exception { waitForDeploymentStarted(modelId); assertInfer(modelId); - assertNewInfer(modelId); stopDeployment(modelId); } default -> throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); @@ -179,11 +178,6 @@ private void waitForDeploymentStarted(String modelId) throws Exception { private void assertInfer(String modelId) throws IOException { Response inference = infer("my words", modelId); - assertThat(EntityUtils.toString(inference.getEntity()), equalTo("{\"predicted_value\":[[1.0,1.0]]}")); - } - - private void assertNewInfer(String modelId) throws IOException { - Response inference = newInfer("my words", modelId); assertThat(EntityUtils.toString(inference.getEntity()), equalTo("{\"inference_results\":[{\"predicted_value\":[[1.0,1.0]]}]}")); } @@ -296,17 +290,6 @@ private Response getTrainedModelStats(String modelId) throws IOException { } private Response infer(String input, String modelId) throws IOException { - Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/deployment/_infer"); - request.setJsonEntity(Strings.format(""" - { "docs": [{"input":"%s"}] } - """, input)); - request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); - var response = client().performRequest(request); - assertOK(response); - return response; - } - - private Response newInfer(String input, String modelId) throws IOException { Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer"); request.setJsonEntity(Strings.format(""" { "docs": [{"input":"%s"}] } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index aa166311f6465..1f3db455c9ce7 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -161,17 +161,6 @@ private Response getTrainedModelStats(String modelId) throws IOException { return response; } - private Response infer(String input, String modelId) throws IOException { - Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/deployment/_infer"); - request.setJsonEntity(Strings.format(""" - { "docs": [{"input":"%s"}] } - """, input)); - request.setOptions(request.getOptions().toBuilder().setWarningsHandler(PERMISSIVE).build()); - var response = 
client().performRequest(request); - assertOK(response); - return response; - } - private void putModelDefinition(String modelId) throws IOException { Request request = new Request("PUT", "_ml/trained_models/" + modelId + "/definition/0"); request.setJsonEntity(Strings.format(""" From a06c8ea5b8d7148467860c0b283e8eda89aa776a Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 5 Mar 2025 11:21:16 -0500 Subject: [PATCH 39/54] Update node-settings.md (#123997) * Update node-settings.md Port change https://github.com/elastic/elasticsearch/pull/123939 forward to new docs system * Update docs/reference/elasticsearch/configuration-reference/node-settings.md --- .../elasticsearch/configuration-reference/node-settings.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/elasticsearch/configuration-reference/node-settings.md b/docs/reference/elasticsearch/configuration-reference/node-settings.md index 3a9634a37fc6b..47f8fa18ff453 100644 --- a/docs/reference/elasticsearch/configuration-reference/node-settings.md +++ b/docs/reference/elasticsearch/configuration-reference/node-settings.md @@ -46,7 +46,7 @@ The following additional roles are available: * `voting_only` -$$$coordinating-only-node$$$If you leave `node.roles` unset, then the node is considered to be a [coordinating only node](docs-content://deploy-manage/distributed-architecture/clusters-nodes-shards/node-roles.md#coordinating-only-node-role). +$$$coordinating-only-node$$$If you set `node.roles` to an empty array (`node.roles: [ ]`), then the node is considered to be a [coordinating only node](docs-content://deploy-manage/distributed-architecture/clusters-nodes-shards/node-roles.md#coordinating-only-node-role). ::::{important} If you set `node.roles`, ensure you specify every node role your cluster needs.
Every cluster requires the following node roles: From ecb3d21b29da363748901f1f73231d4511af749a Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Wed, 5 Mar 2025 18:35:35 +0200 Subject: [PATCH 40/54] ESQL: Use a must boolean statement when pushing down to Lucene when scoring is also needed (#124001) * Use a "must" instead of "filter" when building the pushed down filter AND when scoring is needed --- docs/changelog/124001.yaml | 7 ++ .../core/expression/MetadataAttribute.java | 2 +- .../xpack/esql/plugin/MatchOperatorIT.java | 117 ++++++++++++++++++ .../physical/local/PushFiltersToSource.java | 3 +- .../local/ReplaceSourceAttributes.java | 33 +++-- .../xpack/esql/plan/physical/EsQueryExec.java | 10 ++ .../esql/plan/physical/EsStatsQueryExec.java | 1 - .../planner/EsPhysicalOperationProviders.java | 5 +- 8 files changed, 159 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/124001.yaml diff --git a/docs/changelog/124001.yaml b/docs/changelog/124001.yaml new file mode 100644 index 0000000000000..374a7ad7efb58 --- /dev/null +++ b/docs/changelog/124001.yaml @@ -0,0 +1,7 @@ +pr: 124001 +summary: Use a must boolean statement when pushing down to Lucene when scoring is + also needed +area: ES|QL +type: bug +issues: + - 123967 diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java index dc75ac3a96248..a07e2d9589034 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MetadataAttribute.java @@ -29,7 +29,7 @@ import static org.elasticsearch.core.Tuple.tuple; public class MetadataAttribute extends TypedAttribute { - public static final String TIMESTAMP_FIELD = "@timestamp"; + public static final String TIMESTAMP_FIELD = "@timestamp"; // this is not a true metadata attribute public static final String TSID_FIELD = "_tsid"; public static final String SCORE = "_score"; public static final String INDEX = "_index"; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index c978dead8f4fd..216786798ad34 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -11,12 +11,16 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; import org.junit.Before; import java.util.List; +import static org.elasticsearch.index.query.QueryBuilders.boolQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.CoreMatchers.containsString; @@ -120,6 +124,119 @@ public void testWhereMatchWithScoring() { } } + /** + * Test for https://github.com/elastic/elasticsearch/issues/123967 + */ + 
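// Illustrative sketch only, not part of this change: in an Elasticsearch bool query, clauses added
// with filter() run in filter context and contribute 0 to _score, while clauses added with must()
// are scored. That is why the pushed-down ES|QL condition is combined with a MUST clause when the
// query requests _score, and with FILTER otherwise. The helper name below is hypothetical; the
// field and value mirror the test data in this class, and QueryBuilder/QueryBuilders are the
// imports added above in this patch.
private static void mustVersusFilterScoringSketch() {
    QueryBuilder scored = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("content", "fox"));     // contributes to _score
    QueryBuilder unscored = QueryBuilders.boolQuery().filter(QueryBuilders.matchQuery("content", "fox")); // always scores 0
}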
public void testWhereMatchWithScoring_AndRequestFilter() { + var query = """ + FROM test METADATA _score + | WHERE content:"fox" + | SORT _score DESC + | KEEP content, _score + """; + + QueryBuilder filter = boolQuery().must(matchQuery("content", "brown")); + + try (var resp = run(query, randomPragmas(), filter)) { + assertColumnNames(resp.columns(), List.of("content", "_score")); + assertColumnTypes(resp.columns(), List.of("text", "double")); + assertValues( + resp.values(), + List.of( + List.of("This is a brown fox", 1.4274532794952393), + List.of("The quick brown fox jumps over the lazy dog", 1.1248724460601807) + ) + ); + } + } + + public void testWhereMatchWithScoring_AndNoScoreRequestFilter() { + var query = """ + FROM test METADATA _score + | WHERE content:"fox" + | SORT _score DESC + | KEEP content, _score + """; + + QueryBuilder filter = boolQuery().filter(matchQuery("content", "brown")); + + try (var resp = run(query, randomPragmas(), filter)) { + assertColumnNames(resp.columns(), List.of("content", "_score")); + assertColumnTypes(resp.columns(), List.of("text", "double")); + assertValues( + resp.values(), + List.of( + List.of("This is a brown fox", 1.156558871269226), + List.of("The quick brown fox jumps over the lazy dog", 0.9114001989364624) + ) + ); + } + } + + public void testWhereMatchWithScoring_And_MatchAllRequestFilter() { + var query = """ + FROM test METADATA _score + | WHERE content:"fox" + | SORT _score DESC + | KEEP content, _score + """; + + QueryBuilder filter = QueryBuilders.matchAllQuery(); + + try (var resp = run(query, randomPragmas(), filter)) { + assertColumnNames(resp.columns(), List.of("content", "_score")); + assertColumnTypes(resp.columns(), List.of("text", "double")); + assertValues( + resp.values(), + List.of( + List.of("This is a brown fox", 2.1565589904785156), + List.of("The quick brown fox jumps over the lazy dog", 1.9114001989364624) + ) + ); + } + } + + public void testScoringOutsideQuery() { + var query = """ + FROM test METADATA _score + | SORT _score DESC + | KEEP content, _score + """; + + QueryBuilder filter = boolQuery().must(matchQuery("content", "fox")); + + try (var resp = run(query, randomPragmas(), filter)) { + assertColumnNames(resp.columns(), List.of("content", "_score")); + assertColumnTypes(resp.columns(), List.of("text", "double")); + assertValues( + resp.values(), + List.of( + List.of("This is a brown fox", 1.156558871269226), + List.of("The quick brown fox jumps over the lazy dog", 0.9114001989364624) + ) + ); + } + } + + public void testScoring_Zero_OutsideQuery() { + var query = """ + FROM test METADATA _score + | SORT _score DESC + | KEEP content, _score + """; + + QueryBuilder filter = boolQuery().filter(matchQuery("content", "fox")); + + try (var resp = run(query, randomPragmas(), filter)) { + assertColumnNames(resp.columns(), List.of("content", "_score")); + assertColumnTypes(resp.columns(), List.of("text", "double")); + assertValues( + resp.values(), + List.of(List.of("This is a brown fox", 0.0), List.of("The quick brown fox jumps over the lazy dog", 0.0)) + ); + } + } + public void testWhereMatchWithScoringDifferentSort() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 2f28b1a0e41ba..f902f261e7dc9 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -101,7 +101,8 @@ private static PhysicalPlan rewrite( if (newPushable.size() > 0) { // update the executable with pushable conditions Query queryDSL = TRANSLATOR_HANDLER.asQuery(Predicates.combineAnd(newPushable)); QueryBuilder planQuery = queryDSL.asBuilder(); - var query = Queries.combine(Queries.Clause.FILTER, asList(queryExec.query(), planQuery)); + Queries.Clause combiningQueryClauseType = queryExec.hasScoring() ? Queries.Clause.MUST : Queries.Clause.FILTER; + var query = Queries.combine(combiningQueryClauseType, asList(queryExec.query(), planQuery)); queryExec = new EsQueryExec( queryExec.source(), queryExec.indexPattern(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java index 4f3358c539b05..4730f561348c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java @@ -32,27 +32,36 @@ protected PhysicalPlan rule(EsSourceExec plan) { var docId = new FieldAttribute(plan.source(), EsQueryExec.DOC_ID_FIELD.getName(), EsQueryExec.DOC_ID_FIELD); final List attributes = new ArrayList<>(); attributes.add(docId); - if (plan.indexMode() == IndexMode.TIME_SERIES) { - Attribute tsid = null, timestamp = null; - for (Attribute attr : plan.output()) { - String name = attr.name(); - if (name.equals(MetadataAttribute.TSID_FIELD)) { + + var outputIterator = plan.output().iterator(); + var isTimeSeries = plan.indexMode() == IndexMode.TIME_SERIES; + var keepIterating = true; + Attribute tsid = null, timestamp = null, score = null; + + while (keepIterating && outputIterator.hasNext()) { + Attribute attr = outputIterator.next(); + if (attr instanceof MetadataAttribute ma) { + if (ma.name().equals(MetadataAttribute.SCORE)) { + score = attr; + } else if (isTimeSeries && ma.name().equals(MetadataAttribute.TSID_FIELD)) { tsid = attr; - } else if (name.equals(MetadataAttribute.TIMESTAMP_FIELD)) { - timestamp = attr; } + } else if (attr.name().equals(MetadataAttribute.TIMESTAMP_FIELD)) { + timestamp = attr; } + keepIterating = score == null || (isTimeSeries && (tsid == null || timestamp == null)); + } + if (isTimeSeries) { if (tsid == null || timestamp == null) { throw new IllegalStateException("_tsid or @timestamp are missing from the time-series source"); } attributes.add(tsid); attributes.add(timestamp); } - plan.output().forEach(attr -> { - if (attr instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE)) { - attributes.add(ma); - } - }); + if (score != null) { + attributes.add(score); + } + return new EsQueryExec(plan.source(), plan.indexPattern(), plan.indexMode(), plan.indexNameWithModes(), attributes, plan.query()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java index a3fc62d935795..60e7eb535f444 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.NodeUtils; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -204,6 +205,15 @@ public static boolean isSourceAttribute(Attribute attr) { return DOC_ID_FIELD.getName().equals(attr.name()); } + public boolean hasScoring() { + for (Attribute a : attrs()) { + if (a instanceof MetadataAttribute && a.name().equals(MetadataAttribute.SCORE)) { + return true; + } + } + return false; + } + @Override protected NodeInfo info() { return NodeInfo.create( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java index 96214652b87cb..5519e7fbc7083 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java @@ -37,7 +37,6 @@ public enum StatsType { } public record Stat(String name, StatsType type, QueryBuilder query) { - public QueryBuilder filter(QueryBuilder sourceQuery) { return query == null ? sourceQuery : Queries.combine(Queries.Clause.FILTER, asList(sourceQuery, query)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 10c380f2db56d..e1e296fc12de9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -49,7 +49,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.KeywordEsField; import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; @@ -186,9 +185,7 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, assert esQueryExec.estimatedRowSize() != null : "estimated row size not initialized"; int rowEstimatedSize = esQueryExec.estimatedRowSize(); int limit = esQueryExec.limit() != null ? 
(Integer) esQueryExec.limit().fold(context.foldCtx()) : NO_LIMIT; - boolean scoring = esQueryExec.attrs() - .stream() - .anyMatch(a -> a instanceof MetadataAttribute && a.name().equals(MetadataAttribute.SCORE)); + boolean scoring = esQueryExec.hasScoring(); if ((sorts != null && sorts.isEmpty() == false)) { List> sortBuilders = new ArrayList<>(sorts.size()); for (Sort sort : sorts) { From 0290b1ce2893921e875f33b524420c0460ec948b Mon Sep 17 00:00:00 2001 From: Valentin Crettaz Date: Wed, 5 Mar 2025 17:44:41 +0100 Subject: [PATCH 41/54] =?UTF-8?q?[Stack=20Monitoring]=20[REVERT]=C2=A0Upda?= =?UTF-8?q?te=20stack=20monitoring=20templates=20for=20Stack=209=20release?= =?UTF-8?q?=20(#124112)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [REVERT] Update stack monitoring templates for Stack 9 release --- .../common/logging/HeaderWarningTests.java | 6 +++--- .../main/resources/monitoring-ent-search-mb.json | 2 +- .../main/resources/monitoring-mb-ilm-policy.json | 2 +- .../monitoring/MonitoringTemplateRegistry.java | 16 ++++++++-------- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java index 4d194462d9601..9f817d45f4930 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java @@ -315,9 +315,9 @@ public void testAddComplexWarning() { + "profiling-symbols,synthetics] with patterns (.deprecation-indexing-template => [.logs-deprecation.*]," + ".fleet-file-data => [.fleet-file-data-*-*],.fleet-files => [.fleet-files-*-*],.ml-anomalies- => [.ml-anomalies-*]," + ".ml-notifications-000002 => [.ml-notifications-000002],.ml-state => [.ml-state*],.ml-stats => [.ml-stats-*]," - + ".monitoring-beats-mb => [.monitoring-beats-9-*],.monitoring-ent-search-mb => [.monitoring-ent-search-8-*]," - + ".monitoring-es-mb => [.monitoring-es-9-*],.monitoring-kibana-mb => [.monitoring-kibana-9-*]," - + ".monitoring-logstash-mb => [.monitoring-logstash-9-*],.profiling-ilm-lock => [.profiling-ilm-lock*]," + + ".monitoring-beats-mb => [.monitoring-beats-8-*],.monitoring-ent-search-mb => [.monitoring-ent-search-8-*]," + + ".monitoring-es-mb => [.monitoring-es-8-*],.monitoring-kibana-mb => [.monitoring-kibana-8-*]," + + ".monitoring-logstash-mb => [.monitoring-logstash-8-*],.profiling-ilm-lock => [.profiling-ilm-lock*]," + ".slm-history => [.slm-history-7*],.watch-history-16 => [.watcher-history-16*]," + "behavioral_analytics-events-default => [behavioral_analytics-events-*],ilm-history => [ilm-history-7*]," + "logs => [logs-*-*],metrics => [metrics-*-*],profiling-events => [profiling-events*],profiling-executables => " diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json index 1f5f5b8ab7a96..d80738a1df54e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-ent-search-mb.json @@ -1,5 +1,5 @@ { - "index_patterns": [".monitoring-ent-search-8-*"], + "index_patterns": [".monitoring-ent-search-${xpack.stack.monitoring.template.version}-*"], "version": ${xpack.stack.monitoring.template.release.version}, "_meta": { "description": "Template used by 
Enterprise Search Metricbeat module monitoring information for Stack Monitoring", diff --git a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json index 8540cbfbaba38..8ef931aa2f12f 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/monitoring-mb-ilm-policy.json @@ -23,7 +23,7 @@ } }, "_meta": { - "description": "Index lifecycle policy generated for [monitoring-*-9] data streams", + "description": "Index lifecycle policy generated for [monitoring-*-8] data streams", "defaults": { "delete_min_age": "Using value of [${xpack.stack.monitoring.history.duration}] based on ${xpack.stack.monitoring.history.duration.reason}" } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index d9d20852f5a44..6a76d6749489a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -38,7 +38,7 @@ * * This template registry manages templates for two purposes: * 1) Internal Monitoring Collection (.monitoring-{product}-7-*) - * 2) Stack Monitoring templates for bridging ECS format data to legacy monitoring data (.monitoring-{product}-9-*) + * 2) Stack Monitoring templates for bridging ECS format data to legacy monitoring data (.monitoring-{product}-8-*) */ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { private static final Logger logger = LogManager.getLogger(MonitoringTemplateRegistry.class); @@ -73,13 +73,13 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { private static final String MONITORING_POLICY_RETENTION_REASON_VARIABLE = "xpack.stack.monitoring.history.duration.reason"; /** - * The stack monitoring template registry version. This is the version id for templates used by Metricbeat in versions >8.x. Metricbeat + * The stack monitoring template registry version. This is the version id for templates used by Metricbeat in version 8.x. Metricbeat * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. 
*/ - public static final int STACK_MONITORING_REGISTRY_VERSION = 9_00_00_99 + 1; + public static final int STACK_MONITORING_REGISTRY_VERSION = 8_00_00_99 + 21; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; - private static final String STACK_TEMPLATE_VERSION = "9"; + private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; private static final Map STACK_TEMPLATE_VARIABLES = Map.of( STACK_TEMPLATE_VERSION_VARIABLE, @@ -159,7 +159,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { ); ////////////////////////////////////////////////////////// - // Beats metricbeat template (for matching ".monitoring-beats-${version}-*" indices) + // Beats metricbeat template (for matching ".monitoring-beats-8-*" indices) ////////////////////////////////////////////////////////// public static final String BEATS_STACK_INDEX_TEMPLATE_NAME = ".monitoring-beats-mb"; public static final IndexTemplateConfig BEATS_STACK_INDEX_TEMPLATE = new IndexTemplateConfig( @@ -171,7 +171,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { ); ////////////////////////////////////////////////////////// - // ES metricbeat template (for matching ".monitoring-es-${version}-*" indices) + // ES metricbeat template (for matching ".monitoring-es-8-*" indices) ////////////////////////////////////////////////////////// public static final String ES_STACK_INDEX_TEMPLATE_NAME = ".monitoring-es-mb"; public static final IndexTemplateConfig ES_STACK_INDEX_TEMPLATE = new IndexTemplateConfig( @@ -183,7 +183,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { ); ////////////////////////////////////////////////////////// - // Kibana metricbeat template (for matching ".monitoring-kibana-${version}-*" indices) + // Kibana metricbeat template (for matching ".monitoring-kibana-8-*" indices) ////////////////////////////////////////////////////////// public static final String KIBANA_STACK_INDEX_TEMPLATE_NAME = ".monitoring-kibana-mb"; public static final IndexTemplateConfig KIBANA_STACK_INDEX_TEMPLATE = new IndexTemplateConfig( @@ -195,7 +195,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { ); ////////////////////////////////////////////////////////// - // Logstash metricbeat template (for matching ".monitoring-logstash-${version}-*" indices) + // Logstash metricbeat template (for matching ".monitoring-logstash-8-*" indices) ////////////////////////////////////////////////////////// public static final String LOGSTASH_STACK_INDEX_TEMPLATE_NAME = ".monitoring-logstash-mb"; public static final IndexTemplateConfig LOGSTASH_STACK_INDEX_TEMPLATE = new IndexTemplateConfig( From e6d6d30360081bb9b238b9d4b4066ce46026b950 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Wed, 5 Mar 2025 18:20:27 +0100 Subject: [PATCH 42/54] Revert "Mute org.elasticsearch.test.apmintegration.MetricsApmIT testApmIntegration #124106" (#124108) This reverts commit 152d086c0f3c2bb8198bed111ca2a530a9ef661f. 
--- muted-tests.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a33d6debab573..fe9702984adbe 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -333,9 +333,6 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120814 -- class: org.elasticsearch.test.apmintegration.MetricsApmIT - method: testApmIntegration - issue: https://github.com/elastic/elasticsearch/issues/124106 # Examples: # From 98f80057049bd598a983a0033e069ff38631368f Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 5 Mar 2025 09:25:22 -0800 Subject: [PATCH 43/54] Generate compatible versions artifact in distributions dir (#124119) --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 71f8f8df9e8d7..d0e0739c7b1df 100644 --- a/build.gradle +++ b/build.gradle @@ -241,7 +241,7 @@ def generateUpgradeCompatibilityFile = tasks.register("generateUpgradeCompatibil } def upgradeCompatibilityZip = tasks.register("upgradeCompatibilityZip", Zip) { - archiveFile.set(project.layout.buildDirectory.file("rolling-upgrade-compatible-${VersionProperties.elasticsearch}.zip")) + archiveFile.set(project.layout.buildDirectory.file("distributions/rolling-upgrade-compatible-${VersionProperties.elasticsearch}.zip")) from(generateUpgradeCompatibilityFile) } From 64c08550af9c34460e36cbb87ffc2ca15482b5c2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 6 Mar 2025 04:34:59 +1100 Subject: [PATCH 44/54] Mute org.elasticsearch.xpack.esql.plugin.MatchOperatorIT testScoring_Zero_OutsideQuery #124132 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index fe9702984adbe..78662291b7286 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -333,6 +333,9 @@ tests: - class: org.elasticsearch.smoketest.MlWithSecurityIT method: test {yaml=ml/3rd_party_deployment/Test start deployment fails while model download in progress} issue: https://github.com/elastic/elasticsearch/issues/120814 +- class: org.elasticsearch.xpack.esql.plugin.MatchOperatorIT + method: testScoring_Zero_OutsideQuery + issue: https://github.com/elastic/elasticsearch/issues/124132 # Examples: # From 65a8e778e3ebca2b1a5f0dc20796fe165dbaf4fc Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Wed, 5 Mar 2025 13:19:12 -0500 Subject: [PATCH 45/54] Cleanup RegisteredDomainProcessorTests (#124118) --- .../RegisteredDomainProcessorTests.java | 152 +++++++++--------- 1 file changed, 75 insertions(+), 77 deletions(-) diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RegisteredDomainProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RegisteredDomainProcessorTests.java index 0a0666de9b014..7f23e81cce48d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RegisteredDomainProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RegisteredDomainProcessorTests.java @@ -13,52 +13,45 @@ import org.elasticsearch.ingest.TestIngestDocument; import org.elasticsearch.test.ESTestCase; +import java.util.Collections; import java.util.Map; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; +import static java.util.Map.entry; +import static 
org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.is; /** * Test parsing of an eTLD from a FQDN. The list of eTLDs is maintained here: * https://github.com/publicsuffix/list/blob/master/public_suffix_list.dat - * - * Effective TLDs (eTLS) are not the same as DNS TLDs. Uses for eTLDs are listed here. + *

    + * Effective TLDs (eTLDs) are not the same as DNS TLDs. Uses for eTLDs are listed here: * https://publicsuffix.org/learn/ */ public class RegisteredDomainProcessorTests extends ESTestCase { - private Map buildEvent(String domain) { - return Map.of("domain", domain); - } public void testBasic() throws Exception { - testRegisteredDomainProcessor(buildEvent("www.google.com"), "www.google.com", "google.com", "com", "www"); - testRegisteredDomainProcessor(buildEvent("google.com"), "google.com", "google.com", "com", null); - testRegisteredDomainProcessor(buildEvent(""), null, null, null, null); - testRegisteredDomainProcessor(buildEvent("."), null, null, null, null); - testRegisteredDomainProcessor(buildEvent("$"), null, null, null, null); - testRegisteredDomainProcessor(buildEvent("foo.bar.baz"), null, null, null, null); - testRegisteredDomainProcessor(buildEvent("www.books.amazon.co.uk"), "www.books.amazon.co.uk", "amazon.co.uk", "co.uk", "www.books"); + testRegisteredDomainProcessor("www.google.com", "www.google.com", "google.com", "com", "www"); + testRegisteredDomainProcessor("google.com", "google.com", "google.com", "com", null); + testRegisteredDomainProcessor("", null, null, null, null); + testRegisteredDomainProcessor(".", null, null, null, null); + testRegisteredDomainProcessor("$", null, null, null, null); + testRegisteredDomainProcessor("foo.bar.baz", null, null, null, null); + testRegisteredDomainProcessor("www.books.amazon.co.uk", "www.books.amazon.co.uk", "amazon.co.uk", "co.uk", "www.books"); // Verify "com" is returned as the eTLD, for that FQDN or subdomain - testRegisteredDomainProcessor(buildEvent("com"), "com", null, "com", null); - testRegisteredDomainProcessor(buildEvent("example.com"), "example.com", "example.com", "com", null); - testRegisteredDomainProcessor(buildEvent("googleapis.com"), "googleapis.com", "googleapis.com", "com", null); + testRegisteredDomainProcessor("com", "com", null, "com", null); + testRegisteredDomainProcessor("example.com", "example.com", "example.com", "com", null); + testRegisteredDomainProcessor("googleapis.com", "googleapis.com", "googleapis.com", "com", null); testRegisteredDomainProcessor( - buildEvent("content-autofill.googleapis.com"), + "content-autofill.googleapis.com", "content-autofill.googleapis.com", "googleapis.com", "com", "content-autofill" ); // Verify "ssl.fastly.net" is returned as the eTLD, for that FQDN or subdomain + testRegisteredDomainProcessor("global.ssl.fastly.net", "global.ssl.fastly.net", "global.ssl.fastly.net", "ssl.fastly.net", null); testRegisteredDomainProcessor( - buildEvent("global.ssl.fastly.net"), - "global.ssl.fastly.net", - "global.ssl.fastly.net", - "ssl.fastly.net", - null - ); - testRegisteredDomainProcessor( - buildEvent("1.www.global.ssl.fastly.net"), + "1.www.global.ssl.fastly.net", "1.www.global.ssl.fastly.net", "global.ssl.fastly.net", "ssl.fastly.net", @@ -67,76 +60,81 @@ public void testBasic() throws Exception { } public void testUseRoot() throws Exception { - Map source = buildEvent("www.google.co.uk"); - - String domainField = "domain"; - String registeredDomainField = "registered_domain"; - String topLevelDomainField = "top_level_domain"; - String subdomainField = "subdomain"; - var processor = new RegisteredDomainProcessor(null, null, "domain", "", false); - - IngestDocument input = TestIngestDocument.withDefaultVersion(source); - IngestDocument output = processor.execute(input); - - String domain = output.getFieldValue(domainField, String.class); - assertThat(domain, 
equalTo("www.google.co.uk")); - String registeredDomain = output.getFieldValue(registeredDomainField, String.class); - assertThat(registeredDomain, equalTo("google.co.uk")); - String eTLD = output.getFieldValue(topLevelDomainField, String.class); - assertThat(eTLD, equalTo("co.uk")); - String subdomain = output.getFieldValue(subdomainField, String.class); - assertThat(subdomain, equalTo("www")); + IngestDocument document = TestIngestDocument.withDefaultVersion(Map.of("domain", "www.google.co.uk")); + processor.execute(document); + assertThat( + document.getSource(), + is( + Map.ofEntries( + entry("domain", "www.google.co.uk"), + entry("registered_domain", "google.co.uk"), + entry("top_level_domain", "co.uk"), + entry("subdomain", "www") + ) + ) + ); } public void testError() throws Exception { - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> testRegisteredDomainProcessor(buildEvent("foo.bar.baz"), null, null, null, null, false) - ); - assertThat(e.getMessage(), containsString("unable to set domain information for document")); - e = expectThrows( - IllegalArgumentException.class, - () -> testRegisteredDomainProcessor(buildEvent("$"), null, null, null, null, false) - ); - assertThat(e.getMessage(), containsString("unable to set domain information for document")); + var processor = new RegisteredDomainProcessor(null, null, "domain", "", false); + + { + IngestDocument document = TestIngestDocument.withDefaultVersion(Map.of("domain", "foo.bar.baz")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(document)); + assertThat(e.getMessage(), is("unable to set domain information for document")); + assertThat(document.getSource(), is(Map.of("domain", "foo.bar.baz"))); + } + + { + IngestDocument document = TestIngestDocument.withDefaultVersion(Map.of("domain", "$")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(document)); + assertThat(e.getMessage(), is("unable to set domain information for document")); + assertThat(document.getSource(), is(Map.of("domain", "$"))); + } } - private void testRegisteredDomainProcessor( - Map source, - String expectedDomain, - String expectedRegisteredDomain, - String expectedETLD, - String expectedSubdomain - ) throws Exception { - testRegisteredDomainProcessor(source, expectedDomain, expectedRegisteredDomain, expectedETLD, expectedSubdomain, true); + public void testIgnoreMissing() throws Exception { + { + var processor = new RegisteredDomainProcessor(null, null, "domain", "", false); + IngestDocument document = TestIngestDocument.withDefaultVersion(Map.of()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(document)); + assertThat(e.getMessage(), is("field [domain] not present as part of path [domain]")); + assertThat(document.getSource(), is(anEmptyMap())); + } + + { + var processor = new RegisteredDomainProcessor(null, null, "domain", "", true); + IngestDocument document = TestIngestDocument.withDefaultVersion(Collections.singletonMap("domain", null)); + processor.execute(document); + assertThat(document.getSource(), is(Collections.singletonMap("domain", null))); + } } private void testRegisteredDomainProcessor( - Map source, + String fqdn, String expectedDomain, String expectedRegisteredDomain, String expectedETLD, - String expectedSubdomain, - boolean ignoreMissing + String expectedSubdomain ) throws Exception { String domainField = "url.domain"; String 
registeredDomainField = "url.registered_domain"; String topLevelDomainField = "url.top_level_domain"; String subdomainField = "url.subdomain"; - var processor = new RegisteredDomainProcessor(null, null, "domain", "url", ignoreMissing); + var processor = new RegisteredDomainProcessor(null, null, "domain", "url", true); - IngestDocument input = TestIngestDocument.withDefaultVersion(source); - IngestDocument output = processor.execute(input); + IngestDocument document = TestIngestDocument.withDefaultVersion(Map.of("domain", fqdn)); + processor.execute(document); - String domain = output.getFieldValue(domainField, String.class, expectedDomain == null); - assertThat(domain, equalTo(expectedDomain)); - String registeredDomain = output.getFieldValue(registeredDomainField, String.class, expectedRegisteredDomain == null); - assertThat(registeredDomain, equalTo(expectedRegisteredDomain)); - String eTLD = output.getFieldValue(topLevelDomainField, String.class, expectedETLD == null); - assertThat(eTLD, equalTo(expectedETLD)); - String subdomain = output.getFieldValue(subdomainField, String.class, expectedSubdomain == null); - assertThat(subdomain, equalTo(expectedSubdomain)); + String domain = document.getFieldValue(domainField, String.class, expectedDomain == null); + assertThat(domain, is(expectedDomain)); + String registeredDomain = document.getFieldValue(registeredDomainField, String.class, expectedRegisteredDomain == null); + assertThat(registeredDomain, is(expectedRegisteredDomain)); + String eTLD = document.getFieldValue(topLevelDomainField, String.class, expectedETLD == null); + assertThat(eTLD, is(expectedETLD)); + String subdomain = document.getFieldValue(subdomainField, String.class, expectedSubdomain == null); + assertThat(subdomain, is(expectedSubdomain)); } } From a92b1d6892758074eed617f9b736b294220bf230 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 5 Mar 2025 14:19:21 -0500 Subject: [PATCH 46/54] Adjust exception thrown when unable to load hunspell dict (#123743) On index creation, its possible to configure an hunspell analyzer, but reference a locale file that actually doesn't exist or isn't accessible. This error, like our other user dictionary errors, should be an IAE not an ISE. closes: https://github.com/elastic/elasticsearch/issues/123729 --- docs/changelog/123743.yaml | 5 ++++ modules/analysis-common/build.gradle | 3 +-- .../test/indices.analyze/15_analyze.yml | 12 +++------ .../test/indices.create/10_basic.yml | 27 +++++++++++++++++++ .../indices/analysis/HunspellService.java | 2 +- .../indices/CreateIndexCapabilities.java | 5 +++- .../indices/analyze/HunspellServiceTests.java | 4 +-- 7 files changed, 44 insertions(+), 14 deletions(-) create mode 100644 docs/changelog/123743.yaml diff --git a/docs/changelog/123743.yaml b/docs/changelog/123743.yaml new file mode 100644 index 0000000000000..50fccfd6030ae --- /dev/null +++ b/docs/changelog/123743.yaml @@ -0,0 +1,5 @@ +pr: 123743 +summary: Adjust exception thrown when unable to load hunspell dict +area: Analysis +type: bug +issues: [] diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 0c8821f29dbf1..a2d00b5276a02 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -6,8 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
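A brief sketch of why PR #123743 swaps IllegalStateException for IllegalArgumentException (this relies on the standard server-side status mapping and is an assumption, not code from the patch): the REST layer derives the HTTP status from the exception type, so a dictionary-loading failure wrapped in an IllegalArgumentException surfaces as 400 Bad Request, while an IllegalStateException falls through to 500.

    // Assumed mapping via org.elasticsearch.ExceptionsHelper and org.elasticsearch.rest.RestStatus (sketch only)
    static void hunspellStatusSketch() {
        assert ExceptionsHelper.status(new IllegalArgumentException("failed to load hunspell dictionary for locale: en_US")) == RestStatus.BAD_REQUEST;          // 400, matching the "hunspell_dict_400" capability
        assert ExceptionsHelper.status(new IllegalStateException("failed to load hunspell dictionary for locale: en_US")) == RestStatus.INTERNAL_SERVER_ERROR;   // 500, the old behaviour
    }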
*/ -import org.elasticsearch.gradle.Version - apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' @@ -36,6 +34,7 @@ artifacts { tasks.named("yamlRestCompatTestTransform").configure { task -> task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") + task.skipTest("indices.analyze/15_analyze/Custom analyzer is not buildable", "error response changed with #123743") } tasks.named("yamlRestTest").configure { diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml index 24e04174cd1e4..72d74fa51d6af 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/15_analyze.yml @@ -64,11 +64,11 @@ "Custom analyzer is not buildable": - requires: test_runner_features: [ capabilities ] - reason: This capability required to run test capabilities: - - method: GET - path: /_analyze - capabilities: [ wrong_custom_analyzer_returns_400 ] + - method: PUT + path: /{index} + capabilities: [ hunspell_dict_400 ] + reason: "bugfix 'hunspell_dict_400' capability required" - do: catch: bad_request @@ -80,7 +80,3 @@ filter: type: hunspell locale: en_US - - - match: { status: 400 } - - match: { error.type: illegal_argument_exception } - - match: { error.reason: "Can not build a custom analyzer" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml index 7fe95aa4f4ff1..dc5fcf1596e89 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -273,3 +273,30 @@ - match: { error.type: "mapper_parsing_exception" } - match: { error.reason: "Failed to parse mapping: The mapper type [invalid] declared on field [raw] does not exist. It might have been created within a future version or requires a plugin to be installed. Check the documentation." 
} +--- +"Create index with hunspell missing dict": + - requires: + test_runner_features: [ capabilities ] + capabilities: + - method: PUT + path: /{index} + capabilities: [ hunspell_dict_400 ] + reason: "bugfix 'hunspell_dict_400' capability required" + + - do: + catch: bad_request + indices.create: + index: bad_hunspell_index + body: + settings: + analysis: + analyzer: + en: + tokenizer: standard + filter: + - my_en_US_dict_stemmer + filter: + my_en_US_dict_stemmer: + type: hunspell + locale: en_US + dedup: false diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java index bfe1cd9b28de1..ab6e20c39f720 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/HunspellService.java @@ -99,7 +99,7 @@ public HunspellService(final Settings settings, final Environment env, final Map try { return loadDictionary(locale, settings, env); } catch (Exception e) { - throw new IllegalStateException("failed to load hunspell dictionary for locale: " + locale, e); + throw new IllegalArgumentException("failed to load hunspell dictionary for locale: " + locale, e); } }; if (HUNSPELL_LAZY_LOAD.get(settings) == false) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java index 334e68648d853..928c872b6ad71 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/CreateIndexCapabilities.java @@ -28,9 +28,12 @@ public class CreateIndexCapabilities { private static final String NESTED_DENSE_VECTOR_SYNTHETIC_TEST = "nested_dense_vector_synthetic_test"; + private static final String HUNSPELL_DICT_400 = "hunspell_dict_400"; + public static final Set CAPABILITIES = Set.of( LOGSDB_INDEX_MODE_CAPABILITY, LOOKUP_INDEX_MODE_CAPABILITY, - NESTED_DENSE_VECTOR_SYNTHETIC_TEST + NESTED_DENSE_VECTOR_SYNTHETIC_TEST, + HUNSPELL_DICT_400 ); } diff --git a/server/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java b/server/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java index 1a27954eed98b..8530fd21ea77d 100644 --- a/server/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analyze/HunspellServiceTests.java @@ -64,7 +64,7 @@ public void testDicWithNoAff() throws Exception { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { final Environment environment = new Environment(settings, getDataPath("/indices/analyze/no_aff_conf_dir")); new HunspellService(settings, environment, emptyMap()).getDictionary("en_US"); }); @@ -78,7 +78,7 @@ public void testDicWithTwoAffs() throws Exception { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { final Environment environment = new Environment(settings, getDataPath("/indices/analyze/two_aff_conf_dir")); new HunspellService(settings, environment, 
emptyMap()).getDictionary("en_US"); }); From 99262c6256465fbcfbce8a549dfc9529c7b25edd Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Wed, 5 Mar 2025 11:43:47 -0800 Subject: [PATCH 47/54] Use FallbackSyntheticSourceBlockLoader for boolean and date fields (#124050) --- docs/changelog/124050.yaml | 5 + .../mapper/extras/ScaledFloatFieldMapper.java | 2 +- .../index/mapper/BooleanFieldMapper.java | 66 ++++++++- .../index/mapper/DateFieldMapper.java | 80 ++++++++++- .../FallbackSyntheticSourceBlockLoader.java | 4 +- .../index/mapper/KeywordFieldMapper.java | 2 +- .../fielddata/IndexFieldDataServiceTests.java | 2 +- .../index/mapper/BooleanFieldTypeTests.java | 1 + .../index/mapper/DateFieldTypeTests.java | 2 + .../BooleanFieldBlockLoaderTests.java | 73 ++++++++++ .../DateFieldBlockLoaderTests.java | 90 ++++++++++++ .../KeywordFieldBlockLoaderTests.java | 1 - .../index/mapper/BlockLoaderTestCase.java | 10 ++ .../NumberFieldBlockLoaderTestCase.java | 2 +- .../logsdb/datageneration/FieldType.java | 8 +- .../datasource/DataSourceHandler.java | 12 ++ .../datasource/DataSourceRequest.java | 21 +++ .../datasource/DataSourceResponse.java | 7 + .../DefaultMappingParametersHandler.java | 53 +++++++ .../DefaultPrimitiveTypesHandler.java | 13 ++ .../datasource/DefaultWrappersHandler.java | 16 ++- .../leaf/BooleanFieldDataGenerator.java | 52 +++++++ .../fields/leaf/DateFieldDataGenerator.java | 52 +++++++ .../matchers/source/FieldSpecificMatcher.java | 130 +++++++++++++++++- .../matchers/source/SourceMatcher.java | 1 + .../unsignedlong/UnsignedLongFieldMapper.java | 2 +- 26 files changed, 684 insertions(+), 23 deletions(-) create mode 100644 docs/changelog/124050.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/blockloader/BooleanFieldBlockLoaderTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/blockloader/DateFieldBlockLoaderTests.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/BooleanFieldDataGenerator.java create mode 100644 test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DateFieldDataGenerator.java diff --git a/docs/changelog/124050.yaml b/docs/changelog/124050.yaml new file mode 100644 index 0000000000000..352678dd4bb5a --- /dev/null +++ b/docs/changelog/124050.yaml @@ -0,0 +1,5 @@ +pr: 124050 +summary: Use `FallbackSyntheticSourceBlockLoader` for boolean and date fields +area: Mapping +type: enhancement +issues: [] diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index 17729d7c57dde..d68f417f91b42 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -342,7 +342,7 @@ public Builder builder(BlockFactory factory, int expectedCount) { private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { var nullValueAdjusted = nullValue != null ? 
adjustSourceValue(nullValue, scalingFactor) : null; - return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValue) { + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport(nullValue) { @Override public void convertValue(Object value, List accumulator) { if (coerce && value.equals("")) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 8100b83463c93..d7f4a9d4241a3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -50,6 +50,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -159,7 +160,8 @@ public BooleanFieldMapper build(MapperBuilderContext context) { nullValue.getValue(), scriptValues(), meta.getValue(), - dimension.getValue() + dimension.getValue(), + context.isSourceSynthetic() ); hasScript = script.get() != null; onScriptError = onScriptErrorParam.getValue(); @@ -188,6 +190,7 @@ public static final class BooleanFieldType extends TermBasedFieldType { private final Boolean nullValue; private final FieldValues scriptValues; private final boolean isDimension; + private final boolean isSyntheticSource; public BooleanFieldType( String name, @@ -197,12 +200,14 @@ public BooleanFieldType( Boolean nullValue, FieldValues scriptValues, Map meta, - boolean isDimension + boolean isDimension, + boolean isSyntheticSource ) { super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta); this.nullValue = nullValue; this.scriptValues = scriptValues; this.isDimension = isDimension; + this.isSyntheticSource = isSyntheticSource; } public BooleanFieldType(String name) { @@ -214,7 +219,7 @@ public BooleanFieldType(String name, boolean isIndexed) { } public BooleanFieldType(String name, boolean isIndexed, boolean hasDocValues) { - this(name, isIndexed, isIndexed, hasDocValues, false, null, Collections.emptyMap(), false); + this(name, isIndexed, isIndexed, hasDocValues, false, null, Collections.emptyMap(), false, false); } @Override @@ -251,12 +256,16 @@ protected Boolean parseSourceValue(Object value) { return (Boolean) value; } else { String textValue = value.toString(); - return Booleans.parseBoolean(textValue.toCharArray(), 0, textValue.length(), false); + return parseBoolean(textValue); } } }; } + private boolean parseBoolean(String text) { + return Booleans.parseBoolean(text.toCharArray(), 0, text.length(), false); + } + @Override public BytesRef indexedValueForSearch(Object value) { if (value == null) { @@ -304,6 +313,16 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { if (hasDocValues()) { return new BlockDocValuesReader.BooleansBlockLoader(name()); } + + if (isSyntheticSource) { + return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) { + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.booleans(expectedCount); + } + }; + } + ValueFetcher fetcher = sourceValueFetcher(blContext.sourcePaths(name())); BlockSourceReader.LeafIteratorLookup lookup = isIndexed() || isStored() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) @@ -311,6 +330,45 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { return new BlockSourceReader.BooleansBlockLoader(fetcher, lookup); } + private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport(nullValue) { + @Override + public void convertValue(Object value, List accumulator) { + try { + if (value instanceof Boolean b) { + accumulator.add(b); + } else { + String stringValue = value.toString(); + // Matches logic in parser invoked by `parseCreateField` + accumulator.add(parseBoolean(stringValue)); + } + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + protected void parseNonNullValue(XContentParser parser, List accumulator) throws IOException { + // Aligned with implementation of `parseCreateField(XContentParser)` + try { + var value = parser.booleanValue(); + accumulator.add(value); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + public void writeToBlock(List values, BlockLoader.Builder blockBuilder) { + var longBuilder = (BlockLoader.BooleanBuilder) blockBuilder; + + for (var value : values) { + longBuilder.appendBoolean(value); + } + } + }; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 0bf53682d2d17..72eb8841d5bae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -61,6 +61,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.runtime.LongScriptFieldDistanceFeatureQuery; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.text.NumberFormat; @@ -417,6 +418,7 @@ public DateFieldMapper build(MapperBuilderContext context) { store.getValue(), docValues.getValue(), hasDocValuesSkipper, + context.isSourceSynthetic(), buildFormatter(), resolution, nullValue.getValue(), @@ -485,6 +487,7 @@ public static final class DateFieldType extends MappedFieldType { private final FieldValues scriptValues; private final boolean pointsMetadataAvailable; private final boolean hasDocValuesSkipper; + private final boolean isSyntheticSource; public DateFieldType( String name, @@ -505,6 +508,7 @@ public DateFieldType( isStored, hasDocValues, false, + false, dateTimeFormatter, resolution, nullValue, @@ -520,6 +524,7 @@ public DateFieldType( boolean isStored, boolean hasDocValues, boolean hasDocValuesSkipper, + boolean isSyntheticSource, DateFormatter dateTimeFormatter, Resolution resolution, String nullValue, @@ -534,6 +539,7 @@ public DateFieldType( this.scriptValues = scriptValues; this.pointsMetadataAvailable = pointsMetadataAvailable; this.hasDocValuesSkipper = hasDocValuesSkipper; + this.isSyntheticSource = isSyntheticSource; } public DateFieldType( @@ -547,7 +553,20 @@ public DateFieldType( FieldValues scriptValues, Map meta ) { - this(name, isIndexed, isIndexed, isStored, hasDocValues, false, dateTimeFormatter, resolution, nullValue, scriptValues, meta); + this( + name, + isIndexed, + isIndexed, + isStored, + hasDocValues, + 
false, + false, + dateTimeFormatter, + resolution, + nullValue, + scriptValues, + meta + ); } public DateFieldType(String name) { @@ -558,6 +577,7 @@ public DateFieldType(String name) { false, true, false, + false, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, @@ -574,6 +594,7 @@ public DateFieldType(String name, boolean isIndexed) { false, true, false, + false, DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, @@ -583,15 +604,15 @@ public DateFieldType(String name, boolean isIndexed) { } public DateFieldType(String name, DateFormatter dateFormatter) { - this(name, true, true, false, true, false, dateFormatter, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); + this(name, true, true, false, true, false, false, dateFormatter, Resolution.MILLISECONDS, null, null, Collections.emptyMap()); } public DateFieldType(String name, Resolution resolution) { - this(name, true, true, false, true, false, DEFAULT_DATE_TIME_FORMATTER, resolution, null, null, Collections.emptyMap()); + this(name, true, true, false, true, false, false, DEFAULT_DATE_TIME_FORMATTER, resolution, null, null, Collections.emptyMap()); } public DateFieldType(String name, Resolution resolution, DateFormatter dateFormatter) { - this(name, true, true, false, true, false, dateFormatter, resolution, null, null, Collections.emptyMap()); + this(name, true, true, false, true, false, false, dateFormatter, resolution, null, null, Collections.emptyMap()); } @Override @@ -923,12 +944,63 @@ public BlockLoader blockLoader(BlockLoaderContext blContext) { if (hasDocValues()) { return new BlockDocValuesReader.LongsBlockLoader(name()); } + + if (isSyntheticSource) { + return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) { + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.longs(expectedCount); + } + }; + } + BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) : BlockSourceReader.lookupMatchingAll(); return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher(blContext.sourcePaths(name())), lookup); } + private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { + Function dateParser = this::parse; + + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport(nullValue) { + @Override + public void convertValue(Object value, List accumulator) { + try { + String date = value instanceof Number ? 
NUMBER_FORMAT.format(value) : value.toString(); + accumulator.add(dateParser.apply(date)); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + protected void parseNonNullValue(XContentParser parser, List accumulator) throws IOException { + // Aligned with implementation of `parseCreateField(XContentParser)` + try { + String dateAsString = parser.textOrNull(); + + if (dateAsString == null) { + accumulator.add(dateParser.apply(nullValue)); + } else { + accumulator.add(dateParser.apply(dateAsString)); + } + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + public void writeToBlock(List values, BlockLoader.Builder blockBuilder) { + var longBuilder = (BlockLoader.LongBuilder) blockBuilder; + + for (var value : values) { + longBuilder.appendLong(value); + } + } + }; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java index 68b2e31a4b011..8474b754e951d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java @@ -275,9 +275,9 @@ public interface Reader { } public abstract static class ReaderWithNullValueSupport implements Reader { - private final T nullValue; + private final Object nullValue; - public ReaderWithNullValueSupport(T nullValue) { + public ReaderWithNullValueSupport(Object nullValue) { this.nullValue = nullValue; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 127024f17a222..340af979c0593 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -770,7 +770,7 @@ public Builder builder(BlockFactory factory, int expectedCount) { private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { var nullValueBytes = nullValue != null ? 
new BytesRef(nullValue) : null; - return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValueBytes) { + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport(nullValueBytes) { @Override public void convertValue(Object value, List accumulator) { String stringValue = ((BytesRef) value).utf8ToString(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index 172545ab459c2..07e836403e7ef 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -363,7 +363,7 @@ public void testRequireDocValuesOnDoubles() { public void testRequireDocValuesOnBools() { doTestRequireDocValues(new BooleanFieldMapper.BooleanFieldType("field")); doTestRequireDocValues( - new BooleanFieldMapper.BooleanFieldType("field", true, false, false, null, null, Collections.emptyMap(), false) + new BooleanFieldMapper.BooleanFieldType("field", true, false, false, null, null, Collections.emptyMap(), false, false) ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java index 19a38e19fbf6e..38f73cd9f681d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldTypeTests.java @@ -81,6 +81,7 @@ public void testFetchSourceValue() throws IOException { true, null, Collections.emptyMap(), + false, false ); assertEquals(List.of(true), fetchSourceValue(nullFieldType, null)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index ad258086affc7..16c2c5ca5ddb8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -94,6 +94,7 @@ public void testIsFieldWithinQueryDateMillisDocValueSkipper() throws IOException false, true, true, + false, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Resolution.MILLISECONDS, null, @@ -111,6 +112,7 @@ public void testIsFieldWithinQueryDateNanosDocValueSkipper() throws IOException false, true, true, + false, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, Resolution.NANOSECONDS, null, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/BooleanFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/BooleanFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..906208548b963 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/BooleanFieldBlockLoaderTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.index.mapper.BlockLoaderTestCase; +import org.elasticsearch.logsdb.datageneration.FieldType; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class BooleanFieldBlockLoaderTests extends BlockLoaderTestCase { + public BooleanFieldBlockLoaderTests() { + super(FieldType.BOOLEAN); + } + + @Override + @SuppressWarnings("unchecked") + protected Object expected(Map fieldMapping, Object value, boolean syntheticSource) { + var nullValue = switch (fieldMapping.get("null_value")) { + case Boolean b -> b; + case String s -> Boolean.parseBoolean(s); + case null -> null; + default -> throw new IllegalStateException("Unexpected null_value format"); + }; + + if (value instanceof List == false) { + return convert(value, nullValue); + } + + if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { + // Sorted + var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).sorted().toList(); + return maybeFoldList(resultList); + } + + // parsing from source, not sorted + var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).toList(); + return maybeFoldList(resultList); + } + + @SuppressWarnings("unchecked") + private Object convert(Object value, Object nullValue) { + if (value == null) { + return nullValue; + } + if (value instanceof String s) { + if (s.isEmpty()) { + // This is a documented behavior. + return false; + } + if (value.equals("true")) { + return true; + } + if (value.equals("false")) { + return false; + } + } + if (value instanceof Boolean b) { + return b; + } + + // Malformed values are excluded + return null; + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DateFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DateFieldBlockLoaderTests.java new file mode 100644 index 0000000000000..a969a79aa044f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DateFieldBlockLoaderTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper.blockloader; + +import org.elasticsearch.index.mapper.BlockLoaderTestCase; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.logsdb.datageneration.FieldType; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +public class DateFieldBlockLoaderTests extends BlockLoaderTestCase { + public DateFieldBlockLoaderTests() { + super(FieldType.DATE); + } + + @Override + @SuppressWarnings("unchecked") + protected Object expected(Map fieldMapping, Object value, boolean syntheticSource) { + var format = (String) fieldMapping.get("format"); + var nullValue = fieldMapping.get("null_value") != null ? 
format(fieldMapping.get("null_value"), format) : null; + + if (value instanceof List == false) { + return convert(value, nullValue, format); + } + + if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { + // Sorted + var resultList = ((List) value).stream() + .map(v -> convert(v, nullValue, format)) + .filter(Objects::nonNull) + .sorted() + .toList(); + return maybeFoldList(resultList); + } + + // parsing from source, not sorted + var resultList = ((List) value).stream().map(v -> convert(v, nullValue, format)).filter(Objects::nonNull).toList(); + return maybeFoldList(resultList); + } + + private Long convert(Object value, Long nullValue, String format) { + if (value == null) { + return nullValue; + } + + return format(value, format); + } + + private Long format(Object value, String format) { + if (format == null) { + return switch (value) { + case Integer i -> i.longValue(); + case Long l -> l; + case String s -> { + try { + yield Instant.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse(s)).toEpochMilli(); + } catch (Exception e) { + // malformed + yield null; + } + } + case null -> null; + default -> throw new IllegalStateException("Unexpected value: " + value); + }; + } + + try { + return Instant.from( + DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).parse((String) value) + ).toEpochMilli(); + } catch (Exception e) { + // malformed + return null; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java index 909cccf9e7d54..b7f7bd9de5ef7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/KeywordFieldBlockLoaderTests.java @@ -46,7 +46,6 @@ protected Object expected(Map fieldMapping, Object value, boolea if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { // Sorted and no duplicates - var resultList = convertValues.andThen(Stream::distinct) .andThen(Stream::sorted) .andThen(Stream::toList) diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java index ab6fd109ed375..a8b1e68061041 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/BlockLoaderTestCase.java @@ -71,6 +71,16 @@ public DataSourceResponse.ObjectMappingParametersGenerator handle( this.documentGenerator = new DocumentGenerator(specification); } + @Override + public void testFieldHasValue() { + assumeTrue("random test inherited from MapperServiceTestCase", false); + } + + @Override + public void testFieldHasValueWithEmptyFieldInfos() { + assumeTrue("random test inherited from MapperServiceTestCase", false); + } + public void testBlockLoader() throws IOException { var template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType))); var syntheticSource = randomBoolean(); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java index e5f68362005d5..43c2663ac71e8 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java @@ -30,7 +30,7 @@ protected Object expected(Map fieldMapping, Object value, boolea } if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { - // Sorted and no duplicates + // Sorted var resultList = ((List) value).stream() .map(v -> convert(v, nullValue, fieldMapping)) .filter(Objects::nonNull) diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java index 4bf65fcf6ecf6..851812268e9ba 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/FieldType.java @@ -10,8 +10,10 @@ package org.elasticsearch.logsdb.datageneration; import org.elasticsearch.logsdb.datageneration.datasource.DataSource; +import org.elasticsearch.logsdb.datageneration.fields.leaf.BooleanFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.ByteFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.CountedKeywordFieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.fields.leaf.DateFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.DoubleFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.FloatFieldDataGenerator; import org.elasticsearch.logsdb.datageneration.fields.leaf.HalfFloatFieldDataGenerator; @@ -36,7 +38,9 @@ public enum FieldType { FLOAT("float"), HALF_FLOAT("half_float"), SCALED_FLOAT("scaled_float"), - COUNTED_KEYWORD("counted_keyword"); + COUNTED_KEYWORD("counted_keyword"), + BOOLEAN("boolean"), + DATE("date"); private final String name; @@ -57,6 +61,8 @@ public FieldDataGenerator generator(String fieldName, DataSource dataSource) { case HALF_FLOAT -> new HalfFloatFieldDataGenerator(fieldName, dataSource); case SCALED_FLOAT -> new ScaledFloatFieldDataGenerator(fieldName, dataSource); case COUNTED_KEYWORD -> new CountedKeywordFieldDataGenerator(fieldName, dataSource); + case BOOLEAN -> new BooleanFieldDataGenerator(dataSource); + case DATE -> new DateFieldDataGenerator(dataSource); }; } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java index beef9fb4dd799..2a17f9311faa9 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceHandler.java @@ -46,6 +46,14 @@ default DataSourceResponse.StringGenerator handle(DataSourceRequest.StringGenera return null; } + default DataSourceResponse.BooleanGenerator handle(DataSourceRequest.BooleanGenerator request) { + return null; + } + + default DataSourceResponse.InstantGenerator handle(DataSourceRequest.InstantGenerator request) { + return null; + } + default DataSourceResponse.NullWrapper handle(DataSourceRequest.NullWrapper request) { return null; } @@ -62,6 +70,10 @@ default DataSourceResponse.MalformedWrapper handle(DataSourceRequest.MalformedWr return null; } + default DataSourceResponse.TransformWrapper handle(DataSourceRequest.TransformWrapper request) { + return null; + } + default DataSourceResponse.ChildFieldGenerator handle(DataSourceRequest.ChildFieldGenerator request) { return null; } 
diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java index 0e6e796ff6d54..2a1ca7297d7db 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.logsdb.datageneration.fields.DynamicMapping; import java.util.Set; +import java.util.function.Function; import java.util.function.Supplier; public interface DataSourceRequest { @@ -74,6 +75,18 @@ public DataSourceResponse.StringGenerator accept(DataSourceHandler handler) { } } + record BooleanGenerator() implements DataSourceRequest { + public DataSourceResponse.BooleanGenerator accept(DataSourceHandler handler) { + return handler.handle(this); + } + } + + record InstantGenerator() implements DataSourceRequest { + public DataSourceResponse.InstantGenerator accept(DataSourceHandler handler) { + return handler.handle(this); + } + } + record NullWrapper() implements DataSourceRequest { public DataSourceResponse.NullWrapper accept(DataSourceHandler handler) { return handler.handle(this); @@ -98,6 +111,14 @@ public DataSourceResponse.MalformedWrapper accept(DataSourceHandler handler) { } } + record TransformWrapper(double transformedProportion, Function transformation) + implements + DataSourceRequest { + public DataSourceResponse.TransformWrapper accept(DataSourceHandler handler) { + return handler.handle(this); + } + } + record ChildFieldGenerator(DataGeneratorSpecification specification) implements DataSourceRequest { diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java index e9f1adb98d248..e7a64471e024c 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DataSourceResponse.java @@ -11,6 +11,7 @@ import org.elasticsearch.logsdb.datageneration.FieldType; +import java.time.Instant; import java.util.Map; import java.util.Optional; import java.util.function.Function; @@ -35,6 +36,10 @@ record HalfFloatGenerator(Supplier generator) implements DataSourceRespon record StringGenerator(Supplier generator) implements DataSourceResponse {} + record BooleanGenerator(Supplier generator) implements DataSourceResponse {} + + record InstantGenerator(Supplier generator) implements DataSourceResponse {} + record NullWrapper(Function, Supplier> wrapper) implements DataSourceResponse {} record ArrayWrapper(Function, Supplier> wrapper) implements DataSourceResponse {} @@ -43,6 +48,8 @@ record RepeatingWrapper(Function, Supplier> wrapper) im record MalformedWrapper(Function, Supplier> wrapper) implements DataSourceResponse {} + record TransformWrapper(Function, Supplier> wrapper) implements DataSourceResponse {} + interface ChildFieldGenerator extends DataSourceResponse { int generateChildFieldCount(); diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index e7b9140b07daf..93faf795ff565 
100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -14,7 +14,12 @@ import org.elasticsearch.logsdb.datageneration.FieldType; import org.elasticsearch.test.ESTestCase; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -35,6 +40,8 @@ public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceReques case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> numberMapping(map, request.fieldType()); case SCALED_FLOAT -> scaledFloatMapping(map); case COUNTED_KEYWORD -> plain(Map.of("index", ESTestCase.randomBoolean())); + case BOOLEAN -> booleanMapping(map); + case DATE -> dateMapping(map); }); } @@ -113,6 +120,52 @@ private Supplier> scaledFloatMapping(Map inj }; } + private Supplier> booleanMapping(Map injected) { + return () -> { + if (ESTestCase.randomDouble() <= 0.2) { + injected.put("null_value", ESTestCase.randomFrom(true, false, "true", "false")); + } + + if (ESTestCase.randomBoolean()) { + injected.put("ignore_malformed", ESTestCase.randomBoolean()); + } + + return injected; + }; + } + + // just a custom format, specific format does not matter + private static final String FORMAT = "yyyy_MM_dd_HH_mm_ss_n"; + + private Supplier> dateMapping(Map injected) { + return () -> { + String format = null; + if (ESTestCase.randomBoolean()) { + format = FORMAT; + injected.put("format", format); + } + + if (ESTestCase.randomDouble() <= 0.2) { + var instant = ESTestCase.randomInstantBetween(Instant.parse("2300-01-01T00:00:00Z"), Instant.parse("2350-01-01T00:00:00Z")); + + if (format == null) { + injected.put("null_value", instant.toEpochMilli()); + } else { + injected.put( + "null_value", + DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).format(instant) + ); + } + } + + if (ESTestCase.randomBoolean()) { + injected.put("ignore_malformed", ESTestCase.randomBoolean()); + } + + return injected; + }; + } + @Override public DataSourceResponse.ObjectMappingParametersGenerator handle(DataSourceRequest.ObjectMappingParametersGenerator request) { if (request.isNested()) { diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultPrimitiveTypesHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultPrimitiveTypesHandler.java index 5d94a841dc0ed..80f7e3c24e3a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultPrimitiveTypesHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultPrimitiveTypesHandler.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import java.math.BigInteger; +import java.time.Instant; public class DefaultPrimitiveTypesHandler implements DataSourceHandler { @Override @@ -59,4 +60,16 @@ public DataSourceResponse.HalfFloatGenerator handle(DataSourceRequest.HalfFloatG public DataSourceResponse.StringGenerator handle(DataSourceRequest.StringGenerator request) { return new DataSourceResponse.StringGenerator(() -> ESTestCase.randomAlphaOfLengthBetween(0, 50)); } + + @Override + public 
DataSourceResponse.BooleanGenerator handle(DataSourceRequest.BooleanGenerator request) { + return new DataSourceResponse.BooleanGenerator(ESTestCase::randomBoolean); + } + + private static final Instant MAX_INSTANT = Instant.parse("2200-01-01T00:00:00Z"); + + @Override + public DataSourceResponse.InstantGenerator handle(DataSourceRequest.InstantGenerator request) { + return new DataSourceResponse.InstantGenerator(() -> ESTestCase.randomInstantBetween(Instant.ofEpochMilli(1), MAX_INSTANT)); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java index ac686e0201327..ae4ad196c47d0 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultWrappersHandler.java @@ -37,9 +37,14 @@ public DataSourceResponse.MalformedWrapper handle(DataSourceRequest.MalformedWra return new DataSourceResponse.MalformedWrapper(injectMalformed(request.malformedValues())); } + @Override + public DataSourceResponse.TransformWrapper handle(DataSourceRequest.TransformWrapper request) { + return new DataSourceResponse.TransformWrapper(transform(request.transformedProportion(), request.transformation())); + } + private static Function, Supplier> injectNulls() { // Inject some nulls but majority of data should be non-null (as it likely is in reality). - return (values) -> () -> ESTestCase.randomDouble() <= 0.05 ? null : values.get(); + return transform(0.05, ignored -> null); } private static Function, Supplier> wrapInArray() { @@ -69,6 +74,13 @@ private static Function, Supplier> repeatValues() { } private static Function, Supplier> injectMalformed(Supplier malformedValues) { - return (values) -> () -> ESTestCase.randomDouble() <= 0.1 ? malformedValues.get() : values.get(); + return transform(0.1, ignored -> malformedValues.get()); + } + + private static Function, Supplier> transform( + double transformedProportion, + Function transformation + ) { + return (values) -> () -> ESTestCase.randomDouble() <= transformedProportion ? transformation.apply(values.get()) : values.get(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/BooleanFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/BooleanFieldDataGenerator.java new file mode 100644 index 0000000000000..482ed1b1321e5 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/BooleanFieldDataGenerator.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.logsdb.datageneration.fields.leaf; + +import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.datasource.DataSource; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; + +import java.util.Map; +import java.util.function.Supplier; + +public class BooleanFieldDataGenerator implements FieldDataGenerator { + private final DataSource dataSource; + private final Supplier booleans; + private final Supplier booleansWithStrings; + private final Supplier booleansWithStringsAndMalformed; + + public BooleanFieldDataGenerator(DataSource dataSource) { + this.dataSource = dataSource; + + var booleans = dataSource.get(new DataSourceRequest.BooleanGenerator()).generator(); + this.booleans = booleans::get; + + // produces "true" and "false" strings + var toStringTransform = dataSource.get(new DataSourceRequest.TransformWrapper(0.5, Object::toString)).wrapper(); + this.booleansWithStrings = toStringTransform.apply(this.booleans::get); + + var strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + this.booleansWithStringsAndMalformed = Wrappers.defaultsWithMalformed(booleansWithStrings, strings::get, dataSource); + } + + @Override + public Object generateValue(Map fieldMapping) { + if (fieldMapping == null) { + // dynamically mapped, use booleans only to avoid mapping the field as string + return Wrappers.defaults(booleans, dataSource).get(); + } + + if ((Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + return booleansWithStringsAndMalformed.get(); + } + + return Wrappers.defaults(booleansWithStrings, dataSource).get(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DateFieldDataGenerator.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DateFieldDataGenerator.java new file mode 100644 index 0000000000000..ae267ca7570fc --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/fields/leaf/DateFieldDataGenerator.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.logsdb.datageneration.fields.leaf; + +import org.elasticsearch.logsdb.datageneration.FieldDataGenerator; +import org.elasticsearch.logsdb.datageneration.datasource.DataSource; +import org.elasticsearch.logsdb.datageneration.datasource.DataSourceRequest; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; + +public class DateFieldDataGenerator implements FieldDataGenerator { + private final DataSource dataSource; + private final Supplier instants; + private final Supplier strings; + + public DateFieldDataGenerator(DataSource dataSource) { + this.dataSource = dataSource; + this.instants = () -> dataSource.get(new DataSourceRequest.InstantGenerator()).generator().get(); + this.strings = dataSource.get(new DataSourceRequest.StringGenerator()).generator(); + } + + @Override + public Object generateValue(Map fieldMapping) { + Supplier supplier = () -> instants.get().toEpochMilli(); + + if (fieldMapping != null && fieldMapping.get("format") != null) { + String format = (String) fieldMapping.get("format"); + supplier = () -> DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).format(instants.get()); + } + + if (fieldMapping != null && (Boolean) fieldMapping.getOrDefault("ignore_malformed", false)) { + supplier = Wrappers.defaultsWithMalformed(supplier, strings::get, dataSource); + } else { + supplier = Wrappers.defaults(supplier, dataSource); + } + + return supplier.get(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java index e2acea4ad91de..f424a7ecf45ed 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/FieldSpecificMatcher.java @@ -11,15 +11,22 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.logsdb.datageneration.matchers.MatchResult; import org.elasticsearch.xcontent.XContentBuilder; import java.math.BigInteger; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; +import java.util.function.Function; import java.util.stream.Collectors; import static org.elasticsearch.logsdb.datageneration.matchers.Messages.formatErrorMessage; @@ -334,23 +341,138 @@ Object convert(Object value, Object nullValue) { } } - // TODO basic implementation only right now - class DateMatcher extends GenericMappingAwareMatcher { - DateMatcher( + class BooleanMatcher extends GenericMappingAwareMatcher { + BooleanMatcher( XContentBuilder actualMappings, Settings.Builder actualSettings, XContentBuilder expectedMappings, Settings.Builder expectedSettings ) { - super("date", actualMappings, actualSettings, expectedMappings, expectedSettings); + super("boolean", actualMappings, actualSettings, expectedMappings, expectedSettings); } @Override Object convert(Object value, Object nullValue) { 
+ Boolean nullValueBool = null; + if (nullValue != null) { + nullValueBool = nullValue instanceof Boolean b ? b : Boolean.parseBoolean((String) nullValue); + } + + if (value == null) { + return nullValueBool; + } + if (value instanceof String s && s.isEmpty()) { + // This a documented behavior. + return false; + } + if (value instanceof String s) { + try { + return Boolean.parseBoolean(s); + } catch (Exception e) { + // malformed + return value; + } + } + return value; } } + class DateMatcher implements FieldSpecificMatcher { + private final XContentBuilder actualMappings; + private final Settings.Builder actualSettings; + private final XContentBuilder expectedMappings; + private final Settings.Builder expectedSettings; + + DateMatcher( + XContentBuilder actualMappings, + Settings.Builder actualSettings, + XContentBuilder expectedMappings, + Settings.Builder expectedSettings + ) { + this.actualMappings = actualMappings; + this.actualSettings = actualSettings; + this.expectedMappings = expectedMappings; + this.expectedSettings = expectedSettings; + } + + @Override + public MatchResult match( + List actual, + List expected, + Map actualMapping, + Map expectedMapping + ) { + var format = (String) getMappingParameter("format", actualMapping, expectedMapping); + var nullValue = getNullValue(actualMapping, expectedMapping); + + Function convert = v -> convert(v, nullValue); + if (format != null) { + var formatter = DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)); + convert = v -> convert(v, nullValue, formatter); + } + + var actualNormalized = normalize(actual, convert); + var expectedNormalized = normalize(expected, convert); + + return actualNormalized.equals(expectedNormalized) + ? MatchResult.match() + : MatchResult.noMatch( + formatErrorMessage( + actualMappings, + actualSettings, + expectedMappings, + expectedSettings, + "Values of type [date] don't match after normalization, normalized " + + prettyPrintCollections(actualNormalized, expectedNormalized) + ) + ); + } + + private Set normalize(List values, Function convert) { + if (values == null) { + return Set.of(); + } + + return values.stream().map(convert).filter(Objects::nonNull).collect(Collectors.toSet()); + } + + Object convert(Object value, Object nullValue) { + if (value == null) { + return nullValue == null ? null : Instant.ofEpochMilli((Long) nullValue); + } + if (value instanceof Integer i) { + return Instant.ofEpochMilli(i); + } + if (value instanceof Long l) { + return Instant.ofEpochMilli(l); + } + + assert value instanceof String; + try { + // values from synthetic source will be formatted with default formatter + return Instant.from(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parse((String) value)); + } catch (Exception e) { + // malformed + return value; + } + } + + Object convert(Object value, Object nullValue, DateTimeFormatter dateTimeFormatter) { + if (value == null) { + return nullValue == null ? null : Instant.from(dateTimeFormatter.parse((String) nullValue)).toEpochMilli(); + } + + assert value instanceof String; + try { + return Instant.from(dateTimeFormatter.parse((String) value)).toEpochMilli(); + } catch (Exception e) { + // malformed + return value; + } + } + } + /** * Generic matcher that supports common matching logic like null values. 
*/ diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java index 7390f846b017a..8350ef3ab7a72 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/matchers/source/SourceMatcher.java @@ -94,6 +94,7 @@ public SourceMatcher( "counted_keyword", new FieldSpecificMatcher.CountedKeywordMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings) ); + put("boolean", new FieldSpecificMatcher.BooleanMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings)); } }; this.dynamicFieldMatcher = new DynamicFieldMatcher(actualMappings, actualSettings, expectedMappings, expectedSettings); diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 04ea6a73e228e..d67f8c388479e 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -364,7 +364,7 @@ private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBloc var nullValueEncoded = nullValueFormatted != null ? (Number) unsignedToSortableSignedLong(parseUnsignedLong(nullValueFormatted)) : null; - return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValueFormatted) { + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport(nullValueFormatted) { @Override public void convertValue(Object value, List accumulator) { if (value.equals("")) { From 8a28b51c574fb7ec5e7c239ec28141af25d7b50b Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 6 Mar 2025 06:45:01 +1100 Subject: [PATCH 48/54] Mute org.elasticsearch.search.query.QueryPhaseTimeoutTests testScorerTimeoutPoints #124140 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 78662291b7286..75b57daccdc36 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -336,6 +336,9 @@ tests: - class: org.elasticsearch.xpack.esql.plugin.MatchOperatorIT method: testScoring_Zero_OutsideQuery issue: https://github.com/elastic/elasticsearch/issues/124132 +- class: org.elasticsearch.search.query.QueryPhaseTimeoutTests + method: testScorerTimeoutPoints + issue: https://github.com/elastic/elasticsearch/issues/124140 # Examples: # From f534fc3ccf06174ba2248f9e01d5eda9d42462ff Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 6 Mar 2025 06:45:09 +1100 Subject: [PATCH 49/54] Mute org.elasticsearch.search.query.QueryPhaseTimeoutTests testScorerTimeoutTerms #124141 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 75b57daccdc36..20b150e421eb3 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -339,6 +339,9 @@ tests: - class: org.elasticsearch.search.query.QueryPhaseTimeoutTests method: testScorerTimeoutPoints issue: https://github.com/elastic/elasticsearch/issues/124140 +- class: 
org.elasticsearch.search.query.QueryPhaseTimeoutTests + method: testScorerTimeoutTerms + issue: https://github.com/elastic/elasticsearch/issues/124141 # Examples: # From 2fa6651a68081420e632eb45f804f8d8ed4f3490 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Wed, 5 Mar 2025 15:01:46 -0500 Subject: [PATCH 50/54] Remove matched text from chunks (#123607) --- .../inference/ChunkedInference.java | 11 ++- .../results/ChunkedInferenceEmbedding.java | 3 +- .../results/ChunkedInferenceError.java | 7 +- .../inference/results/EmbeddingResults.java | 8 +- .../results/SparseEmbeddingResults.java | 10 +-- .../results/TextEmbeddingByteResults.java | 8 +- .../results/TextEmbeddingFloatResults.java | 8 +- .../TestDenseInferenceServiceExtension.java | 1 - .../TestSparseInferenceServiceExtension.java | 8 +- .../ShardBulkInferenceActionFilter.java | 2 +- .../chunking/EmbeddingRequestChunker.java | 5 +- .../inference/mapper/SemanticTextField.java | 2 +- .../elser/HuggingFaceElserService.java | 1 - .../EmbeddingRequestChunkerTests.java | 74 +++++++++---------- .../mapper/SemanticTextFieldTests.java | 12 +-- .../AmazonBedrockServiceTests.java | 6 +- .../AzureAiStudioServiceTests.java | 8 +- .../azureopenai/AzureOpenAiServiceTests.java | 8 +- .../services/cohere/CohereServiceTests.java | 16 ++-- .../elastic/ElasticInferenceServiceTests.java | 1 - .../ElasticsearchInternalServiceTests.java | 18 ++--- .../GoogleAiStudioServiceTests.java | 6 +- .../HuggingFaceElserServiceTests.java | 1 - .../huggingface/HuggingFaceServiceTests.java | 3 +- .../ibmwatsonx/IbmWatsonxServiceTests.java | 8 +- .../services/jinaai/JinaAIServiceTests.java | 8 +- .../services/openai/OpenAiServiceTests.java | 8 +- .../voyageai/VoyageAIServiceTests.java | 8 +- 28 files changed, 119 insertions(+), 140 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/inference/ChunkedInference.java b/server/src/main/java/org/elasticsearch/inference/ChunkedInference.java index c54e5a98d56cc..90116b4f0e3b8 100644 --- a/server/src/main/java/org/elasticsearch/inference/ChunkedInference.java +++ b/server/src/main/java/org/elasticsearch/inference/ChunkedInference.java @@ -21,18 +21,17 @@ public interface ChunkedInference { * Implementations of this function serialize their embeddings to {@link BytesReference} for storage in semantic text fields. * * @param xcontent provided by the SemanticTextField - * @return an iterator of the serialized {@link Chunk} which includes the matched text (input) and bytes reference (output/embedding). + * @return an iterator of the serialized {@link Chunk} which includes the offset into the input text and bytes reference + * (output/embedding). */ - Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException; + Iterator chunksAsByteReference(XContent xcontent) throws IOException; /** - * A chunk of inference results containing matched text, the substring location - * in the original text and the bytes reference. - * @param matchedText + * A chunk of inference results containing the substring location in the original text and the bytes reference. 
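     * <p>Illustrative only (not part of this change): since the chunk no longer carries the matched
     * text, a caller that still needs it can slice it out of the original input using the offset,
     * assuming the half-open {@code [start, end)} convention used by the callers in this patch and a
     * local {@code input} variable holding the original text:
     * <pre>{@code
     * String chunkText = input.substring(chunk.textOffset().start(), chunk.textOffset().end());
     * }</pre>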
* @param textOffset * @param bytesReference */ - record Chunk(String matchedText, TextOffset textOffset, BytesReference bytesReference) {} + record Chunk(TextOffset textOffset, BytesReference bytesReference) {} record TextOffset(int start, int end) {} } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceEmbedding.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceEmbedding.java index e723a3b4f8f60..3159419ad718c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceEmbedding.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceEmbedding.java @@ -29,7 +29,6 @@ public static List listOf(List inputs, SparseEmbedding List.of( new SparseEmbeddingResults.Chunk( sparseEmbeddingResults.embeddings().get(i).tokens(), - inputs.get(i), new TextOffset(0, inputs.get(i).length()) ) ) @@ -41,7 +40,7 @@ public static List listOf(List inputs, SparseEmbedding } @Override - public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) throws IOException { + public Iterator chunksAsByteReference(XContent xcontent) throws IOException { var asChunk = new ArrayList(); for (var chunk : chunks()) { asChunk.add(chunk.toChunk(xcontent)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceError.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceError.java index 65be9f12d7686..9b0abd11badc6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceError.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/ChunkedInferenceError.java @@ -7,17 +7,16 @@ package org.elasticsearch.xpack.core.inference.results; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.inference.ChunkedInference; import org.elasticsearch.xcontent.XContent; +import java.util.Collections; import java.util.Iterator; -import java.util.stream.Stream; public record ChunkedInferenceError(Exception exception) implements ChunkedInference { @Override - public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { - return Stream.of(exception).map(e -> new Chunk(e.getMessage(), new TextOffset(0, 0), BytesArray.EMPTY)).iterator(); + public Iterator chunksAsByteReference(XContent xcontent) { + return Collections.emptyIterator(); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingResults.java index c6f4c6915024b..8cd5d78a8ca9d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/EmbeddingResults.java @@ -24,13 +24,11 @@ public interface EmbeddingResults { /** - * Combines the resulting embedding with the input into a chunk. + * Combines the resulting embedding with the offset into the input text into a chunk. 
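     * <p>A minimal sketch of the intended call pattern, assuming {@code chunkStart} and {@code chunkEnd}
     * are the chunk's character range within the original input (as computed by the request chunker
     * elsewhere in this change); the matched text itself is no longer passed in:
     * <pre>{@code
     * var chunk = embedding.toChunk(new ChunkedInference.TextOffset(chunkStart, chunkEnd));
     * }</pre>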
*/ - C toChunk(String text, ChunkedInference.TextOffset offset); + C toChunk(ChunkedInference.TextOffset offset); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java index 894e8c6c97bfd..c4001a6325fcf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/SparseEmbeddingResults.java @@ -175,17 +175,15 @@ public String toString() { } @Override - public Chunk toChunk(String text, ChunkedInference.TextOffset offset) { - return new Chunk(tokens, text, offset); + public Chunk toChunk(ChunkedInference.TextOffset offset) { + return new Chunk(tokens, offset); } } - public record Chunk(List weightedTokens, String matchedText, ChunkedInference.TextOffset offset) - implements - EmbeddingResults.Chunk { + public record Chunk(List weightedTokens, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk { public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException { - return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, weightedTokens)); + return new ChunkedInference.Chunk(offset, toBytesReference(xcontent, weightedTokens)); } private static BytesReference toBytesReference(XContent xContent, List tokens) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java index f8268d7bd4683..fd8f22e535ee8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingByteResults.java @@ -187,18 +187,18 @@ public int hashCode() { } @Override - public Chunk toChunk(String text, ChunkedInference.TextOffset offset) { - return new Chunk(values, text, offset); + public Chunk toChunk(ChunkedInference.TextOffset offset) { + return new Chunk(values, offset); } } /** * Serialises the {@code value} array, according to the provided {@link XContent}, into a {@link BytesReference}. 
*/ - public record Chunk(byte[] embedding, String matchedText, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk { + public record Chunk(byte[] embedding, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk { public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException { - return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, embedding)); + return new ChunkedInference.Chunk(offset, toBytesReference(xcontent, embedding)); } private static BytesReference toBytesReference(XContent xContent, byte[] value) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingFloatResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingFloatResults.java index cef381982b447..8dfdf57f9d1b0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingFloatResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingFloatResults.java @@ -221,15 +221,15 @@ public int hashCode() { } @Override - public Chunk toChunk(String text, ChunkedInference.TextOffset offset) { - return new Chunk(values, text, offset); + public Chunk toChunk(ChunkedInference.TextOffset offset) { + return new Chunk(values, offset); } } - public record Chunk(float[] embedding, String matchedText, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk { + public record Chunk(float[] embedding, ChunkedInference.TextOffset offset) implements EmbeddingResults.Chunk { public ChunkedInference.Chunk toChunk(XContent xcontent) throws IOException { - return new ChunkedInference.Chunk(matchedText, offset, toBytesReference(xcontent, embedding)); + return new ChunkedInference.Chunk(offset, toBytesReference(xcontent, embedding)); } /** diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index da7acf122bb72..b9aee73932e14 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -183,7 +183,6 @@ private List makeChunkedResults(List input, int dimens List.of( new TextEmbeddingFloatResults.Chunk( nonChunkedResults.embeddings().get(i).values(), - input.get(i), new ChunkedInference.TextOffset(0, input.get(i).length()) ) ) diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index fcc175a051964..4e10ce45efeac 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -172,13 +172,7 @@ private List makeChunkedResults(List input) { } results.add( new ChunkedInferenceEmbedding( - List.of( - new 
SparseEmbeddingResults.Chunk( - tokens, - input.get(i), - new ChunkedInference.TextOffset(0, input.get(i).length()) - ) - ) + List.of(new SparseEmbeddingResults.Chunk(tokens, new ChunkedInference.TextOffset(0, input.get(i).length()))) ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index 250c365e52093..7783e8599279d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -606,7 +606,7 @@ static IndexRequest getIndexRequestOrNull(DocWriteRequest docWriteRequest) { private static class EmptyChunkedInference implements ChunkedInference { @Override - public Iterator chunksAsMatchedTextAndByteReference(XContent xcontent) { + public Iterator chunksAsByteReference(XContent xcontent) { return Collections.emptyIterator(); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java index d8751a542392d..0d018f30a8a63 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java @@ -197,10 +197,7 @@ private ChunkedInference mergeResultsWithInputs(int index) { AtomicReferenceArray> result = results.get(index); for (int i = 0; i < request.size(); i++) { EmbeddingResults.Chunk chunk = result.get(i) - .toChunk( - request.get(i).chunkText(), - new ChunkedInference.TextOffset(request.get(i).chunk.start(), request.get(i).chunk.end()) - ); + .toChunk(new ChunkedInference.TextOffset(request.get(i).chunk.start(), request.get(i).chunk.end())); chunks.add(chunk); } return new ChunkedInferenceEmbedding(chunks); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index 489951a206149..ba86a45159b0d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -275,7 +275,7 @@ public static List toSemanticTextFieldChunks( boolean useLegacyFormat ) throws IOException { List chunks = new ArrayList<>(); - Iterator it = results.chunksAsMatchedTextAndByteReference(contentType.xContent()); + Iterator it = results.chunksAsByteReference(contentType.xContent()); while (it.hasNext()) { chunks.add(toSemanticTextFieldChunk(input, offsetAdjustment, it.next(), useLegacyFormat)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 434b94e6f8ac4..8009fae673a34 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -121,7 +121,6 @@ private static List translateToChunkedResults(DocumentsOnlyInp List.of( new TextEmbeddingFloatResults.Chunk( textEmbeddingResults.embeddings().get(i).values(), - inputs.getInputs().get(i), new ChunkedInference.TextOffset(0, inputs.getInputs().get(i).length()) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index 7cdae8d51ce0b..aa33cd0000b42 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -246,7 +246,7 @@ public void testMergingListener_Float() { for (int i = 0; i < numberOfWordsInPassage; i++) { passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace } - List inputs = List.of("1st small", passageBuilder.toString(), "2nd small", "3rd small"); + List inputs = List.of("a", passageBuilder.toString(), "bb", "ccc"); var finalListener = testListener(); var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); @@ -275,7 +275,7 @@ public void testMergingListener_Float() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedFloatResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); - assertEquals("1st small", chunkedFloatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), chunkedFloatResult.chunks().get(0).offset()); } { // this is the large input split in multiple chunks @@ -283,26 +283,26 @@ public void testMergingListener_Float() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedFloatResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(6)); - assertThat(chunkedFloatResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); - assertThat(chunkedFloatResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); - assertThat(chunkedFloatResult.chunks().get(2).matchedText(), startsWith(" passage_input40 ")); - assertThat(chunkedFloatResult.chunks().get(3).matchedText(), startsWith(" passage_input60 ")); - assertThat(chunkedFloatResult.chunks().get(4).matchedText(), startsWith(" passage_input80 ")); - assertThat(chunkedFloatResult.chunks().get(5).matchedText(), startsWith(" passage_input100 ")); + assertThat(chunkedFloatResult.chunks().get(0).offset(), equalTo(new ChunkedInference.TextOffset(0, 309))); + assertThat(chunkedFloatResult.chunks().get(1).offset(), equalTo(new ChunkedInference.TextOffset(309, 629))); + assertThat(chunkedFloatResult.chunks().get(2).offset(), equalTo(new ChunkedInference.TextOffset(629, 949))); + assertThat(chunkedFloatResult.chunks().get(3).offset(), equalTo(new ChunkedInference.TextOffset(949, 1269))); + assertThat(chunkedFloatResult.chunks().get(4).offset(), equalTo(new ChunkedInference.TextOffset(1269, 1589))); + assertThat(chunkedFloatResult.chunks().get(5).offset(), equalTo(new ChunkedInference.TextOffset(1589, 1675))); } { var chunkedResult = finalListener.results.get(2); assertThat(chunkedResult, 
instanceOf(ChunkedInferenceEmbedding.class)); var chunkedFloatResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); - assertEquals("2nd small", chunkedFloatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), chunkedFloatResult.chunks().get(0).offset()); } { var chunkedResult = finalListener.results.get(3); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedFloatResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedFloatResult.chunks(), hasSize(1)); - assertEquals("3rd small", chunkedFloatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 3), chunkedFloatResult.chunks().get(0).offset()); } } @@ -318,7 +318,7 @@ public void testMergingListener_Byte() { for (int i = 0; i < numberOfWordsInPassage; i++) { passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace } - List inputs = List.of("1st small", passageBuilder.toString(), "2nd small", "3rd small"); + List inputs = List.of("a", passageBuilder.toString(), "bb", "ccc"); var finalListener = testListener(); var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); @@ -347,7 +347,7 @@ public void testMergingListener_Byte() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("1st small", chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), chunkedByteResult.chunks().get(0).offset()); } { // this is the large input split in multiple chunks @@ -355,26 +355,26 @@ public void testMergingListener_Byte() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(6)); - assertThat(chunkedByteResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); - assertThat(chunkedByteResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); - assertThat(chunkedByteResult.chunks().get(2).matchedText(), startsWith(" passage_input40 ")); - assertThat(chunkedByteResult.chunks().get(3).matchedText(), startsWith(" passage_input60 ")); - assertThat(chunkedByteResult.chunks().get(4).matchedText(), startsWith(" passage_input80 ")); - assertThat(chunkedByteResult.chunks().get(5).matchedText(), startsWith(" passage_input100 ")); + assertThat(chunkedByteResult.chunks().get(0).offset(), equalTo(new ChunkedInference.TextOffset(0, 309))); + assertThat(chunkedByteResult.chunks().get(1).offset(), equalTo(new ChunkedInference.TextOffset(309, 629))); + assertThat(chunkedByteResult.chunks().get(2).offset(), equalTo(new ChunkedInference.TextOffset(629, 949))); + assertThat(chunkedByteResult.chunks().get(3).offset(), equalTo(new ChunkedInference.TextOffset(949, 1269))); + assertThat(chunkedByteResult.chunks().get(4).offset(), equalTo(new ChunkedInference.TextOffset(1269, 1589))); + assertThat(chunkedByteResult.chunks().get(5).offset(), equalTo(new ChunkedInference.TextOffset(1589, 1675))); } { var chunkedResult = finalListener.results.get(2); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("2nd small", 
chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), chunkedByteResult.chunks().get(0).offset()); } { var chunkedResult = finalListener.results.get(3); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("3rd small", chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 3), chunkedByteResult.chunks().get(0).offset()); } } @@ -390,7 +390,7 @@ public void testMergingListener_Bit() { for (int i = 0; i < numberOfWordsInPassage; i++) { passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace } - List inputs = List.of("1st small", passageBuilder.toString(), "2nd small", "3rd small"); + List inputs = List.of("a", passageBuilder.toString(), "bb", "ccc"); var finalListener = testListener(); var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); @@ -419,7 +419,7 @@ public void testMergingListener_Bit() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("1st small", chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), chunkedByteResult.chunks().get(0).offset()); } { // this is the large input split in multiple chunks @@ -427,26 +427,26 @@ public void testMergingListener_Bit() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(6)); - assertThat(chunkedByteResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); - assertThat(chunkedByteResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); - assertThat(chunkedByteResult.chunks().get(2).matchedText(), startsWith(" passage_input40 ")); - assertThat(chunkedByteResult.chunks().get(3).matchedText(), startsWith(" passage_input60 ")); - assertThat(chunkedByteResult.chunks().get(4).matchedText(), startsWith(" passage_input80 ")); - assertThat(chunkedByteResult.chunks().get(5).matchedText(), startsWith(" passage_input100 ")); + assertThat(chunkedByteResult.chunks().get(0).offset(), equalTo(new ChunkedInference.TextOffset(0, 309))); + assertThat(chunkedByteResult.chunks().get(1).offset(), equalTo(new ChunkedInference.TextOffset(309, 629))); + assertThat(chunkedByteResult.chunks().get(2).offset(), equalTo(new ChunkedInference.TextOffset(629, 949))); + assertThat(chunkedByteResult.chunks().get(3).offset(), equalTo(new ChunkedInference.TextOffset(949, 1269))); + assertThat(chunkedByteResult.chunks().get(4).offset(), equalTo(new ChunkedInference.TextOffset(1269, 1589))); + assertThat(chunkedByteResult.chunks().get(5).offset(), equalTo(new ChunkedInference.TextOffset(1589, 1675))); } { var chunkedResult = finalListener.results.get(2); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("2nd small", chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), chunkedByteResult.chunks().get(0).offset()); } { var chunkedResult = finalListener.results.get(3); 
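                // With matched text removed from chunks, these assertions check character offsets instead:
                // results.get(3) is the three-character input "ccc", so its single chunk spans (0, 3),
                // while the long passage above is split on whitespace into chunks with cumulative offsets.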
assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedByteResult.chunks(), hasSize(1)); - assertEquals("3rd small", chunkedByteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 3), chunkedByteResult.chunks().get(0).offset()); } } @@ -462,7 +462,7 @@ public void testMergingListener_Sparse() { for (int i = 0; i < numberOfWordsInPassage; i++) { passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace } - List inputs = List.of("1st small", "2nd small", "3rd small", passageBuilder.toString()); + List inputs = List.of("a", "bb", "ccc", passageBuilder.toString()); var finalListener = testListener(); var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); @@ -498,21 +498,21 @@ public void testMergingListener_Sparse() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedSparseResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedSparseResult.chunks(), hasSize(1)); - assertEquals("1st small", chunkedSparseResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), chunkedSparseResult.chunks().get(0).offset()); } { var chunkedResult = finalListener.results.get(1); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedSparseResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedSparseResult.chunks(), hasSize(1)); - assertEquals("2nd small", chunkedSparseResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), chunkedSparseResult.chunks().get(0).offset()); } { var chunkedResult = finalListener.results.get(2); assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedSparseResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedSparseResult.chunks(), hasSize(1)); - assertEquals("3rd small", chunkedSparseResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 3), chunkedSparseResult.chunks().get(0).offset()); } { // this is the large input split in multiple chunks @@ -520,9 +520,9 @@ public void testMergingListener_Sparse() { assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); var chunkedSparseResult = (ChunkedInferenceEmbedding) chunkedResult; assertThat(chunkedSparseResult.chunks(), hasSize(9)); // passage is split into 9 chunks, 10 words each - assertThat(chunkedSparseResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); - assertThat(chunkedSparseResult.chunks().get(1).matchedText(), startsWith(" passage_input10 ")); - assertThat(chunkedSparseResult.chunks().get(8).matchedText(), startsWith(" passage_input80 ")); + assertThat(chunkedSparseResult.chunks().get(0).offset(), equalTo(new ChunkedInference.TextOffset(0, 149))); + assertThat(chunkedSparseResult.chunks().get(1).offset(), equalTo(new ChunkedInference.TextOffset(149, 309))); + assertThat(chunkedSparseResult.chunks().get(8).offset(), equalTo(new ChunkedInference.TextOffset(1269, 1350))); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java index 9eb10cfd9f1a8..2f700b402e957 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldTests.java @@ -177,7 +177,7 @@ public static ChunkedInferenceEmbedding randomChunkedInferenceEmbeddingByte(Mode for (int j = 0; j < values.length; j++) { values[j] = randomByte(); } - chunks.add(new TextEmbeddingByteResults.Chunk(values, input, new ChunkedInference.TextOffset(0, input.length()))); + chunks.add(new TextEmbeddingByteResults.Chunk(values, new ChunkedInference.TextOffset(0, input.length()))); } return new ChunkedInferenceEmbedding(chunks); } @@ -189,7 +189,7 @@ public static ChunkedInferenceEmbedding randomChunkedInferenceEmbeddingFloat(Mod for (int j = 0; j < values.length; j++) { values[j] = randomFloat(); } - chunks.add(new TextEmbeddingFloatResults.Chunk(values, input, new ChunkedInference.TextOffset(0, input.length()))); + chunks.add(new TextEmbeddingFloatResults.Chunk(values, new ChunkedInference.TextOffset(0, input.length()))); } return new ChunkedInferenceEmbedding(chunks); } @@ -205,7 +205,7 @@ public static ChunkedInferenceEmbedding randomChunkedInferenceEmbeddingSparse(Li for (var token : input.split("\\s+")) { tokens.add(new WeightedToken(token, withFloats ? randomFloat() : randomIntBetween(1, 255))); } - chunks.add(new SparseEmbeddingResults.Chunk(tokens, input, new ChunkedInference.TextOffset(0, input.length()))); + chunks.add(new SparseEmbeddingResults.Chunk(tokens, new ChunkedInference.TextOffset(0, input.length()))); } return new ChunkedInferenceEmbedding(chunks); } @@ -243,7 +243,7 @@ public static SemanticTextField semanticTextFieldFromChunkedInferenceResults( final List chunks = new ArrayList<>(inputs.size()); int offsetAdjustment = 0; Iterator inputsIt = inputs.iterator(); - Iterator chunkIt = results.chunksAsMatchedTextAndByteReference(contentType.xContent()); + Iterator chunkIt = results.chunksAsByteReference(contentType.xContent()); while (inputsIt.hasNext() && chunkIt.hasNext()) { String input = inputsIt.next(); var chunk = chunkIt.next(); @@ -308,7 +308,7 @@ public static ChunkedInference toChunkedResult( String matchedText = matchedTextIt.next(); ChunkedInference.TextOffset offset = createOffset(useLegacyFormat, chunk, matchedText); var tokens = parseWeightedTokens(chunk.rawEmbeddings(), field.contentType()); - chunks.add(new SparseEmbeddingResults.Chunk(tokens, matchedText, offset)); + chunks.add(new SparseEmbeddingResults.Chunk(tokens, offset)); } } return new ChunkedInferenceEmbedding(chunks); @@ -329,7 +329,7 @@ public static ChunkedInference toChunkedResult( field.inference().modelSettings().dimensions(), field.contentType() ); - chunks.add(new TextEmbeddingFloatResults.Chunk(FloatConversionUtils.floatArrayOf(values), matchedText, offset)); + chunks.add(new TextEmbeddingFloatResults.Chunk(FloatConversionUtils.floatArrayOf(values), offset)); } } return new ChunkedInferenceEmbedding(chunks); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index 970dab45731bd..43c6422ee041f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -1444,7 +1444,7 @@ private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOExcep service.chunkedInfer( model, null, - List.of("abc", "xyz"), + List.of("a", "bb"), new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1457,7 +1457,7 @@ private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOExcep assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("abc", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.123F, 0.678F }, @@ -1469,7 +1469,7 @@ private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOExcep assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("xyz", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.223F, 0.278F }, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index cdd8494c9b343..369fdc6d46848 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -1191,7 +1191,7 @@ private void testChunkedInfer(AzureAiStudioEmbeddingsModel model) throws IOExcep service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1204,7 +1204,7 @@ private void testChunkedInfer(AzureAiStudioEmbeddingsModel model) throws IOExcep assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.0123f, -0.0123f }, @@ -1216,7 +1216,7 @@ private void testChunkedInfer(AzureAiStudioEmbeddingsModel model) throws IOExcep assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), 
instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 1.0123f, -1.0123f }, @@ -1232,7 +1232,7 @@ private void testChunkedInfer(AzureAiStudioEmbeddingsModel model) throws IOExcep var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), Matchers.is(2)); - assertThat(requestMap.get("input"), Matchers.is(List.of("foo", "bar"))); + assertThat(requestMap.get("input"), Matchers.is(List.of("a", "bb"))); assertThat(requestMap.get("user"), Matchers.is("user")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 7ee595cddf084..837c3ec8e6e21 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -1341,7 +1341,7 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1354,7 +1354,7 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.123f, -0.123f }, @@ -1366,7 +1366,7 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 1.123f, -1.123f }, @@ -1382,7 +1382,7 @@ private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOExcepti var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), Matchers.is(2)); - assertThat(requestMap.get("input"), Matchers.is(List.of("foo", "bar"))); + assertThat(requestMap.get("input"), Matchers.is(List.of("a", "bb"))); assertThat(requestMap.get("user"), Matchers.is("user")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 7d959b9bff0a0..f1f8fb0140a3f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1452,7 +1452,7 @@ private void 
testChunkedInfer(CohereEmbeddingsModel model) throws IOException { service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1465,7 +1465,7 @@ private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException { assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertArrayEquals( new float[] { 0.123f, -0.123f }, ((TextEmbeddingFloatResults.Chunk) floatResult.chunks().get(0)).embedding(), @@ -1476,7 +1476,7 @@ private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException { assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertArrayEquals( new float[] { 0.223f, -0.223f }, ((TextEmbeddingFloatResults.Chunk) floatResult.chunks().get(0)).embedding(), @@ -1495,7 +1495,7 @@ private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException { var requestMap = entityAsMap(webServer.requests().get(0).getBody()); MatcherAssert.assertThat( requestMap, - is(Map.of("texts", List.of("foo", "bar"), "model", "model", "embedding_types", List.of("float"))) + is(Map.of("texts", List.of("a", "bb"), "model", "model", "embedding_types", List.of("float"))) ); } } @@ -1551,7 +1551,7 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1564,7 +1564,7 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var byteResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(byteResult.chunks(), hasSize(1)); - assertEquals("foo", byteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), byteResult.chunks().get(0).offset()); assertThat(byteResult.chunks().get(0), instanceOf(TextEmbeddingByteResults.Chunk.class)); assertArrayEquals(new byte[] { 23, -23 }, ((TextEmbeddingByteResults.Chunk) byteResult.chunks().get(0)).embedding()); } @@ -1572,7 +1572,7 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var byteResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(byteResult.chunks(), hasSize(1)); - assertEquals("bar", byteResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), byteResult.chunks().get(0).offset()); assertThat(byteResult.chunks().get(0), instanceOf(TextEmbeddingByteResults.Chunk.class)); assertArrayEquals(new byte[] { 24, -24 }, ((TextEmbeddingByteResults.Chunk) byteResult.chunks().get(0)).embedding()); } @@ -1588,7 +1588,7 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { var requestMap = entityAsMap(webServer.requests().get(0).getBody()); 
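        // entityAsMap parses the JSON body of the first request captured by the test web server; the
        // assertion below checks that the shortened inputs ("a", "bb") were sent together with the
        // int8 embedding type.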
MatcherAssert.assertThat( requestMap, - is(Map.of("texts", List.of("foo", "bar"), "model", "model", "embedding_types", List.of("int8"))) + is(Map.of("texts", List.of("a", "bb"), "model", "model", "embedding_types", List.of("int8"))) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 2ecd39b3991b8..de3dac3577d44 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -566,7 +566,6 @@ public void testChunkedInfer_PassesThrough() throws IOException { List.of( new SparseEmbeddingResults.Chunk( List.of(new WeightedToken("hello", 2.1259406f), new WeightedToken("greet", 1.7073475f)), - "input text", new ChunkedInference.TextOffset(0, "input text".length()) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index e7e654b599fe6..2bf47b06c771c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -902,7 +902,7 @@ private void testChunkInfer_e5(ChunkingSettings chunkingSettings) throws Interru ((TextEmbeddingFloatResults.Chunk) result1.chunks().get(0)).embedding(), 0.0001f ); - assertEquals("foo", result1.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), result1.chunks().get(0).offset()); assertThat(chunkedResponse.get(1), instanceOf(ChunkedInferenceEmbedding.class)); var result2 = (ChunkedInferenceEmbedding) chunkedResponse.get(1); assertThat(result2.chunks(), hasSize(1)); @@ -912,7 +912,7 @@ private void testChunkInfer_e5(ChunkingSettings chunkingSettings) throws Interru ((TextEmbeddingFloatResults.Chunk) result2.chunks().get(0)).embedding(), 0.0001f ); - assertEquals("bar", result2.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), result2.chunks().get(0).offset()); gotResults.set(true); }, ESTestCase::fail); @@ -923,7 +923,7 @@ private void testChunkInfer_e5(ChunkingSettings chunkingSettings) throws Interru service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), Map.of(), InputType.SEARCH, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -977,7 +977,7 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int ((TextExpansionResults) mlTrainedModelResults.get(0)).getWeightedTokens(), ((SparseEmbeddingResults.Chunk) result1.chunks().get(0)).weightedTokens() ); - assertEquals("foo", result1.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), result1.chunks().get(0).offset()); assertThat(chunkedResponse.get(1), instanceOf(ChunkedInferenceEmbedding.class)); var result2 = (ChunkedInferenceEmbedding) chunkedResponse.get(1); assertThat(result2.chunks().get(0), instanceOf(SparseEmbeddingResults.Chunk.class)); @@ -985,7 +985,7 @@ private void 
testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int ((TextExpansionResults) mlTrainedModelResults.get(1)).getWeightedTokens(), ((SparseEmbeddingResults.Chunk) result2.chunks().get(0)).weightedTokens() ); - assertEquals("bar", result2.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), result2.chunks().get(0).offset()); gotResults.set(true); }, ESTestCase::fail); @@ -995,7 +995,7 @@ private void testChunkInfer_Sparse(ChunkingSettings chunkingSettings) throws Int service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), Map.of(), InputType.SEARCH, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1049,7 +1049,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte ((TextExpansionResults) mlTrainedModelResults.get(0)).getWeightedTokens(), ((SparseEmbeddingResults.Chunk) result1.chunks().get(0)).weightedTokens() ); - assertEquals("foo", result1.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), result1.chunks().get(0).offset()); assertThat(chunkedResponse.get(1), instanceOf(ChunkedInferenceEmbedding.class)); var result2 = (ChunkedInferenceEmbedding) chunkedResponse.get(1); assertThat(result2.chunks().get(0), instanceOf(SparseEmbeddingResults.Chunk.class)); @@ -1057,7 +1057,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte ((TextExpansionResults) mlTrainedModelResults.get(1)).getWeightedTokens(), ((SparseEmbeddingResults.Chunk) result2.chunks().get(0)).weightedTokens() ); - assertEquals("bar", result2.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), result2.chunks().get(0).offset()); gotResults.set(true); }, ESTestCase::fail); @@ -1067,7 +1067,7 @@ private void testChunkInfer_Elser(ChunkingSettings chunkingSettings) throws Inte service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), Map.of(), InputType.SEARCH, InferenceAction.Request.DEFAULT_TIMEOUT, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index 9828a4f21ab51..9343d1c25f48f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -844,7 +844,7 @@ public void testChunkedInfer_ChunkingSettingsSet() throws IOException { private void testChunkedInfer(String modelId, String apiKey, GoogleAiStudioEmbeddingsModel model) throws IOException { - var input = List.of("foo", "bar"); + var input = List.of("a", "bb"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -881,7 +881,7 @@ private void testChunkedInfer(String modelId, String apiKey, GoogleAiStudioEmbed assertThat(results.get(0), instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals(input.get(0), floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, input.get(0).length()), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( 
Arrays.equals( @@ -896,7 +896,7 @@ private void testChunkedInfer(String modelId, String apiKey, GoogleAiStudioEmbed assertThat(results.get(1), instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals(input.get(1), floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, input.get(1).length()), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( Arrays.equals( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java index 1050ac137be8d..d732f4f85f60d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java @@ -111,7 +111,6 @@ public void testChunkedInfer_CallsInfer_Elser_ConvertsFloatResponse() throws IOE List.of( new SparseEmbeddingResults.Chunk( List.of(new WeightedToken(".", 0.13315596f)), - "abc", new ChunkedInference.TextOffset(0, "abc".length()) ) ) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index b9e7cda1461cc..32a597aecb410 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -787,7 +787,6 @@ public void testChunkedInfer_CallsInfer_TextEmbedding_ConvertsFloatResponse() th assertThat(result, CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var embeddingResult = (ChunkedInferenceEmbedding) result; assertThat(embeddingResult.chunks(), hasSize(1)); - assertThat(embeddingResult.chunks().get(0).matchedText(), is("abc")); assertThat(embeddingResult.chunks().get(0).offset(), is(new ChunkedInference.TextOffset(0, "abc".length()))); assertThat(embeddingResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( @@ -842,7 +841,7 @@ public void testChunkedInfer() throws IOException { assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("abc", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 3), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.123f, -0.123f }, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index 74d055d44363d..d74c9a7eafd06 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -686,7 +686,7 @@ public void testChunkedInfer_ChunkingSettingsSet() throws IOException { } private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws IOException { - var input = List.of("foo", "bar"); + var input = List.of("a", "bb"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -733,7 +733,7 @@ private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws assertThat(results.get(0), instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals(input.get(0), floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, input.get(0).length()), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( Arrays.equals( @@ -748,7 +748,7 @@ private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws assertThat(results.get(1), instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals(input.get(1), floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, input.get(1).length()), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( Arrays.equals( @@ -763,7 +763,7 @@ private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap, aMapWithSize(3)); - assertThat(requestMap, is(Map.of("project_id", projectId, "inputs", List.of("foo", "bar"), "model_id", modelId))); + assertThat(requestMap, is(Map.of("project_id", projectId, "inputs", List.of("a", "bb"), "model_id", modelId))); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java index 5d2ab9e6d2f57..392069be01908 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java @@ -1819,7 +1819,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(JinaAIEmbeddingsModel mode service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1832,7 +1832,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(JinaAIEmbeddingsModel mode assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), 
Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.123f, -0.123f }, @@ -1844,7 +1844,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(JinaAIEmbeddingsModel mode assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.223f, -0.223f }, @@ -1864,7 +1864,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(JinaAIEmbeddingsModel mode var requestMap = entityAsMap(webServer.requests().get(0).getBody()); MatcherAssert.assertThat( requestMap, - is(Map.of("input", List.of("foo", "bar"), "model", "jina-clip-v2", "embedding_type", "float")) + is(Map.of("input", List.of("a", "bb"), "model", "jina-clip-v2", "embedding_type", "float")) ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index d608f4a33ff52..4aff8732ed9ab 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -1857,7 +1857,7 @@ private void testChunkedInfer(OpenAiEmbeddingsModel model) throws IOException { service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1870,7 +1870,7 @@ private void testChunkedInfer(OpenAiEmbeddingsModel model) throws IOException { assertThat(results.get(0), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(0); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( Arrays.equals( @@ -1883,7 +1883,7 @@ private void testChunkedInfer(OpenAiEmbeddingsModel model) throws IOException { assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().get(0).matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().get(0).offset()); assertThat(floatResult.chunks().get(0), Matchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertTrue( Arrays.equals( @@ -1901,7 +1901,7 @@ private void testChunkedInfer(OpenAiEmbeddingsModel model) throws IOException { var requestMap = entityAsMap(webServer.requests().get(0).getBody()); assertThat(requestMap.size(), Matchers.is(3)); - assertThat(requestMap.get("input"), Matchers.is(List.of("foo", "bar"))); + assertThat(requestMap.get("input"), Matchers.is(List.of("a", "bb"))); assertThat(requestMap.get("model"), Matchers.is("model")); assertThat(requestMap.get("user"), Matchers.is("user")); 
} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceTests.java index 6a0428e962f52..3a5fce350046e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/voyageai/VoyageAIServiceTests.java @@ -1826,7 +1826,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(VoyageAIEmbeddingsModel mo service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("a", "bb"), new HashMap<>(), InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT, @@ -1839,7 +1839,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(VoyageAIEmbeddingsModel mo assertThat(results.getFirst(), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.getFirst(); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("foo", floatResult.chunks().getFirst().matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 1), floatResult.chunks().getFirst().offset()); assertThat(floatResult.chunks().getFirst(), CoreMatchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.123f, -0.123f }, @@ -1851,7 +1851,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(VoyageAIEmbeddingsModel mo assertThat(results.get(1), CoreMatchers.instanceOf(ChunkedInferenceEmbedding.class)); var floatResult = (ChunkedInferenceEmbedding) results.get(1); assertThat(floatResult.chunks(), hasSize(1)); - assertEquals("bar", floatResult.chunks().getFirst().matchedText()); + assertEquals(new ChunkedInference.TextOffset(0, 2), floatResult.chunks().getFirst().offset()); assertThat(floatResult.chunks().getFirst(), CoreMatchers.instanceOf(TextEmbeddingFloatResults.Chunk.class)); assertArrayEquals( new float[] { 0.223f, -0.223f }, @@ -1871,7 +1871,7 @@ private void test_Embedding_ChunkedInfer_BatchesCalls(VoyageAIEmbeddingsModel mo var requestMap = entityAsMap(webServer.requests().getFirst().getBody()); MatcherAssert.assertThat( requestMap, - is(Map.of("input", List.of("foo", "bar"), "model", "voyage-3-large", "output_dtype", "float", "output_dimension", 1024)) + is(Map.of("input", List.of("a", "bb"), "model", "voyage-3-large", "output_dtype", "float", "output_dimension", 1024)) ); } } From 47706b505f6510c3314a8a83c8c3f68c3d0eaefc Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 5 Mar 2025 13:39:58 -0700 Subject: [PATCH 51/54] Add index mode to get data stream API (#122486) This commit adds the `index_mode` for both the data stream and each backing index to the output of `GET /_data_stream`. An example looks like: ``` { "data_streams" : [ { "name" : "foo-things", "indices" : [ { "index_name" : ".ds-foo-things-2025.02.13-000001", ... "index_mode" : "standard" } ], ... "index_mode" : "standard" }, { "name" : "logs-foo-bar", "indices" : [ { "index_name" : ".ds-logs-foo-bar-2025.02.13-000001", ... "index_mode" : "logsdb" }, { "index_name" : ".ds-logs-foo-bar-2025.02.13-000002", ... "index_mode" : "logsdb" } ], ... 
"index_mode" : "logsdb", } ] } ``` --- docs/changelog/122486.yaml | 5 + .../indices/get-data-stream.asciidoc | 376 ++++++++++++++++++ .../action/TransportGetDataStreamsAction.java | 70 +++- .../action/GetDataStreamsResponseTests.java | 30 +- .../TransportGetDataStreamsActionTests.java | 65 +++ .../org/elasticsearch/TransportVersions.java | 1 + .../datastreams/GetDataStreamAction.java | 44 +- .../MetadataIndexTemplateService.java | 7 +- .../datastreams/GetDataStreamActionTests.java | 1 + .../LogsdbIndexModeSettingsProvider.java | 9 +- .../LogsdbIndexModeSettingsProviderTests.java | 3 +- 11 files changed, 585 insertions(+), 26 deletions(-) create mode 100644 docs/changelog/122486.yaml create mode 100644 docs/reference/indices/get-data-stream.asciidoc diff --git a/docs/changelog/122486.yaml b/docs/changelog/122486.yaml new file mode 100644 index 0000000000000..027d2a5e63ba3 --- /dev/null +++ b/docs/changelog/122486.yaml @@ -0,0 +1,5 @@ +pr: 122486 +summary: Add index mode to get data stream API +area: Data streams +type: enhancement +issues: [] diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc new file mode 100644 index 0000000000000..a46d247d9a9a0 --- /dev/null +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -0,0 +1,376 @@ +[role="xpack"] +[[indices-get-data-stream]] +=== Get data stream API +++++ +Get data stream +++++ + +.New API reference +[sidebar] +-- +For the most up-to-date API details, refer to {api-es}/group/endpoint-data-stream[Data stream APIs]. +-- + +Retrieves information about one or more <>. +See <>. + +//// +[source,console] +---- +PUT /_ilm/policy/my-lifecycle-policy +{ + "policy": { + "phases": { + "hot": { + "actions": { + "rollover": { + "max_primary_shard_size": "25GB" + } + } + }, + "delete": { + "min_age": "30d", + "actions": { + "delete": {} + } + } + } + } +} + +PUT /_index_template/my-index-template +{ + "index_patterns": [ "my-data-stream*" ], + "data_stream": {}, + "template": { + "settings": { + "index.lifecycle.name": "my-lifecycle-policy" + } + }, + "_meta": { + "my-meta-field": "foo" + } +} + +PUT /_data_stream/my-data-stream + +POST /my-data-stream/_rollover + +PUT /_data_stream/my-data-stream-two + +DELETE /_data_stream/my-data-stream*/_lifecycle +---- +// TESTSETUP +//// + +//// +[source,console] +---- +DELETE /_data_stream/* +DELETE /_index_template/* +DELETE /_ilm/policy/my-lifecycle-policy +---- +// TEARDOWN +//// + +[source,console] +---- +GET /_data_stream/my-data-stream +---- + +[[get-data-stream-api-request]] +==== {api-request-title} + +`GET /_data_stream/` + +[[get-data-stream-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the +`view_index_metadata` or `manage` <> +for the data stream. + +[[get-data-stream-api-path-params]] +==== {api-path-parms-title} + +``:: +(Optional, string) +Comma-separated list of data stream names used to limit the request. Wildcard +(`*`) expressions are supported. If omitted, all data streams will be +returned. + +[role="child_attributes"] +[[get-data-stream-api-query-parms]] +==== {api-query-parms-title} + +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=ds-expand-wildcards] ++ +Defaults to `open`. + +`include_defaults`:: +(Optional, Boolean) Functionality in preview:[]. If `true`, return all default settings in the response. +Defaults to `false`. + +`verbose`:: +(Optional, Boolean). 
If `true`, returns the `maximum_timestamp` corresponding to the `@timestamp` field for documents in the data stream. +Defaults to `false`. + +[role="child_attributes"] +[[get-data-stream-api-response-body]] +==== {api-response-body-title} + +`data_streams`:: +(array of objects) +Contains information about retrieved data streams. ++ +.Properties of objects in `data_streams` +[%collapsible%open] +==== +`name`:: +(string) +Name of the data stream. + +`timestamp_field`:: +(object) +Contains information about the data stream's `@timestamp` field. ++ +.Properties of `timestamp_field` +[%collapsible%open] +===== +`name`:: +(string) +Name of the data stream's timestamp field, which must be `@timestamp`. The +`@timestamp` field must be included in every document indexed to the data +stream. +===== + +`indices`:: +(array of objects) +Array of objects containing information about the data stream's backing +indices. ++ +The last item in this array contains information about the stream's current +<>. ++ +.Properties of `indices` objects +[%collapsible%open] +===== +`index_name`:: +(string) +Name of the backing index. For naming conventions, see +<>. + +`index_uuid`:: +(string) +Universally unique identifier (UUID) for the index. + +`prefer_ilm`:: +(boolean) +Functionality in preview:[]. Indicates if this index is configured to prefer {ilm} +when both {ilm-cap} and <> are configured to +manage this index. + +`managed_by`:: +(string) +Functionality in preview:[]. Indicates the system that managed this index. +===== + +`generation`:: +(integer) +Current <> for the data stream. This number +acts as a cumulative count of the stream's rollovers, starting at `1`. + +`_meta`:: +(object) +Custom metadata for the stream, copied from the `_meta` object of the +stream's matching <>. If empty, +the response omits this property. + +`status`:: +(string) +<> of the data stream. ++ +This health status is based on the state of the primary and replica shards of +the stream's backing indices. ++ +.Values for `status` +[%collapsible%open] +===== +`GREEN`::: +All shards are assigned. + +`YELLOW`::: +All primary shards are assigned, but one or more replica shards are +unassigned. + +`RED`::: +One or more primary shards are unassigned, so some data is unavailable. +===== + +`template`:: +(string) +Name of the index template used to create the data stream's backing indices. ++ +The template's index pattern must match the name of this data stream. See +<>. + +`ilm_policy`:: +(string) +Name of the current {ilm-init} lifecycle policy in the stream's matching index +template. This lifecycle policy is set in the `index.lifecycle.name` setting. ++ +If the template does not include a lifecycle policy, this property is not +included in the response. ++ +NOTE: A data stream's backing indices may be assigned different lifecycle +policies. To retrieve the lifecycle policy for individual backing indices, +use the <>. + +`next_generation_managed_by`:: +(string) +Functionality in preview:[]. Indicates the system that will manage the next generation index +(i.e. the next data stream write index). + +`prefer_ilm`:: +(boolean) +Functionality in preview:[]. Indicates if the index template used to create the data +stream's backing indices is configured to prefer {ilm-cap} when both {ilm-cap} and +<> are configured to manage this index. + +`hidden`:: +(Boolean) If `true`, the data stream is <>. 
+ +`system`:: +(Boolean) +If `true`, the data stream is created and managed by an Elastic stack component +and cannot be modified through normal user interaction. + +`allow_custom_routing`:: +(Boolean) +If `true`, the data stream allows custom routing on write requests. + +`replicated`:: +(Boolean) +If `true`, the data stream is created and managed by {ccr} and the local +cluster cannot write into this data stream or change its mappings. + +`lifecycle`:: +(object) +Functionality in preview:[]. Contains the configuration for the data stream lifecycle management of this data stream. ++ +.Properties of `lifecycle` +[%collapsible%open] +===== +`data_retention`:: +(string) +If defined, every document added to this data stream will be stored at least for this time frame. Any time after this +duration the document could be deleted. When empty, every document in this data stream will be stored indefinitely. + +`rollover`:: +(object) +The conditions which will trigger the rollover of a backing index as configured by the cluster setting +`cluster.lifecycle.default.rollover`. This property is an implementation detail and it will only be retrieved when the query +param `include_defaults` is set to `true`. The contents of this field are subject to change. +===== + +`rollover_on_write`:: +(Boolean) +If `true`, the next write to this data stream will trigger a rollover first and the document will be +indexed in the new backing index. If the rollover fails, the indexing request will fail too. +==== + +[[get-data-stream-api-example]] +==== {api-examples-title} + +[source,console] +---- +GET _data_stream/my-data-stream* +---- + +The API returns the following response: + +[source,console-result] +---- +{ + "data_streams": [ + { + "name": "my-data-stream", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-my-data-stream-2099.03.07-000001", + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management", + "index_mode": "standard" + }, + { + "index_name": ".ds-my-data-stream-2099.03.08-000002", + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management", + "index_mode": "standard" + } + ], + "generation": 2, + "_meta": { + "my-meta-field": "foo" + }, + "status": "GREEN", + "index_mode": "standard", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, + "template": "my-index-template", + "ilm_policy": "my-lifecycle-policy", + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false, + "rollover_on_write": false + }, + { + "name": "my-data-stream-two", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-my-data-stream-two-2099.03.08-000001", + "index_uuid": "3liBu2SYS5axasRt6fUIpA", + "prefer_ilm": true, + "ilm_policy": "my-lifecycle-policy", + "managed_by": "Index Lifecycle Management", + "index_mode": "standard" + } + ], + "generation": 1, + "_meta": { + "my-meta-field": "foo" + }, + "status": "YELLOW", + "index_mode": "standard", + "next_generation_managed_by": "Index Lifecycle Management", + "prefer_ilm": true, + "template": "my-index-template", + "ilm_policy": "my-lifecycle-policy", + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false, + "rollover_on_write": false + } + ] +} +---- +// TESTRESPONSE[s/"index_name": 
".ds-my-data-stream-2099.03.07-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-my-data-stream-2099.03.08-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-my-data-stream-two-2099.03.08-000001"/"index_name": $body.data_streams.1.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "3liBu2SYS5axasRt6fUIpA"/"index_uuid": $body.data_streams.1.indices.0.index_uuid/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] +// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_store":{"enabled": false, "indices": [], "rollover_on_write": true}/] diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java index 5728e5c0a8dbf..f37158ba3fc6e 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.health.ClusterStateHealth; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamFailureStoreSettings; import org.elasticsearch.cluster.metadata.DataStreamGlobalRetentionSettings; @@ -40,6 +41,9 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.index.IndexSettingProviders; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.SystemDataStreamDescriptor; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.injection.guice.Inject; @@ -53,6 +57,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.stream.Collectors; @@ -68,6 +73,7 @@ public class TransportGetDataStreamsAction extends TransportMasterNodeReadProjec private final ClusterSettings clusterSettings; private final DataStreamGlobalRetentionSettings globalRetentionSettings; private final DataStreamFailureStoreSettings dataStreamFailureStoreSettings; + private final IndexSettingProviders indexSettingProviders; private final Client client; @Inject @@ -81,6 +87,7 @@ public TransportGetDataStreamsAction( SystemIndices systemIndices, DataStreamGlobalRetentionSettings globalRetentionSettings, DataStreamFailureStoreSettings dataStreamFailureStoreSettings, + IndexSettingProviders indexSettingProviders, Client client ) { super( @@ -99,6 +106,7 @@ public TransportGetDataStreamsAction( this.globalRetentionSettings = globalRetentionSettings; clusterSettings = clusterService.getClusterSettings(); this.dataStreamFailureStoreSettings = dataStreamFailureStoreSettings; + this.indexSettingProviders = indexSettingProviders; this.client = new OriginSettingClient(client, "stack"); } @@ -131,6 +139,7 @@ public 
void onResponse(DataStreamsStatsAction.Response response) { clusterSettings, globalRetentionSettings, dataStreamFailureStoreSettings, + indexSettingProviders, maxTimestamps ) ); @@ -151,12 +160,43 @@ public void onFailure(Exception e) { clusterSettings, globalRetentionSettings, dataStreamFailureStoreSettings, + indexSettingProviders, null ) ); } } + /** + * Resolves the index mode ("index.mode" setting) for the given data stream, from the template or additional setting providers + */ + @Nullable + static IndexMode resolveMode( + ProjectState state, + IndexSettingProviders indexSettingProviders, + DataStream dataStream, + Settings settings, + ComposableIndexTemplate indexTemplate + ) { + IndexMode indexMode = state.metadata().retrieveIndexModeFromTemplate(indexTemplate); + for (IndexSettingProvider provider : indexSettingProviders.getIndexSettingProviders()) { + Settings additionalSettings = provider.getAdditionalIndexSettings( + MetadataIndexTemplateService.VALIDATE_INDEX_NAME, + dataStream.getName(), + indexMode, + state.metadata(), + Instant.now(), + settings, + List.of() + ); + var rawMode = additionalSettings.get(IndexSettings.MODE.getKey()); + if (rawMode != null) { + indexMode = Enum.valueOf(IndexMode.class, rawMode.toUpperCase(Locale.ROOT)); + } + } + return indexMode; + } + static GetDataStreamAction.Response innerOperation( ProjectState state, GetDataStreamAction.Request request, @@ -165,6 +205,7 @@ static GetDataStreamAction.Response innerOperation( ClusterSettings clusterSettings, DataStreamGlobalRetentionSettings globalRetentionSettings, DataStreamFailureStoreSettings dataStreamFailureStoreSettings, + IndexSettingProviders indexSettingProviders, @Nullable Map maxTimestamps ) { List dataStreams = getDataStreams(state.metadata(), indexNameExpressionResolver, request); @@ -177,6 +218,7 @@ static GetDataStreamAction.Response innerOperation( final String indexTemplate; boolean indexTemplatePreferIlmValue = true; String ilmPolicyName = null; + IndexMode indexMode = dataStream.getIndexMode(); if (dataStream.isSystem()) { SystemDataStreamDescriptor dataStreamDescriptor = systemIndices.findMatchingDataStreamDescriptor(dataStream.getName()); indexTemplate = dataStreamDescriptor != null ? dataStreamDescriptor.getDataStreamName() : null; @@ -186,6 +228,15 @@ static GetDataStreamAction.Response innerOperation( dataStreamDescriptor.getComponentTemplates() ); ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + if (indexMode == null) { + indexMode = resolveMode( + state, + indexSettingProviders, + dataStream, + settings, + dataStreamDescriptor.getComposableIndexTemplate() + ); + } indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } } else { @@ -193,6 +244,15 @@ static GetDataStreamAction.Response innerOperation( if (indexTemplate != null) { Settings settings = MetadataIndexTemplateService.resolveSettings(state.metadata(), indexTemplate); ilmPolicyName = settings.get(IndexMetadata.LIFECYCLE_NAME); + if (indexMode == null && state.metadata().templatesV2().get(indexTemplate) != null) { + indexMode = resolveMode( + state, + indexSettingProviders, + dataStream, + settings, + state.metadata().templatesV2().get(indexTemplate) + ); + } indexTemplatePreferIlmValue = PREFER_ILM_SETTING.get(settings); } else { LOGGER.warn( @@ -285,7 +345,9 @@ public int compareTo(IndexInfo o) { timeSeries, backingIndicesSettingsValues, indexTemplatePreferIlmValue, - maxTimestamps == null ? 
null : maxTimestamps.get(dataStream.getName()), + // Default to standard mode if not specified; should we set this to "unset" or "unspecified" instead? + indexMode == null ? IndexMode.STANDARD.getName() : indexMode.getName() ) ); } @@ -314,7 +376,11 @@ private static void collectIndexSettingsValues( } else { managedBy = ManagedBy.UNMANAGED; } - backingIndicesSettingsValues.put(index, new IndexProperties(preferIlm, indexMetadata.getLifecyclePolicyName(), managedBy)); + String indexMode = IndexSettings.MODE.get(indexMetadata.getSettings()).getName(); + backingIndicesSettingsValues.put( + index, + new IndexProperties(preferIlm, indexMetadata.getLifecyclePolicyName(), managedBy, indexMode) + ); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index 9414943cbb439..d9efa4d458f49 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -91,13 +91,13 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti String ilmPolicyName = "rollover-30days"; Map indexSettingsValues = Map.of( firstGenerationIndex, - new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM, null), secondGenerationIndex, - new Response.IndexProperties(false, ilmPolicyName, ManagedBy.LIFECYCLE), + new Response.IndexProperties(false, ilmPolicyName, ManagedBy.LIFECYCLE, null), writeIndex, - new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE), + new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE, null), failureStoreIndex, - new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE) + new Response.IndexProperties(false, null, ManagedBy.LIFECYCLE, null) ); Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( @@ -109,6 +109,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti null, indexSettingsValues, false, + null, null ); Response response = new Response(List.of(dataStreamInfo)); @@ -195,13 +196,13 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti String ilmPolicyName = "rollover-30days"; Map indexSettingsValues = Map.of( firstGenerationIndex, - new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM, null), secondGenerationIndex, - new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM), + new Response.IndexProperties(true, ilmPolicyName, ManagedBy.ILM, null), writeIndex, - new Response.IndexProperties(false, null, ManagedBy.UNMANAGED), + new Response.IndexProperties(false, null, ManagedBy.UNMANAGED, null), failureStoreIndex, - new Response.IndexProperties(false, null, ManagedBy.UNMANAGED) + new Response.IndexProperties(false, null, ManagedBy.UNMANAGED, null) ); Response.DataStreamInfo dataStreamInfo = new Response.DataStreamInfo( @@ -213,6 +214,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti null, indexSettingsValues, false, + null, null ); Response response = new Response(List.of(dataStreamInfo)); @@ -309,7 +311,8 @@ private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) new Response.IndexProperties( randomBoolean(), 
randomAlphaOfLengthBetween(50, 100), - randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE, + null ) ) ); @@ -328,7 +331,8 @@ private Response.DataStreamInfo mutateInstance(Response.DataStreamInfo instance) timeSeries, indexSettings, templatePreferIlm, - maximumTimestamp + maximumTimestamp, + null ); } @@ -349,7 +353,8 @@ private Map generateRandomIndexSettingsValues() new Response.IndexProperties( randomBoolean(), randomAlphaOfLengthBetween(50, 100), - randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE + randomBoolean() ? ManagedBy.ILM : ManagedBy.LIFECYCLE, + randomBoolean() ? randomFrom(IndexMode.values()).getName() : null ) ); } @@ -367,7 +372,8 @@ private Response.DataStreamInfo generateRandomDataStreamInfo() { timeSeries != null ? new Response.TimeSeries(timeSeries) : null, generateRandomIndexSettingsValues(), randomBoolean(), - usually() ? randomNonNegativeLong() : null + usually() ? randomNonNegativeLong() : null, + usually() ? randomFrom(IndexMode.values()).getName() : null ); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsActionTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsActionTests.java index 0963647e24173..15252528e952b 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsActionTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsActionTests.java @@ -24,7 +24,9 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; @@ -32,6 +34,7 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.List; +import java.util.Set; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.getClusterStateWithDataStreams; import static org.elasticsearch.test.LambdaMatchers.transformedItemsMatch; @@ -181,6 +184,7 @@ public void testGetTimeSeriesDataStream() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat( @@ -213,6 +217,7 @@ public void testGetTimeSeriesDataStream() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat( @@ -266,6 +271,7 @@ public void testGetTimeSeriesDataStreamWithOutOfOrderIndices() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat( @@ -307,6 +313,7 @@ public void testGetTimeSeriesMixedDataStream() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); @@ -341,6 +348,7 @@ public void testPassingGlobalRetention() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); 
assertThat(response.getGlobalRetention(), nullValue()); @@ -367,6 +375,7 @@ public void testPassingGlobalRetention() { ClusterSettings.createBuiltInClusterSettings(), withGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat(response.getGlobalRetention(), equalTo(globalRetention)); @@ -394,6 +403,7 @@ public void testDataStreamIsFailureStoreEffectivelyEnabled_disabled() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat(response.getDataStreams(), hasSize(1)); @@ -423,6 +433,7 @@ public void testDataStreamIsFailureStoreEffectivelyEnabled_enabledExplicitly() { ClusterSettings.createBuiltInClusterSettings(), dataStreamGlobalRetentionSettings, emptyDataStreamFailureStoreSettings, + new IndexSettingProviders(Set.of()), null ); assertThat(response.getDataStreams(), hasSize(1)); @@ -457,9 +468,63 @@ public void testDataStreamIsFailureStoreEffectivelyEnabled_enabledByClusterSetti .build() ) ), + new IndexSettingProviders(Set.of()), null ); assertThat(response.getDataStreams(), hasSize(1)); assertThat(response.getDataStreams().getFirst().isFailureStoreEffectivelyEnabled(), is(true)); } + + public void testProvidersAffectMode() { + ClusterState state; + var projectId = randomProjectIdOrDefault(); + { + state = DataStreamTestHelper.getClusterStateWithDataStreams( + projectId, + List.of(Tuple.tuple("data-stream-1", 2)), + List.of(), + System.currentTimeMillis(), + Settings.EMPTY, + 0, + false, + false + ); + } + + var req = new GetDataStreamAction.Request(TEST_REQUEST_TIMEOUT, new String[] {}); + var response = TransportGetDataStreamsAction.innerOperation( + state.projectState(projectId), + req, + resolver, + systemIndices, + ClusterSettings.createBuiltInClusterSettings(), + dataStreamGlobalRetentionSettings, + emptyDataStreamFailureStoreSettings, + new IndexSettingProviders( + Set.of( + ( + indexName, + dataStreamName, + templateIndexMode, + metadata, + resolvedAt, + indexTemplateAndCreateRequestSettings, + combinedTemplateMappings) -> Settings.builder().put("index.mode", IndexMode.LOOKUP).build() + ) + ), + null + ); + assertThat(response.getDataStreams().getFirst().getIndexModeName(), equalTo("lookup")); + assertThat( + response.getDataStreams() + .getFirst() + .getIndexSettingsValues() + .values() + .stream() + .findFirst() + .map(GetDataStreamAction.Response.IndexProperties::indexMode) + .orElse("bad"), + equalTo("standard") + ); + } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 0dd856ab8c64e..ef6a0176b05fd 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -183,6 +183,7 @@ static TransportVersion def(int id) { public static final TransportVersion STORED_SCRIPT_CONTENT_LENGTH = def(9_019_0_00); public static final TransportVersion JINA_AI_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_020_0_00); public static final TransportVersion RE_REMOVE_MIN_COMPATIBLE_SHARD_NODE = def(9_021_0_00); + public static final TransportVersion INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM = def(9_022_0_00); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 5dd60a1122bf7..c0fae14434138 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -232,6 +232,7 @@ public static class DataStreamInfo implements SimpleDiffable, To ); public static final ParseField FAILURE_STORE_ENABLED = new ParseField("enabled"); public static final ParseField MAXIMUM_TIMESTAMP = new ParseField("maximum_timestamp"); + public static final ParseField INDEX_MODE = new ParseField("index_mode"); private final DataStream dataStream; private final ClusterHealthStatus dataStreamStatus; @@ -246,6 +247,8 @@ public static class DataStreamInfo implements SimpleDiffable, To private final boolean templatePreferIlmValue; @Nullable private final Long maximumTimestamp; + @Nullable + private final String indexMode; public DataStreamInfo( DataStream dataStream, @@ -256,7 +259,8 @@ public DataStreamInfo( @Nullable TimeSeries timeSeries, Map indexSettingsValues, boolean templatePreferIlmValue, - @Nullable Long maximumTimestamp + @Nullable Long maximumTimestamp, + @Nullable String indexMode ) { this.dataStream = dataStream; this.failureStoreEffectivelyEnabled = failureStoreEffectivelyEnabled; @@ -267,6 +271,7 @@ public DataStreamInfo( this.indexSettingsValues = indexSettingsValues; this.templatePreferIlmValue = templatePreferIlmValue; this.maximumTimestamp = maximumTimestamp; + this.indexMode = indexMode; } @SuppressWarnings("unchecked") @@ -287,6 +292,9 @@ public DataStreamInfo( : Map.of(); this.templatePreferIlmValue = in.getTransportVersion().onOrAfter(V_8_11_X) ? in.readBoolean() : true; this.maximumTimestamp = in.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0) ? in.readOptionalVLong() : null; + this.indexMode = in.getTransportVersion().onOrAfter(TransportVersions.INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM) + ? 
in.readOptionalString() + : null; } public DataStream getDataStream() { @@ -329,6 +337,11 @@ public Long getMaximumTimestamp() { return maximumTimestamp; } + @Nullable + public String getIndexModeName() { + return indexMode; + } + @Override public void writeTo(StreamOutput out) throws IOException { dataStream.writeTo(out); @@ -348,6 +361,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_16_0)) { out.writeOptionalVLong(maximumTimestamp); } + if (out.getTransportVersion().onOrAfter(TransportVersions.INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM)) { + out.writeOptionalString(indexMode); + } } @Override @@ -398,6 +414,9 @@ public XContentBuilder toXContent( if (this.maximumTimestamp != null) { builder.field(MAXIMUM_TIMESTAMP.getPreferredName(), this.maximumTimestamp); } + if (this.indexMode != null) { + builder.field(INDEX_MODE.getPreferredName(), indexMode); + } addAutoShardingEvent(builder, params, dataStream.getAutoShardingEvent()); if (timeSeries != null) { builder.startObject(TIME_SERIES.getPreferredName()); @@ -441,6 +460,7 @@ private XContentBuilder indicesToXContent(XContentBuilder builder, List i builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); } builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); + builder.field(INDEX_MODE.getPreferredName(), indexProperties.indexMode); } builder.endObject(); } @@ -500,7 +520,8 @@ public boolean equals(Object o) { && Objects.equals(ilmPolicyName, that.ilmPolicyName) && Objects.equals(timeSeries, that.timeSeries) && Objects.equals(indexSettingsValues, that.indexSettingsValues) - && Objects.equals(maximumTimestamp, that.maximumTimestamp); + && Objects.equals(maximumTimestamp, that.maximumTimestamp) + && Objects.equals(indexMode, that.indexMode); } @Override @@ -514,7 +535,8 @@ public int hashCode() { timeSeries, indexSettingsValues, templatePreferIlmValue, - maximumTimestamp + maximumTimestamp, + indexMode ); } } @@ -551,9 +573,18 @@ public int hashCode() { * Encapsulates the configured properties we want to display for each backing index. * They'll usually be settings values, but could also be additional properties derived from settings. */ - public record IndexProperties(boolean preferIlm, @Nullable String ilmPolicyName, ManagedBy managedBy) implements Writeable { + public record IndexProperties(boolean preferIlm, @Nullable String ilmPolicyName, ManagedBy managedBy, @Nullable String indexMode) + implements + Writeable { public IndexProperties(StreamInput in) throws IOException { - this(in.readBoolean(), in.readOptionalString(), in.readEnum(ManagedBy.class)); + this( + in.readBoolean(), + in.readOptionalString(), + in.readEnum(ManagedBy.class), + in.getTransportVersion().onOrAfter(TransportVersions.INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM) + ? 
in.readOptionalString() + : "unknown" + ); } @Override @@ -561,6 +592,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(preferIlm); out.writeOptionalString(ilmPolicyName); out.writeEnum(managedBy); + if (out.getTransportVersion().onOrAfter(TransportVersions.INCLUDE_INDEX_MODE_IN_GET_DATA_STREAM)) { + out.writeOptionalString(indexMode); + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java index d1b91d7817927..e58f766a9781b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java @@ -91,6 +91,9 @@ public class MetadataIndexTemplateService { public static final String DEFAULT_TIMESTAMP_FIELD = "@timestamp"; public static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING; + // Names used for validating templates when we do not know the index or data stream name + public static final String VALIDATE_INDEX_NAME = "validate-index-name"; + public static final String VALIDATE_DATA_STREAM_NAME = "validate-data-stream-name"; private static final CompressedXContent DEFAULT_TIMESTAMP_MAPPING_WITH_ROUTING; @@ -714,8 +717,8 @@ void validateIndexTemplateV2(ProjectMetadata projectMetadata, String name, Compo var finalSettings = Settings.builder(); for (var provider : indexSettingProviders) { var newAdditionalSettings = provider.getAdditionalIndexSettings( - "validate-index-name", - indexTemplate.getDataStreamTemplate() != null ? "validate-data-stream-name" : null, + VALIDATE_INDEX_NAME, + indexTemplate.getDataStreamTemplate() != null ? 
VALIDATE_DATA_STREAM_NAME : null, projectMetadata.retrieveIndexModeFromTemplate(indexTemplate), projectMetadata, now, diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/GetDataStreamActionTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/GetDataStreamActionTests.java index feb00728c858e..dfff09ffd5ddb 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/GetDataStreamActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/GetDataStreamActionTests.java @@ -92,6 +92,7 @@ private static GetDataStreamAction.Response.DataStreamInfo newDataStreamInfo(boo null, Map.of(), randomBoolean(), + null, null ); } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java index 28e3191ec7763..22a28e27dd243 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProvider.java @@ -12,6 +12,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.compress.CompressedXContent; @@ -100,7 +101,9 @@ public Settings getAdditionalIndexSettings( ) { Settings.Builder settingsBuilder = null; boolean isLogsDB = templateIndexMode == IndexMode.LOGSDB; - boolean isTemplateValidation = "validate-index-name".equals(indexName); + // This index name is used when validating component and index templates, we should skip this check in that case. + // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) method) + boolean isTemplateValidation = MetadataIndexTemplateService.VALIDATE_INDEX_NAME.equals(indexName); // Inject logsdb index mode, based on the logs pattern. if (isLogsdbEnabled @@ -118,8 +121,6 @@ && matchesLogsPattern(dataStreamName)) { if (mappingHints.hasSyntheticSourceUsage && supportFallbackToStoredSource.get() && minNodeVersion.get().get().onOrAfter(Version.V_8_17_0)) { - // This index name is used when validating component and index templates, we should skip this check in that case. - // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) method) boolean legacyLicensedUsageOfSyntheticSourceAllowed = isLegacyLicensedUsageOfSyntheticSourceAllowed( templateIndexMode, indexName, @@ -216,7 +217,7 @@ MappingHints getMappingHints( Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { - if ("validate-index-name".equals(indexName)) { + if (MetadataIndexTemplateService.VALIDATE_INDEX_NAME.equals(indexName)) { // This index name is used when validating component and index templates, we should skip this check in that case. // (See MetadataIndexTemplateService#validateIndexTemplateV2(...) 
method) return MappingHints.EMPTY; diff --git a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java index ca5246cb312c6..cf550c3a5ab72 100644 --- a/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java +++ b/x-pack/plugin/logsdb/src/test/java/org/elasticsearch/xpack/logsdb/LogsdbIndexModeSettingsProviderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.common.compress.CompressedXContent; @@ -475,7 +476,7 @@ public void testNewIndexHasSyntheticSourceUsage() throws IOException { } public void testValidateIndexName() throws IOException { - String indexName = "validate-index-name"; + String indexName = MetadataIndexTemplateService.VALIDATE_INDEX_NAME; String mapping = """ { "_doc": { From 9cd0db7f48f907b1c201b1d51da02b20930f2aca Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Thu, 6 Mar 2025 08:14:45 +1100 Subject: [PATCH 52/54] Update XPackPlugin for project awareness (#124087) This updates XPackPlugin.alreadyContainsXPackCustomMetadata to consider all projects --- .../org/elasticsearch/xpack/core/XPackPlugin.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index d8503033ef3a9..bcbda55977920 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.ProjectMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.DataTier; @@ -296,11 +297,15 @@ public static List nodesNotReadyForXPackCustomMetadata(ClusterSta private static boolean alreadyContainsXPackCustomMetadata(ClusterState clusterState) { final Metadata metadata = clusterState.metadata(); return metadata.custom(LicensesMetadata.TYPE) != null - || metadata.getProject().custom(MlMetadata.TYPE) != null - || metadata.getProject().custom(WatcherMetadata.TYPE) != null - || RoleMappingMetadata.getFromClusterState(clusterState).isEmpty() == false || clusterState.custom(TokenMetadata.TYPE) != null - || metadata.getProject().custom(TransformMetadata.TYPE) != null; + || metadata.projects().values().stream().anyMatch(XPackPlugin::alreadyContainsXPackCustomMetadata); + } + + private static boolean alreadyContainsXPackCustomMetadata(ProjectMetadata project) { + return project.custom(MlMetadata.TYPE) != null + || project.custom(WatcherMetadata.TYPE) != null + || RoleMappingMetadata.getFromProject(project).isEmpty() == false + || project.custom(TransformMetadata.TYPE) != null; } @Override From 206363664cafa9b8188794da6e912b452bc84e2b Mon 
Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 5 Mar 2025 13:48:20 -0800 Subject: [PATCH 53/54] Introduce allow_partial_results setting in ES|QL (#122890) This change introduces a cluster setting `esql.query.allow_partial_results` that allows enabling or disabling allow_partial_results in ES|QL at the cluster-wide level. Initially, this setting defaults to false, but it will be switched to true soon. The reason for not changing the default in this PR is that it requires adjusting many tests, which would make the PR too large. Instead, we will adjust the tests incrementally and switch the default when the tests are ready. This cluster setting is useful for falling back to the previous behavior (i.e., disabling allow_partial_results) if users upgrade to the new version and haven't updated their queries. Also, the default setting can be overridden on a per-request basis via a URL parameter (allow_partial_results) (changed from request body to URL parameter to conform to the proposal). Relates #122802 --- docs/changelog/122890.yaml | 5 + .../external-modules/error-query/build.gradle | 6 + .../test/esql/EsqlPartialResultsIT.java | 137 ++++++++++++++++++ .../test/errorquery/ErrorQueryPlugin.java | 55 ++++++- .../esql/action/EsqlQueryRequestBuilder.java | 2 + .../xpack/esql/qa/rest/RestEsqlTestCase.java | 9 ++ .../xpack/esql/action/EsqlNodeFailureIT.java | 44 ++++++ .../xpack/esql/action/EsqlQueryRequest.java | 7 +- .../esql/action/EsqlQueryRequestBuilder.java | 6 + .../xpack/esql/action/RequestXContent.java | 2 - .../esql/action/RestEsqlQueryAction.java | 4 + .../xpack/esql/plugin/EsqlPlugin.java | 9 +- .../esql/plugin/TransportEsqlQueryAction.java | 7 + 13 files changed, 286 insertions(+), 7 deletions(-) create mode 100644 docs/changelog/122890.yaml create mode 100644 test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java diff --git a/docs/changelog/122890.yaml b/docs/changelog/122890.yaml new file mode 100644 index 0000000000000..5f99cc94df361 --- /dev/null +++ b/docs/changelog/122890.yaml @@ -0,0 +1,5 @@ +pr: 122890 +summary: Introduce `allow_partial_results` setting in ES|QL +area: ES|QL +type: enhancement +issues: [] diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle index 3131e9bde004a..39b1c17706bb6 100644 --- a/test/external-modules/error-query/build.gradle +++ b/test/external-modules/error-query/build.gradle @@ -8,6 +8,12 @@ */ apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' + +tasks.named('javaRestTest') { + usesDefaultDistribution() + it.onlyIf("snapshot build") { buildParams.snapshotBuild } +} tasks.named('yamlRestTest').configure { it.onlyIf("snapshot build") { buildParams.snapshotBuild } diff --git a/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java b/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java new file mode 100644 index 0000000000000..0252e79c692c7 --- /dev/null +++ b/test/external-modules/error-query/src/javaRestTest/java/org/elasticsearch/test/esql/EsqlPartialResultsIT.java @@ -0,0 +1,137 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.test.esql; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class EsqlPartialResultsIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .module("test-error-query") + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .setting("esql.query.allow_partial_results", "true") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public Set populateIndices() throws Exception { + int nextId = 0; + { + createIndex("failing-index", Settings.EMPTY, """ + { + "runtime": { + "fail_me": { + "type": "long", + "script": { + "source": "", + "lang": "failing_field" + } + } + }, + "properties": { + "v": { + "type": "long" + } + } + } + """); + int numDocs = between(1, 50); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(nextId++); + Request doc = new Request("PUT", "failing-index/_doc/" + id); + doc.setJsonEntity("{\"v\": " + id + "}"); + client().performRequest(doc); + } + + } + Set okIds = new HashSet<>(); + { + createIndex("ok-index", Settings.EMPTY, """ + { + "properties": { + "v": { + "type": "long" + } + } + } + """); + int numDocs = between(1, 50); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(nextId++); + okIds.add(id); + Request doc = new Request("PUT", "ok-index/_doc/" + id); + doc.setJsonEntity("{\"v\": " + id + "}"); + client().performRequest(doc); + } + } + refresh(client(), "failing-index,ok-index"); + return okIds; + } + + public void testPartialResult() throws Exception { + Set okIds = populateIndices(); + String query = """ + { + "query": "FROM ok-index,failing-index | LIMIT 100 | KEEP fail_me,v" + } + """; + // allow_partial_results = true + { + Request request = new Request("POST", "/_query"); + request.setJsonEntity(query); + if (randomBoolean()) { + request.addParameter("allow_partial_results", "true"); + } + Response resp = client().performRequest(request); + Map results = entityAsMap(resp); + assertThat(results.get("is_partial"), equalTo(true)); + List columns = (List) results.get("columns"); + assertThat(columns, equalTo(List.of(Map.of("name", "fail_me", "type", "long"), Map.of("name", "v", "type", "long")))); + List values = (List) results.get("values"); + assertThat(values.size(), lessThanOrEqualTo(okIds.size())); + } + // allow_partial_results = false + { + Request request = new Request("POST", 
"/_query"); + request.setJsonEntity(""" + { + "query": "FROM ok-index,failing-index | LIMIT 100" + } + """); + request.addParameter("allow_partial_results", "false"); + var error = expectThrows(ResponseException.class, () -> client().performRequest(request)); + Response resp = error.getResponse(); + assertThat(resp.getStatusLine().getStatusCode(), equalTo(500)); + assertThat(EntityUtils.toString(resp.getEntity()), containsString("Accessing failing field")); + } + } +} diff --git a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryPlugin.java b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryPlugin.java index 3ea83ec755896..c6ad0c40c408f 100644 --- a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryPlugin.java +++ b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryPlugin.java @@ -9,21 +9,74 @@ package org.elasticsearch.test.errorquery; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import java.util.Collection; import java.util.List; +import java.util.Map; +import java.util.Set; import static java.util.Collections.singletonList; /** * Test plugin that exposes a way to simulate search shard failures and warnings. */ -public class ErrorQueryPlugin extends Plugin implements SearchPlugin { +public class ErrorQueryPlugin extends Plugin implements SearchPlugin, ScriptPlugin { public ErrorQueryPlugin() {} @Override public List> getQueries() { return singletonList(new QuerySpec<>(ErrorQueryBuilder.NAME, ErrorQueryBuilder::new, p -> ErrorQueryBuilder.PARSER.parse(p, null))); } + + public static final String FAILING_FIELD_LANG = "failing_field"; + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return FAILING_FIELD_LANG; + } + + @Override + @SuppressWarnings("unchecked") + public FactoryType compile( + String name, + String code, + ScriptContext context, + Map params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new IllegalStateException("Accessing failing field"); + } + }; + } + }; + } + + @Override + public Set> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java index a0a2bbc3bed19..0975f827b828a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/esql/action/EsqlQueryRequestBuilder.java @@ -39,4 +39,6 @@ public final ActionType action() { public 
abstract EsqlQueryRequestBuilder filter(QueryBuilder filter); + public abstract EsqlQueryRequestBuilder allowPartialResults(boolean allowPartialResults); + } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index accfd2f69ee64..470868b417991 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -127,6 +127,7 @@ public static class RequestObjectBuilder { private Boolean includeCCSMetadata = null; private CheckedConsumer filter; + private Boolean allPartialResults = null; public RequestObjectBuilder() throws IOException { this(randomFrom(XContentType.values())); @@ -204,6 +205,11 @@ public RequestObjectBuilder filter(CheckedConsumer return this; } + public RequestObjectBuilder allPartialResults(boolean allPartialResults) { + this.allPartialResults = allPartialResults; + return this; + } + public RequestObjectBuilder build() throws IOException { if (isBuilt == false) { if (tables != null) { @@ -1151,6 +1157,9 @@ static Request prepareRequestWithOptions(RequestObjectBuilder requestObject, Mod requestObject.build(); Request request = prepareRequest(mode); String mediaType = attachBody(requestObject, request); + if (requestObject.allPartialResults != null) { + request.addParameter("allow_partial_results", String.valueOf(requestObject.allPartialResults)); + } RequestOptions.Builder options = request.getOptions().toBuilder(); options.setWarningsHandler(WarningsHandler.PERMISSIVE); // We assert the warnings ourselves diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java index d14de89430589..08b81b2fd647b 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.util.ArrayList; import java.util.Collection; @@ -24,6 +25,7 @@ import java.util.List; import java.util.Set; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -122,4 +124,46 @@ public void testPartialResults() throws Exception { } } } + + public void testDefaultPartialResults() throws Exception { + Set okIds = populateIndices(); + assertAcked( + client().admin() + .cluster() + .prepareUpdateSettings(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS) + .setPersistentSettings(Settings.builder().put(EsqlPlugin.QUERY_ALLOW_PARTIAL_RESULTS.getKey(), true)) + ); + try { + // allow_partial_results = default + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM fail,ok | LIMIT 100"); + request.pragmas(randomPragmas()); + if (randomBoolean()) { + request.allowPartialResults(true); + } + try (EsqlQueryResponse resp = run(request)) { + 
+                    assertTrue(resp.isPartial());
+                    List<List<Object>> rows = EsqlTestUtils.getValuesList(resp);
+                    assertThat(rows.size(), lessThanOrEqualTo(okIds.size()));
+                }
+            }
+            // allow_partial_results = false
+            {
+                EsqlQueryRequest request = new EsqlQueryRequest();
+                request.query("FROM fail,ok | LIMIT 100");
+                request.pragmas(randomPragmas());
+                request.allowPartialResults(false);
+                IllegalStateException e = expectThrows(IllegalStateException.class, () -> run(request).close());
+                assertThat(e.getMessage(), equalTo("Accessing failing field"));
+            }
+        } finally {
+            assertAcked(
+                client().admin()
+                    .cluster()
+                    .prepareUpdateSettings(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS)
+                    .setPersistentSettings(Settings.builder().putNull(EsqlPlugin.QUERY_ALLOW_PARTIAL_RESULTS.getKey()))
+            );
+        }
+    }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java
index ee557930d1c23..bd0a7635a4653 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java
@@ -52,7 +52,7 @@ public class EsqlQueryRequest extends org.elasticsearch.xpack.core.esql.action.E
     private boolean keepOnCompletion;
     private boolean onSnapshotBuild = Build.current().isSnapshot();
     private boolean acceptedPragmaRisks = false;
-    private boolean allowPartialResults = false;
+    private Boolean allowPartialResults = null;
 
     /**
      * "Tables" provided in the request for use with things like {@code LOOKUP}.
@@ -232,12 +232,13 @@ public Map<String, Map<String, Column>> tables() {
         return tables;
     }
 
-    public boolean allowPartialResults() {
+    public Boolean allowPartialResults() {
         return allowPartialResults;
     }
 
-    public void allowPartialResults(boolean allowPartialResults) {
+    public EsqlQueryRequest allowPartialResults(boolean allowPartialResults) {
         this.allowPartialResults = allowPartialResults;
+        return this;
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java
index 7df5c95cbc953..09f01b3c60a20 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java
@@ -66,6 +66,12 @@ public EsqlQueryRequestBuilder keepOnCompletion(boolean keepOnCompletion) {
         return this;
     }
 
+    @Override
+    public EsqlQueryRequestBuilder allowPartialResults(boolean allowPartialResults) {
+        request.allowPartialResults(allowPartialResults);
+        return this;
+    }
+
     static { // plumb access from x-pack core
         SharedSecrets.setEsqlQueryRequestBuilderAccess(EsqlQueryRequestBuilder::newSyncEsqlQueryRequestBuilder);
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java
index a793f39e90ee3..e77d7b41aaca6 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RequestXContent.java
@@ -85,7 +85,6 @@ String fields() {
     static final ParseField WAIT_FOR_COMPLETION_TIMEOUT = new ParseField("wait_for_completion_timeout");
     static final ParseField KEEP_ALIVE = new ParseField("keep_alive");
     static final ParseField KEEP_ON_COMPLETION = new ParseField("keep_on_completion");
-    static final ParseField ALLOW_PARTIAL_RESULTS = new ParseField("allow_partial_results");
 
     private static final ObjectParser<EsqlQueryRequest, Void> SYNC_PARSER = objectParserSync(EsqlQueryRequest::syncEsqlQueryRequest);
     private static final ObjectParser<EsqlQueryRequest, Void> ASYNC_PARSER = objectParserAsync(EsqlQueryRequest::asyncEsqlQueryRequest);
@@ -115,7 +114,6 @@ private static void objectParserCommon(ObjectParser parser)
         parser.declareString((request, localeTag) -> request.locale(Locale.forLanguageTag(localeTag)), LOCALE_FIELD);
         parser.declareBoolean(EsqlQueryRequest::profile, PROFILE_FIELD);
         parser.declareField((p, r, c) -> new ParseTables(r, p).parseTables(), TABLES_FIELD, ObjectParser.ValueType.OBJECT);
-        parser.declareBoolean(EsqlQueryRequest::allowPartialResults, ALLOW_PARTIAL_RESULTS);
     }
 
     private static ObjectParser<EsqlQueryRequest, Void> objectParserSync(Supplier<EsqlQueryRequest> supplier) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
index ebe51cc2ab4e3..f86033a4781c1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
@@ -51,6 +51,10 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli
     }
 
     protected static RestChannelConsumer restChannelConsumer(EsqlQueryRequest esqlRequest, RestRequest request, NodeClient client) {
+        final Boolean partialResults = request.paramAsBoolean("allow_partial_results", null);
+        if (partialResults != null) {
+            esqlRequest.allowPartialResults(partialResults);
+        }
         LOGGER.debug("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query());
 
         return channel -> {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index 7b7b056741e27..6fe6cb2f154fe 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -102,6 +102,13 @@ public class EsqlPlugin extends Plugin implements ActionPlugin {
         Setting.Property.Dynamic
     );
 
+    public static final Setting<Boolean> QUERY_ALLOW_PARTIAL_RESULTS = Setting.boolSetting(
+        "esql.query.allow_partial_results",
+        false,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
+
     @Override
     public Collection<?> createComponents(PluginServices services) {
         CircuitBreaker circuitBreaker = services.indicesService().getBigArrays().breakerService().getBreaker("request");
@@ -151,7 +158,7 @@ protected XPackLicenseState getLicenseState() {
      */
     @Override
     public List<Setting<?>> getSettings() {
-        return List.of(QUERY_RESULT_TRUNCATION_DEFAULT_SIZE, QUERY_RESULT_TRUNCATION_MAX_SIZE);
+        return List.of(QUERY_RESULT_TRUNCATION_DEFAULT_SIZE, QUERY_RESULT_TRUNCATION_MAX_SIZE, QUERY_ALLOW_PARTIAL_RESULTS);
     }
 
     @Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
index c0e6704ff65ee..65562b9e65c27 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java
@@ -80,6 +80,7 @@ public class TransportEsqlQueryAction extends HandledTransportAction defaultAllowPartialResults = v);
     }
 
     @Override
@@ -194,6 +198,9 @@ public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener
     }
 
     private void innerExecute(Task task, EsqlQueryRequest request, ActionListener<EsqlQueryResponse> listener) {
+        if (request.allowPartialResults() == null) {
+            request.allowPartialResults(defaultAllowPartialResults);
+        }
         Configuration configuration = new Configuration(
             ZoneOffset.UTC,
             request.locale() != null ? request.locale() : Locale.US,

From d7b8b728e9189ce45bc1f6b3b277e62f83038224 Mon Sep 17 00:00:00 2001
From: Joe Gallo
Date: Wed, 5 Mar 2025 17:05:07 -0500
Subject: [PATCH 54/54] Cleanup RegisteredDomainProcessor (#124123)

---
 .../common/RegisteredDomainProcessor.java | 108 +++++++-----------
 1 file changed, 42 insertions(+), 66 deletions(-)

diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RegisteredDomainProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RegisteredDomainProcessor.java
index 97e26a9961c20..6c4cf3684e669 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RegisteredDomainProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RegisteredDomainProcessor.java
@@ -11,6 +11,7 @@
 
 import org.apache.http.conn.util.PublicSuffixMatcher;
 import org.apache.http.conn.util.PublicSuffixMatcherLoader;
+import org.elasticsearch.core.Nullable;
 import org.elasticsearch.ingest.AbstractProcessor;
 import org.elasticsearch.ingest.ConfigurationUtils;
 import org.elasticsearch.ingest.IngestDocument;
@@ -19,9 +20,9 @@
 import java.util.Map;
 
 public class RegisteredDomainProcessor extends AbstractProcessor {
-    private static final PublicSuffixMatcher SUFFIX_MATCHER = PublicSuffixMatcherLoader.getDefault();
 
     public static final String TYPE = "registered_domain";
+    private static final PublicSuffixMatcher SUFFIX_MATCHER = PublicSuffixMatcherLoader.getDefault();
 
     private final String field;
     private final String targetField;
@@ -47,17 +48,18 @@ public boolean getIgnoreMissing() {
     }
 
     @Override
-    public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
-        DomainInfo info = getRegisteredDomain(ingestDocument);
+    public IngestDocument execute(IngestDocument document) throws Exception {
+        final String fqdn = document.getFieldValue(field, String.class, ignoreMissing);
+        final DomainInfo info = getRegisteredDomain(fqdn);
         if (info == null) {
             if (ignoreMissing) {
-                return ingestDocument;
+                return document;
             } else {
                 throw new IllegalArgumentException("unable to set domain information for document");
             }
         }
         String fieldPrefix = targetField;
-        if (fieldPrefix.equals("") == false) {
+        if (fieldPrefix.isEmpty() == false) {
             fieldPrefix += ".";
         }
         String domainTarget = fieldPrefix + "domain";
@@ -65,30 +67,31 @@ public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
         String registeredDomainTarget = fieldPrefix + "registered_domain";
         String subdomainTarget = fieldPrefix + "subdomain";
         String topLevelDomainTarget = fieldPrefix + "top_level_domain";
-        if (info.getDomain() != null) {
-            ingestDocument.setFieldValue(domainTarget, info.getDomain());
+        if (info.domain() != null) {
+            document.setFieldValue(domainTarget, info.domain());
         }
-        if (info.getRegisteredDomain() != null) {
-            ingestDocument.setFieldValue(registeredDomainTarget, info.getRegisteredDomain());
+        if (info.registeredDomain() != null) {
+            document.setFieldValue(registeredDomainTarget, info.registeredDomain());
         }
-        if (info.getETLD() != null) {
-            ingestDocument.setFieldValue(topLevelDomainTarget, info.getETLD());
+        if (info.eTLD() != null) {
+            document.setFieldValue(topLevelDomainTarget, info.eTLD());
         }
-        if (info.getSubdomain() != null) {
-            ingestDocument.setFieldValue(subdomainTarget, info.getSubdomain());
+        if (info.subdomain() != null) {
+            document.setFieldValue(subdomainTarget, info.subdomain());
         }
-        return ingestDocument;
+        return document;
     }
 
-    private DomainInfo getRegisteredDomain(IngestDocument d) {
-        String fieldString = d.getFieldValue(field, String.class, ignoreMissing);
-        if (fieldString == null) {
+    @Nullable
+    // visible for testing
+    static DomainInfo getRegisteredDomain(@Nullable String fqdn) {
+        if (fqdn == null) {
             return null;
         }
-        String registeredDomain = SUFFIX_MATCHER.getDomainRoot(fieldString);
+        String registeredDomain = SUFFIX_MATCHER.getDomainRoot(fqdn);
         if (registeredDomain == null) {
-            if (SUFFIX_MATCHER.matches(fieldString)) {
-                return new DomainInfo(fieldString);
+            if (SUFFIX_MATCHER.matches(fqdn)) {
+                return DomainInfo.of(fqdn);
             }
             return null;
         }
@@ -96,7 +99,7 @@ private DomainInfo getRegisteredDomain(IngestDocument d) {
             // we have domain with no matching public suffix, but "." in it
             return null;
         }
-        return new DomainInfo(registeredDomain, fieldString);
+        return DomainInfo.of(registeredDomain, fqdn);
     }
 
     @Override
@@ -104,54 +107,27 @@ public String getType() {
         return TYPE;
     }
 
-    private static class DomainInfo {
-        private final String domain;
-        private final String registeredDomain;
-        private final String eTLD;
-        private final String subdomain;
-
-        private DomainInfo(String eTLD) {
-            this.domain = eTLD;
-            this.eTLD = eTLD;
-            this.registeredDomain = null;
-            this.subdomain = null;
+    // visible for testing
+    record DomainInfo(
+        String domain,
+        String registeredDomain,
+        String eTLD, // n.b. https://developer.mozilla.org/en-US/docs/Glossary/eTLD
+        String subdomain
+    ) {
+        static DomainInfo of(final String eTLD) {
+            return new DomainInfo(eTLD, null, eTLD, null);
         }
 
-        private DomainInfo(String registeredDomain, String domain) {
+        static DomainInfo of(final String registeredDomain, final String domain) {
             int index = registeredDomain.indexOf('.') + 1;
             if (index > 0 && index < registeredDomain.length()) {
-                this.domain = domain;
-                this.eTLD = registeredDomain.substring(index);
-                this.registeredDomain = registeredDomain;
                 int subdomainIndex = domain.lastIndexOf("." + registeredDomain);
-                if (subdomainIndex > 0) {
-                    this.subdomain = domain.substring(0, subdomainIndex);
-                } else {
-                    this.subdomain = null;
-                }
+                final String subdomain = subdomainIndex > 0 ? domain.substring(0, subdomainIndex) : null;
+                return new DomainInfo(domain, registeredDomain, registeredDomain.substring(index), subdomain);
             } else {
-                this.domain = null;
-                this.eTLD = null;
-                this.registeredDomain = null;
-                this.subdomain = null;
+                return new DomainInfo(null, null, null, null);
             }
         }
-
-        public String getDomain() {
-            return domain;
-        }
-
-        public String getSubdomain() {
-            return subdomain;
-        }
-
-        public String getRegisteredDomain() {
-            return registeredDomain;
-        }
-
-        public String getETLD() {
-            return eTLD;
-        }
     }
 
     public static final class Factory implements Processor.Factory {
 
         @Override
         public RegisteredDomainProcessor create(
             Map<String, Processor.Factory> registry,
-            String processorTag,
+            String tag,
             String description,
             Map<String, Object> config
         ) throws Exception {
-            String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
-            String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD);
-            boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", true);
+            String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
+            String targetField = ConfigurationUtils.readStringProperty(TYPE, tag, config, "target_field", DEFAULT_TARGET_FIELD);
+            boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, tag, config, "ignore_missing", true);
 
-            return new RegisteredDomainProcessor(processorTag, description, field, targetField, ignoreMissing);
+            return new RegisteredDomainProcessor(tag, description, field, targetField, ignoreMissing);
         }
     }
 }
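Editor's note on PATCH 54: the record-based DomainInfo keeps the original derivation — the eTLD is the registered domain minus its first label, and the subdomain is the full domain minus the registered domain. The sketch below is illustrative only (the record is package-private to ingest-common, so this re-derives the same arithmetic outside the class); the class name, the sample FQDN "www.example.ac.uk", and the assumption that the public suffix list resolves its domain root to "example.ac.uk" are all mine, not part of the patch.

```java
// Mirrors DomainInfo.of(registeredDomain, domain) from the patch above for one sample input.
public class RegisteredDomainExample {
    public static void main(String[] args) {
        String domain = "www.example.ac.uk";        // value read from the configured "field"
        String registeredDomain = "example.ac.uk";  // assumed PublicSuffixMatcher#getDomainRoot result
        int index = registeredDomain.indexOf('.') + 1;                    // 8
        String eTLD = registeredDomain.substring(index);                  // "ac.uk"
        int subdomainIndex = domain.lastIndexOf("." + registeredDomain);  // 3
        String subdomain = subdomainIndex > 0 ? domain.substring(0, subdomainIndex) : null; // "www"
        // The processor would emit: domain, registered_domain, top_level_domain, subdomain
        System.out.println(domain + " | " + registeredDomain + " | " + eTLD + " | " + subdomain);
    }
}
```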
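Editor's note on the ES|QL patches earlier in this series: allow_partial_results is now carried as a URL parameter on _query rather than as a body field, with the cluster-wide default taken from esql.query.allow_partial_results. A minimal low-level REST sketch in the style of the integration test above, assuming `client` is an org.elasticsearch.client.RestClient and that "my-index" is a placeholder index name:

```java
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class EsqlPartialResultsExample {
    public static Response runQuery(RestClient client) throws Exception {
        Request request = new Request("POST", "/_query");
        request.setJsonEntity("""
            {
              "query": "FROM my-index | LIMIT 100"
            }
            """);
        // Override the esql.query.allow_partial_results cluster default for this request:
        // with "false", a shard-level failure fails the whole query instead of returning partial rows.
        request.addParameter("allow_partial_results", "false");
        return client.performRequest(request);
    }
}
```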