diff --git a/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
index 8a3c252463c..bb0b0fb3ea0 100644
--- a/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
+++ b/build-tools/missing-doclet/src/main/java/org/apache/lucene/missingdoclet/MissingDoclet.java
@@ -438,13 +438,6 @@ private void error(Element element, String message) {
fullMessage.append("): ");
fullMessage.append(message);
- if (Runtime.version().feature() == 11 && element.getKind() == ElementKind.PACKAGE) {
- // Avoid JDK 11 bug:
- // https://issues.apache.org/jira/browse/LUCENE-9747
- // https://bugs.openjdk.java.net/browse/JDK-8224082
- reporter.print(Diagnostic.Kind.ERROR, fullMessage.toString());
- } else {
- reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString());
- }
+ reporter.print(Diagnostic.Kind.ERROR, element, fullMessage.toString());
}
}
diff --git a/build.gradle b/build.gradle
index 2addeeddff4..199ba708330 100644
--- a/build.gradle
+++ b/build.gradle
@@ -132,7 +132,6 @@ apply from: file('gradle/java/javac.gradle')
apply from: file('gradle/testing/defaults-tests.gradle')
apply from: file('gradle/testing/randomization.gradle')
apply from: file('gradle/testing/fail-on-no-tests.gradle')
-apply from: file('gradle/testing/fail-on-unsupported-jdk.gradle')
apply from: file('gradle/testing/alternative-jdk-support.gradle')
apply from: file('gradle/java/jar-manifest.gradle')
apply from: file('gradle/testing/retry-test.gradle')
@@ -205,7 +204,6 @@ apply from: file('gradle/hacks/global-exclude-dependencies.gradle')
apply from: file('gradle/hacks/gradle-archives.gradle')
apply from: file('gradle/hacks/wipe-temp.gradle')
-apply from: file('gradle/hacks/hashmapAssertions.gradle')
apply from: file('gradle/hacks/turbocharge-jvm-opts.gradle')
apply from: file('gradle/hacks/dummy-outputs.gradle')
diff --git a/gradle/hacks/hashmapAssertions.gradle b/gradle/hacks/hashmapAssertions.gradle
deleted file mode 100644
index 095726c9701..00000000000
--- a/gradle/hacks/hashmapAssertions.gradle
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Disable assertions for HashMap due to: LUCENE-8991 / JDK-8205399
-def vmName = System.getProperty("java.vm.name")
-def spec = System.getProperty("java.specification.version")
-if (vmName =~ /(?i)(hotspot|openjdk|jrockit)/ &&
- spec =~ /^(1\.8|9|10|11)$/ &&
- !Boolean.parseBoolean(propertyOrDefault('tests.asserts.hashmap', 'false'))) {
- logger.info("Enabling HashMap assertions.")
- allprojects {
- plugins.withType(JavaPlugin) {
- tasks.withType(Test) { task ->
- jvmArgs("-da:java.util.HashMap")
- }
- }
- }
-}
-
diff --git a/gradle/testing/fail-on-unsupported-jdk.gradle b/gradle/testing/fail-on-unsupported-jdk.gradle
deleted file mode 100644
index 7d94b709764..00000000000
--- a/gradle/testing/fail-on-unsupported-jdk.gradle
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-configure(rootProject) {
- task ensureJdkSupported() {
- doFirst {
- if (System.getProperty("os.name").toLowerCase(Locale.ROOT).contains("mac") && rootProject.runtimeJavaVersion == JavaVersion.VERSION_20) {
- throw new GradleException("Tests cannot be run with JDK20 on Mac; see SOLR-16733 for more details.")
- }
- }
- }
-
- allprojects {
- tasks.withType(Test) {
- dependsOn ":ensureJdkSupported"
- }
- }
-}
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a135ecaa539..1fd659b966e 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -34,10 +34,6 @@ Improvements
* SOLR-17516: `LBHttp2SolrClient` is now generic, adding support for `HttpJdkSolrClient`. (James Dyer)
-* SOLR-17541: `LBHttp2SolrClient` now maintains a separate internal/delegate client per Solr Base URL. Both `LBHttp2SolrClient` and `CloudHttp2SolrClient`
- always create and manage these internal clients. The ability for callers to provide a pre-built client is removed. Callers may specify the internal client
- details by providing an instance of either `Http2SolrClient.Builder` or `HttpJdkSolrClient.Builder`. (James Dyer)
-
Optimizations
---------------------
* SOLR-17568: The CLI bin/solr export tool now contacts the appropriate nodes directly for data instead of proxying through one.
@@ -106,11 +102,6 @@ Deprecation Removals
* SOLR-17540: Removed the Hadoop Auth module, and thus Kerberos authentication and other exotic options. (Eric Pugh)
-* SOLR-17541: Removed `CloudHttp2SolrClient.Builder#withHttpClient` in favor of `CloudHttp2SolrClient.Builder#withInternalClientBuilder`.
- The constructor on `LBHttp2SolrClient.Builder` that took an instance of `HttpSolrClientBase` is updated to instead take an instance of
- `HttpSolrClientBuilderBase`. Renamed `LBHttp2SolrClient.Builder#withListenerFactory` to `LBHttp2SolrClient.Builder#withListenerFactories`
- (James Dyer)
-
Dependency Upgrades
---------------------
(No changes)
@@ -159,10 +150,13 @@ New Features
Improvements
---------------------
-* SOLR-17541: Deprecate `CloudHttp2SolrClient.Builder#withHttpClient` in favor of
- `CloudHttp2SolrClient.Builder#withInternalClientBuilder`.
- Deprecate `LBHttp2SolrClient.Builder#withListenerFactory` in favor of
- `LBHttp2SolrClient.Builder#withListenerFactories` (James Dyer)
+* SOLR-15751: The v2 API now has parity with the v1 "COLSTATUS" and "segments" APIs, which can be used to fetch detailed information about
+ specific collections or cores. Collection information can be fetched by a call to `GET /api/collections/collectionName`, and core
+ information with a call to `GET /api/cores/coreName/segments`. (Jason Gerlowski)
+
+* SOLR-16396: All v2 configset APIs have been moved to the slightly different path: `/api/configsets`, to better align with the design of
+ other v2 APIs. SolrJ now offers (experimental) SolrRequest implementations for all v2 configset APIs in
+ `org.apache.solr.client.solrj.request.ConfigsetsApi`. (Jason Gerlowski)
Optimizations
---------------------
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java
new file mode 100644
index 00000000000..d07982cab76
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/CollectionStatusApi.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.endpoint;
+
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
+
+/**
+ * V2 API definition for fetching collection metadata
+ *
+ * <p>This API (GET /v2/collections/collectionName) is analogous to the v1
+ * /admin/collections?action=COLSTATUS command.
+ */
+@Path("/collections/{collectionName}")
+public interface CollectionStatusApi {
+
+ // TODO Query parameters currently match those offered by the v1
+ // /admin/collections?action=COLSTATUS. Should param names be updated/clarified?
+ @GET
+ @Operation(
+ summary = "Fetches metadata about the specified collection",
+ tags = {"collections"})
+ CollectionStatusResponse getCollectionStatus(
+ @Parameter(description = "The name of the collection to return metadata for", required = true)
+ @PathParam("collectionName")
+ String collectionName,
+ @Parameter(description = SegmentsApi.CORE_INFO_PARAM_DESC) @QueryParam("coreInfo")
+ Boolean coreInfo,
+ @Parameter(
+ description =
+ "Boolean flag to include metadata and statistics about the segments used by each shard leader. Implicitly set to true by 'fieldInfo' and 'sizeInfo'")
+ @QueryParam("segments")
+ Boolean segments,
+ @Parameter(
+ description =
+ SegmentsApi.FIELD_INFO_PARAM_DESC
+ + " Implicitly sets the 'segments' flag to 'true'")
+ @QueryParam("fieldInfo")
+ Boolean fieldInfo,
+ @Parameter(description = SegmentsApi.RAW_SIZE_PARAM_DESC) @QueryParam("rawSize")
+ Boolean rawSize,
+ @Parameter(description = SegmentsApi.RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary")
+ Boolean rawSizeSummary,
+ @Parameter(description = SegmentsApi.RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails")
+ Boolean rawSizeDetails,
+ @Parameter(description = SegmentsApi.RAW_SIZE_SAMPLING_PERCENT_DESC)
+ @QueryParam("rawSizeSamplingPercent")
+ Float rawSizeSamplingPercent,
+ @Parameter(
+ description =
+ SegmentsApi.SIZE_INFO_PARAM_DESC
+ + ". Implicitly sets the 'segments' flag to 'true'")
+ @QueryParam("sizeInfo")
+ Boolean sizeInfo)
+ throws Exception;
+}
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java
new file mode 100644
index 00000000000..9961b4c9f28
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ConfigsetsApi.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.endpoint;
+
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.parameters.RequestBody;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import java.io.IOException;
+import java.io.InputStream;
+import org.apache.solr.client.api.model.CloneConfigsetRequestBody;
+import org.apache.solr.client.api.model.ListConfigsetsResponse;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
+
+public interface ConfigsetsApi {
+
+ /** V2 API definition for listing the configsets available to this SolrCloud cluster. */
+ @Path("/configsets")
+ interface List {
+ @GET
+ @Operation(
+ summary = "List the configsets available to Solr.",
+ tags = {"configsets"})
+ ListConfigsetsResponse listConfigSet() throws Exception;
+ }
+
+ /**
+ * V2 API definition for creating a (possibly slightly modified) copy of an existing configset
+ *
+ * <p>Equivalent to the existing v1 API /admin/configs?action=CREATE
+ */
+ @Path("/configsets")
+ interface Clone {
+ @POST
+ @Operation(
+ summary = "Create a new configset modeled on an existing one.",
+ tags = {"configsets"})
+ SolrJerseyResponse cloneExistingConfigSet(CloneConfigsetRequestBody requestBody)
+ throws Exception;
+ }
+
+ /**
+ * V2 API definition for deleting an existing configset.
+ *
+ * <p>Equivalent to the existing v1 API /admin/configs?action=DELETE
+ */
+ @Path("/configsets/{configSetName}")
+ interface Delete {
+ @DELETE
+ @Operation(summary = "Delete an existing configset.", tags = "configsets")
+ SolrJerseyResponse deleteConfigSet(@PathParam("configSetName") String configSetName)
+ throws Exception;
+ }
+
+ /**
+ * V2 API definitions for uploading a configset, in whole or part.
+ *
+ * <p>Equivalent to the existing v1 API /admin/configs?action=UPLOAD
+ */
+ @Path("/configsets/{configSetName}")
+ interface Upload {
+ @PUT
+ @Operation(summary = "Create a new configset.", tags = "configsets")
+ SolrJerseyResponse uploadConfigSet(
+ @PathParam("configSetName") String configSetName,
+ @QueryParam("overwrite") Boolean overwrite,
+ @QueryParam("cleanup") Boolean cleanup,
+ @RequestBody(required = true) InputStream requestBody)
+ throws IOException;
+
+ @PUT
+ @Path("{filePath:.+}")
+ SolrJerseyResponse uploadConfigSetFile(
+ @PathParam("configSetName") String configSetName,
+ @PathParam("filePath") String filePath,
+ @QueryParam("overwrite") Boolean overwrite,
+ @QueryParam("cleanup") Boolean cleanup,
+ @RequestBody(required = true) InputStream requestBody)
+ throws IOException;
+ }
+}
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java
deleted file mode 100644
index 7e0cf620b7f..00000000000
--- a/solr/api/src/java/org/apache/solr/client/api/endpoint/ListConfigsetsApi.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.client.api.endpoint;
-
-import io.swagger.v3.oas.annotations.Operation;
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.Path;
-import org.apache.solr.client.api.model.ListConfigsetsResponse;
-
-/** V2 API definition for listing configsets. */
-@Path("/cluster/configs")
-public interface ListConfigsetsApi {
- @GET
- @Operation(
- summary = "List the configsets available to Solr.",
- tags = {"configsets"})
- ListConfigsetsResponse listConfigSet() throws Exception;
-}
diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java
new file mode 100644
index 00000000000..1f6f089642e
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/SegmentsApi.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.endpoint;
+
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
+import org.apache.solr.client.api.util.CoreApiParameters;
+
+/**
+ * V2 API definition for fetching metadata about a core's segments
+ *
+ * <p>This API (GET /v2/cores/coreName/segments) is analogous to the v1
+ * /solr/coreName/admin/segments API
+ */
+@Path("/cores/{coreName}/segments")
+public interface SegmentsApi {
+
+ String CORE_INFO_PARAM_DESC =
+ "Boolean flag to include metadata (e.g. index and data directories, IndexWriter configuration, etc.) about each shard leader's core";
+ String FIELD_INFO_PARAM_DESC =
+ "Boolean flag to include statistics about the indexed fields present on each shard leader.";
+ String RAW_SIZE_PARAM_DESC =
+ "Boolean flag to include simple estimates of the disk size taken up by each field (e.g. \"id\", \"_version_\") and by each index data structure (e.g. 'storedFields', 'docValues_numeric').";
+ String RAW_SIZE_SUMMARY_DESC =
+ "Boolean flag to include more involved estimates of the disk size taken up by index data structures, on a per-field basis (e.g. how much data does the \"id\" field contribute to 'storedField' index files). More detail than 'rawSize', less detail than 'rawSizeDetails'.";
+ String RAW_SIZE_DETAILS_DESC =
+ "Boolean flag to include detailed statistics about the disk size taken up by various fields and data structures. More detail than 'rawSize' and 'rawSizeSummary'.";
+ String RAW_SIZE_SAMPLING_PERCENT_DESC =
+ "Percentage (between 0 and 100) of data to read when estimating index size and statistics. Defaults to 5.0 (i.e. 5%).";
+ String SIZE_INFO_PARAM_DESC =
+ "Boolean flag to include information about the largest index files for each Lucene segment.";
+
+ @GET
+ @CoreApiParameters
+ @Operation(
+ summary = "Fetches metadata about the segments in use by the specified core",
+ tags = {"segments"})
+ GetSegmentDataResponse getSegmentData(
+ @Parameter(description = CORE_INFO_PARAM_DESC) @QueryParam("coreInfo") Boolean coreInfo,
+ @Parameter(description = FIELD_INFO_PARAM_DESC) @QueryParam("fieldInfo") Boolean fieldInfo,
+ @Parameter(description = RAW_SIZE_PARAM_DESC) @QueryParam("rawSize") Boolean rawSize,
+ @Parameter(description = RAW_SIZE_SUMMARY_DESC) @QueryParam("rawSizeSummary")
+ Boolean rawSizeSummary,
+ @Parameter(description = RAW_SIZE_DETAILS_DESC) @QueryParam("rawSizeDetails")
+ Boolean rawSizeDetails,
+ @Parameter(description = RAW_SIZE_SAMPLING_PERCENT_DESC) @QueryParam("rawSizeSamplingPercent")
+ Float rawSizeSamplingPercent,
+ @Parameter(description = SIZE_INFO_PARAM_DESC) @QueryParam("sizeInfo") Boolean sizeInfo)
+ throws Exception;
+}
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java b/solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java
similarity index 70%
rename from solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java
rename to solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java
index 5f7f2e6687d..14e22225986 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/beans/CreateConfigPayload.java
+++ b/solr/api/src/java/org/apache/solr/client/api/model/CloneConfigsetRequestBody.java
@@ -14,19 +14,20 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.solr.client.solrj.request.beans;
+package org.apache.solr.client.api.model;
+import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
-import org.apache.solr.common.annotation.JsonProperty;
-import org.apache.solr.common.util.ReflectMapWriter;
-public class CreateConfigPayload implements ReflectMapWriter {
- public static final String DEFAULT_CONFIGSET =
- "_default"; // TODO Better location for this in SolrJ?
+/** Request body for ConfigsetsApi.Clone */
+public class CloneConfigsetRequestBody {
+ public static final String DEFAULT_CONFIGSET = "_default";
@JsonProperty(required = true)
public String name;
- @JsonProperty public String baseConfigSet = DEFAULT_CONFIGSET;
+ @JsonProperty(defaultValue = DEFAULT_CONFIGSET)
+ public String baseConfigSet;
+
@JsonProperty public Map properties;
}
diff --git a/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java
new file mode 100644
index 00000000000..82109edb915
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/model/CollectionStatusResponse.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Response of the CollectionStatusApi.getCollectionStatus() API
+ *
+ * <p>Note that the corresponding v1 API has a slightly different response format. Users should not
+ * attempt to convert a v1 response into this type.
+ */
+public class CollectionStatusResponse extends SolrJerseyResponse {
+
+ @JsonProperty public String name;
+ @JsonProperty public Integer znodeVersion;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.NUMBER)
+ public Date creationTimeMillis;
+
+ @JsonProperty public CollectionMetadata properties;
+ @JsonProperty public Integer activeShards;
+ @JsonProperty public Integer inactiveShards;
+ @JsonProperty public List schemaNonCompliant;
+
+ @JsonProperty public Map shards;
+
+ // Always present in response
+ public static class CollectionMetadata {
+ @JsonProperty public String configName;
+ @JsonProperty public Integer nrtReplicas;
+ @JsonProperty public Integer pullReplicas;
+ @JsonProperty public Integer tlogReplicas;
+ @JsonProperty public Map router;
+ @JsonProperty public Integer replicationFactor;
+
+ private Map unknownFields = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map unknownProperties() {
+ return unknownFields;
+ }
+
+ @JsonAnySetter
+ public void setUnknownProperty(String field, Object value) {
+ unknownFields.put(field, value);
+ }
+ }
+
+ // Always present in response
+ public static class ShardMetadata {
+ @JsonProperty public String state; // TODO Make this an enum?
+ @JsonProperty public String range;
+ @JsonProperty public ReplicaSummary replicas;
+ @JsonProperty public LeaderSummary leader;
+ }
+
+ // Always present in response
+ public static class ReplicaSummary {
+ @JsonProperty public Integer total;
+ @JsonProperty public Integer active;
+ @JsonProperty public Integer down;
+ @JsonProperty public Integer recovering;
+
+ @JsonProperty("recovery_failed")
+ public Integer recoveryFailed;
+ }
+
+ // Always present in response unless otherwise specified
+ public static class LeaderSummary {
+ @JsonProperty public String coreNode;
+ @JsonProperty public String core;
+ @JsonProperty public Boolean leader;
+
+ @JsonProperty("node_name")
+ public String nodeName;
+
+ @JsonProperty("base_url")
+ public String baseUrl;
+
+ @JsonProperty public String state; // TODO Make this an enum?
+ @JsonProperty public String type; // TODO Make this an enum?
+
+ @JsonProperty("force_set_state")
+ public Boolean forceSetState;
+
+ // Present with coreInfo=true || sizeInfo=true unless otherwise specified
+ @JsonProperty public SegmentInfo segInfos;
+
+ private Map unknownFields = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map unknownProperties() {
+ return unknownFields;
+ }
+
+ @JsonAnySetter
+ public void setUnknownProperty(String field, Object value) {
+ unknownFields.put(field, value);
+ }
+ }
+
+ // Present with segments=true || coreInfo=true || sizeInfo=true || fieldInfo=true unless otherwise
+ // specified
+
+ /**
+ * Same properties as {@link GetSegmentDataResponse}, but uses a different class to avoid
+ * inheriting "responseHeader", etc.
+ */
+ public static class SegmentInfo {
+ @JsonProperty public GetSegmentDataResponse.SegmentSummary info;
+
+ @JsonProperty public Map runningMerges;
+
+ // Present with segments=true || sizeInfo=true || fieldInfo=true
+ @JsonProperty public Map segments;
+
+ // Present with rawSize=true
+ @JsonProperty public GetSegmentDataResponse.RawSize rawSize;
+
+ // Present only with fieldInfo=true
+ @JsonProperty public List fieldInfoLegend;
+ }
+}
diff --git a/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java
new file mode 100644
index 00000000000..b5e3714bfd3
--- /dev/null
+++ b/solr/api/src/java/org/apache/solr/client/api/model/GetSegmentDataResponse.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.api.model;
+
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonFormat;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Response for {@link org.apache.solr.client.api.endpoint.SegmentsApi#getSegmentData(Boolean,
+ * Boolean, Boolean, Boolean, Boolean, Float, Boolean)} API
+ */
+public class GetSegmentDataResponse extends SolrJerseyResponse {
+ @JsonProperty public SegmentSummary info;
+
+ @JsonProperty public Map runningMerges;
+
+ @JsonProperty public Map segments;
+
+ // Present only with fieldInfo=true
+ @JsonProperty public List fieldInfoLegend;
+
+ // Present with rawSize=true
+ @JsonProperty public RawSize rawSize;
+
+ // Always present in response
+ public static class SegmentSummary {
+ @JsonProperty public String minSegmentLuceneVersion;
+ @JsonProperty public String commitLuceneVersion;
+ @JsonProperty public Integer numSegments;
+ @JsonProperty public String segmentsFileName;
+ @JsonProperty public Integer totalMaxDoc;
+ // Typically keys are 'commitCommandVer' and 'commitTimeMSec'
+ @JsonProperty public Map userData;
+
+ // Present for coreInfo=true only
+ @JsonProperty public CoreSummary core;
+ }
+
+ // Always present in response, provided that the specified core has segments
+ public static class SingleSegmentData {
+ @JsonProperty public String name;
+ @JsonProperty public Integer delCount;
+ @JsonProperty public Integer softDelCount;
+ @JsonProperty public Boolean hasFieldUpdates;
+ @JsonProperty public Long sizeInBytes;
+ @JsonProperty public Integer size;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "YYYY-MM-DD'T'hh:mm:ss.S'Z'")
+ public Date age;
+
+ @JsonProperty public String source;
+ @JsonProperty public String version;
+ @JsonProperty public Integer createdVersionMajor;
+ @JsonProperty public String minVersion;
+ @JsonProperty public SegmentDiagnosticInfo diagnostics;
+ @JsonProperty public Map attributes;
+ // Only present when index-sorting is in use
+ @JsonProperty public String sort;
+ @JsonProperty public Boolean mergeCandidate;
+
+ // Present only when fieldInfo=true
+ @JsonProperty public Map fields;
+
+ // Present only when sizeInfo=true
+ @JsonProperty("largestFiles")
+ public Map largestFilesByName;
+ }
+
+ // Always present in response, provided that the specified core has segments
+ public static class SegmentSingleFieldInfo {
+ @JsonProperty public String flags;
+ @JsonProperty public Integer docCount;
+ @JsonProperty public Long termCount;
+ @JsonProperty public Long sumDocFreq;
+ @JsonProperty public Long sumTotalTermFreq;
+ @JsonProperty public String schemaType;
+ @JsonProperty public Map nonCompliant;
+ }
+
+ // Always present in response
+ public static class SegmentDiagnosticInfo {
+ @JsonProperty("os.version")
+ public String osVersion;
+
+ @JsonProperty("lucene.version")
+ public String luceneVersion;
+
+ @JsonProperty public String source;
+
+ // TODO - consider 'Instant' once SOLR-17608 is finished
+ @JsonProperty
+ @JsonFormat(shape = JsonFormat.Shape.NUMBER)
+ public Date timestamp;
+
+ @JsonProperty("java.runtime.version")
+ public String javaRuntimeVersion;
+
+ @JsonProperty public String os;
+
+ @JsonProperty("java.vendor")
+ public String javaVendor;
+
+ @JsonProperty("os.arch")
+ public String osArchitecture;
+
+ private Map additionalDiagnostics = new HashMap<>();
+
+ @JsonAnyGetter
+ public Map getAdditionalDiagnostics() {
+ return additionalDiagnostics;
+ }
+
+ @JsonAnySetter
+ public void getAdditionalDiagnostics(String field, Object value) {
+ additionalDiagnostics.put(field, value);
+ }
+ }
+
+ // Present with coreInfo=true unless otherwise specified
+ public static class CoreSummary {
+ @JsonProperty public String startTime;
+ @JsonProperty public String dataDir;
+ @JsonProperty public String indexDir;
+ @JsonProperty public Double sizeInGB;
+ @JsonProperty public IndexWriterConfigSummary indexWriterConfig;
+ }
+
+ // Present with coreInfo=true unless otherwise specified
+
+ /** A serializable representation of Lucene's "LiveIndexWriterConfig" */
+ public static class IndexWriterConfigSummary {
+ @JsonProperty public String analyzer;
+ @JsonProperty public Double ramBufferSizeMB;
+ @JsonProperty public Integer maxBufferedDocs;
+ @JsonProperty public String mergedSegmentWarmer;
+ @JsonProperty public String delPolicy;
+ @JsonProperty public String commit;
+ @JsonProperty public String openMode;
+ @JsonProperty public String similarity;
+ @JsonProperty public String mergeScheduler;
+ @JsonProperty public String codec;
+ @JsonProperty public String infoStream;
+ @JsonProperty public String mergePolicy;
+ @JsonProperty public Boolean readerPooling;
+ @JsonProperty public Integer perThreadHardLimitMB;
+ @JsonProperty public Boolean useCompoundFile;
+ @JsonProperty public Boolean commitOnClose;
+ @JsonProperty public String indexSort;
+ @JsonProperty public Boolean checkPendingFlushOnUpdate;
+ @JsonProperty public String softDeletesField;
+ @JsonProperty public Long maxFullFlushMergeWaitMillis;
+ @JsonProperty public String leafSorter;
+ @JsonProperty public String eventListener;
+ @JsonProperty public String parentField;
+ @JsonProperty public String writer;
+ }
+
+ // Present with rawSize=true unless otherwise specified
+ public static class RawSize {
+ @JsonProperty public Map fieldsBySize;
+ @JsonProperty public Map typesBySize;
+
+ // Present with rawSizeDetails=true
+ @JsonProperty public Object details;
+
+ // Present with rawSizeSummary=true
+ @JsonProperty public Map summary;
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
index d156710a675..1ecb290fa0e 100644
--- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
+++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
@@ -188,6 +188,8 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
Thread.currentThread().setContextClassLoader(core.getResourceLoader().getClassLoader());
this.path = path = path.substring(prefix.length() + pathSegments.get(1).length() + 2);
+ // Core-level API, so populate "collection" template val
+ parts.put(COLLECTION_PROP, origCorename);
Api apiInfo = getApiInfo(core.getRequestHandlers(), path, req.getMethod(), fullPath, parts);
if (isCompositeApi && apiInfo instanceof CompositeApi) {
((CompositeApi) this.api).add(apiInfo);
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 5a890121a43..e81a4a20c2b 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -197,6 +197,9 @@ public String toString() {
public final ZkStateReader zkStateReader;
private SolrCloudManager cloudManager;
+ // only for internal usage
+ private Http2SolrClient http2SolrClient;
+
private CloudHttp2SolrClient cloudSolrClient;
private final String zkServerAddress; // example: 127.0.0.1:54062/solr
@@ -751,6 +754,7 @@ public void close() {
sysPropsCacher.close();
customThreadPool.execute(() -> IOUtils.closeQuietly(cloudManager));
customThreadPool.execute(() -> IOUtils.closeQuietly(cloudSolrClient));
+ customThreadPool.execute(() -> IOUtils.closeQuietly(http2SolrClient));
try {
try {
@@ -846,14 +850,15 @@ public SolrCloudManager getSolrCloudManager() {
if (cloudManager != null) {
return cloudManager;
}
- var httpSolrClientBuilder =
+ http2SolrClient =
new Http2SolrClient.Builder()
.withHttpClient(cc.getDefaultHttpSolrClient())
.withIdleTimeout(30000, TimeUnit.MILLISECONDS)
- .withConnectionTimeout(15000, TimeUnit.MILLISECONDS);
+ .withConnectionTimeout(15000, TimeUnit.MILLISECONDS)
+ .build();
cloudSolrClient =
new CloudHttp2SolrClient.Builder(new ZkClientClusterStateProvider(zkStateReader))
- .withInternalClientBuilder(httpSolrClientBuilder)
+ .withHttpClient(http2SolrClient)
.build();
cloudManager = new SolrClientCloudManager(cloudSolrClient, cc.getObjectCache());
cloudManager.getClusterStateProvider().connect();
@@ -1697,16 +1702,18 @@ public void publish(
}
if (core != null && core.getDirectoryFactory().isSharedStorage()) {
if (core.getDirectoryFactory().isSharedStorage()) {
+ // append additional entries to 'm'
+ MapWriter original = m;
m =
- m.append(
- props -> {
- props.put(ZkStateReader.SHARED_STORAGE_PROP, "true");
- props.put("dataDir", core.getDataDir());
- UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
- if (ulog != null) {
- props.put("ulogDir", ulog.getUlogDir());
- }
- });
+ props -> {
+ original.writeMap(props);
+ props.put(ZkStateReader.SHARED_STORAGE_PROP, "true");
+ props.put("dataDir", core.getDataDir());
+ UpdateLog ulog = core.getUpdateHandler().getUpdateLog();
+ if (ulog != null) {
+ props.put("ulogDir", ulog.getUlogDir());
+ }
+ };
}
}
} catch (SolrCoreInitializationException ex) {
diff --git a/solr/core/src/java/org/apache/solr/core/HttpSolrClientProvider.java b/solr/core/src/java/org/apache/solr/core/HttpSolrClientProvider.java
index e9631b26d1f..2bf25a896f6 100644
--- a/solr/core/src/java/org/apache/solr/core/HttpSolrClientProvider.java
+++ b/solr/core/src/java/org/apache/solr/core/HttpSolrClientProvider.java
@@ -16,6 +16,7 @@
*/
package org.apache.solr.core;
+import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.common.util.IOUtils;
@@ -36,24 +37,22 @@ final class HttpSolrClientProvider implements AutoCloseable {
private final Http2SolrClient httpSolrClient;
- private final Http2SolrClient.Builder httpSolrClientBuilder;
-
private final InstrumentedHttpListenerFactory trackHttpSolrMetrics;
HttpSolrClientProvider(UpdateShardHandlerConfig cfg, SolrMetricsContext parentContext) {
trackHttpSolrMetrics = new InstrumentedHttpListenerFactory(getNameStrategy(cfg));
initializeMetrics(parentContext);
- this.httpSolrClientBuilder =
- new Http2SolrClient.Builder().addListenerFactory(trackHttpSolrMetrics);
+ Http2SolrClient.Builder httpClientBuilder =
+ new Http2SolrClient.Builder().withListenerFactory(List.of(trackHttpSolrMetrics));
if (cfg != null) {
- httpSolrClientBuilder
+ httpClientBuilder
.withConnectionTimeout(cfg.getDistributedConnectionTimeout(), TimeUnit.MILLISECONDS)
.withIdleTimeout(cfg.getDistributedSocketTimeout(), TimeUnit.MILLISECONDS)
.withMaxConnectionsPerHost(cfg.getMaxUpdateConnectionsPerHost());
}
- httpSolrClient = httpSolrClientBuilder.build();
+ httpSolrClient = httpClientBuilder.build();
}
private InstrumentedHttpListenerFactory.NameStrategy getNameStrategy(
@@ -77,7 +76,7 @@ Http2SolrClient getSolrClient() {
}
void setSecurityBuilder(HttpClientBuilderPlugin builder) {
- builder.setup(httpSolrClientBuilder, httpSolrClient);
+ builder.setup(httpSolrClient);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
index e7fe44e0fe6..234682e8473 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ColStatus.java
@@ -25,6 +25,7 @@
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.io.SolrClientCache;
@@ -41,6 +42,8 @@
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.jersey.SolrJacksonMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,12 +83,16 @@ public void getColStatus(NamedList results) {
collections = Collections.singleton(col);
}
boolean withFieldInfo = props.getBool(FIELD_INFO_PROP, false);
- boolean withSegments = props.getBool(SEGMENTS_PROP, false);
boolean withCoreInfo = props.getBool(CORE_INFO_PROP, false);
boolean withSizeInfo = props.getBool(SIZE_INFO_PROP, false);
boolean withRawSizeInfo = props.getBool(RAW_SIZE_PROP, false);
boolean withRawSizeSummary = props.getBool(RAW_SIZE_SUMMARY_PROP, false);
boolean withRawSizeDetails = props.getBool(RAW_SIZE_DETAILS_PROP, false);
+ // FieldInfo and SizeInfo imply segments=true, since they add to the data reported about each
+ // segment
+ boolean withSegments = props.getBool(SEGMENTS_PROP, false);
+ withSegments |= withFieldInfo || withSizeInfo;
+
Object samplingPercentVal = props.get(RAW_SIZE_SAMPLING_PERCENT_PROP);
Float samplingPercent =
samplingPercentVal != null ? Float.parseFloat(String.valueOf(samplingPercentVal)) : null;
@@ -94,6 +101,7 @@ public void getColStatus(NamedList results) {
}
boolean getSegments = false;
if (withFieldInfo
+ || withSegments
|| withSizeInfo
|| withCoreInfo
|| withRawSizeInfo
@@ -196,32 +204,35 @@ public void getColStatus(NamedList results) {
}
QueryRequest req = new QueryRequest(params);
NamedList rsp = client.request(req);
- rsp.remove("responseHeader");
- leaderMap.add("segInfos", rsp);
- NamedList> segs = (NamedList>) rsp.get("segments");
+ final var segmentResponse =
+ SolrJacksonMapper.getObjectMapper().convertValue(rsp, GetSegmentDataResponse.class);
+ segmentResponse.responseHeader = null;
+
+ final var segs = segmentResponse.segments;
if (segs != null) {
- for (Map.Entry entry : segs) {
- NamedList fields =
- (NamedList) ((NamedList) entry.getValue()).get("fields");
- if (fields != null) {
- for (Map.Entry fEntry : fields) {
- Object nc = ((NamedList) fEntry.getValue()).get("nonCompliant");
- if (nc != null) {
+ for (Map.Entry<String, GetSegmentDataResponse.SingleSegmentData> entry :
+ segs.entrySet()) {
+ final var fieldInfoByName = entry.getValue().fields;
+ if (fieldInfoByName != null) {
+ for (Map.Entry<String, GetSegmentDataResponse.SegmentSingleFieldInfo> fEntry :
+ fieldInfoByName.entrySet()) {
+ if (fEntry.getValue().nonCompliant != null) {
nonCompliant.add(fEntry.getKey());
}
}
}
if (!withFieldInfo) {
- ((NamedList) entry.getValue()).remove("fields");
+ entry.getValue().fields = null;
}
}
}
if (!withSegments) {
- rsp.remove("segments");
+ segmentResponse.segments = null;
}
if (!withFieldInfo) {
- rsp.remove("fieldInfoLegend");
+ segmentResponse.fieldInfoLegend = null;
}
+ leaderMap.add("segInfos", Utils.reflectToMap(segmentResponse));
} catch (SolrServerException | IOException e) {
log.warn("Error getting details of replica segments from {}", url, e);
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index df6ba086d06..aefc1033d5e 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -174,7 +174,7 @@
import org.apache.solr.handler.admin.api.BalanceShardUnique;
import org.apache.solr.handler.admin.api.ClusterProperty;
import org.apache.solr.handler.admin.api.CollectionProperty;
-import org.apache.solr.handler.admin.api.CollectionStatusAPI;
+import org.apache.solr.handler.admin.api.CollectionStatus;
import org.apache.solr.handler.admin.api.CreateAlias;
import org.apache.solr.handler.admin.api.CreateCollection;
import org.apache.solr.handler.admin.api.CreateCollectionBackup;
@@ -539,11 +539,8 @@ public enum CollectionOperation implements CollectionOp {
ColStatus.RAW_SIZE_SAMPLING_PERCENT_PROP,
ColStatus.SIZE_INFO_PROP);
- new ColStatus(
- h.coreContainer.getSolrClientCache(),
- h.coreContainer.getZkController().getZkStateReader().getClusterState(),
- new ZkNodeProps(props))
- .getColStatus(rsp.getValues());
+ CollectionStatus.populateColStatusData(
+ h.coreContainer, new ZkNodeProps(props), rsp.getValues());
return null;
}),
DELETE_OP(
@@ -1360,6 +1357,7 @@ public Collection> getJerseyResources() {
CreateReplica.class,
AddReplicaProperty.class,
BalanceShardUnique.class,
+ CollectionStatus.class,
CreateAlias.class,
CreateCollection.class,
CreateCollectionBackup.class,
@@ -1399,7 +1397,6 @@ public Collection getApis() {
apis.addAll(AnnotatedApi.getApis(new ModifyCollectionAPI(this)));
apis.addAll(AnnotatedApi.getApis(new MoveReplicaAPI(this)));
apis.addAll(AnnotatedApi.getApis(new RebalanceLeadersAPI(this)));
- apis.addAll(AnnotatedApi.getApis(new CollectionStatusAPI(this)));
return apis;
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
index ff69b1ee147..535deb54e44 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
@@ -17,36 +17,30 @@
package org.apache.solr.handler.admin;
import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.handler.configsets.UploadConfigSetFileAPI.FILEPATH_PLACEHOLDER;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import org.apache.solr.api.AnnotatedApi;
import org.apache.solr.api.Api;
import org.apache.solr.api.JerseyResource;
-import org.apache.solr.api.PayloadObj;
-import org.apache.solr.client.solrj.request.beans.CreateConfigPayload;
+import org.apache.solr.client.api.model.CloneConfigsetRequestBody;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
import org.apache.solr.cloud.ConfigSetCmds;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.ConfigSetParams;
import org.apache.solr.common.params.ConfigSetParams.ConfigSetAction;
-import org.apache.solr.common.params.DefaultSolrParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.api.V2ApiUtils;
-import org.apache.solr.handler.configsets.CreateConfigSetAPI;
-import org.apache.solr.handler.configsets.DeleteConfigSetAPI;
+import org.apache.solr.handler.configsets.CloneConfigSet;
+import org.apache.solr.handler.configsets.ConfigSetAPIBase;
+import org.apache.solr.handler.configsets.DeleteConfigSet;
import org.apache.solr.handler.configsets.ListConfigSets;
-import org.apache.solr.handler.configsets.UploadConfigSetAPI;
-import org.apache.solr.handler.configsets.UploadConfigSetFileAPI;
-import org.apache.solr.request.DelegatingSolrQueryRequest;
+import org.apache.solr.handler.configsets.UploadConfigSet;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.security.AuthorizationContext;
@@ -96,51 +90,30 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
switch (action) {
case DELETE:
- final DeleteConfigSetAPI deleteConfigSetAPI = new DeleteConfigSetAPI(coreContainer);
- final SolrQueryRequest v2DeleteReq =
- new DelegatingSolrQueryRequest(req) {
- @Override
- public Map getPathTemplateValues() {
- return Map.of(
- DeleteConfigSetAPI.CONFIGSET_NAME_PLACEHOLDER,
- req.getParams().required().get(NAME));
- }
- };
- deleteConfigSetAPI.deleteConfigSet(v2DeleteReq, rsp);
+ final DeleteConfigSet deleteConfigSetAPI = new DeleteConfigSet(coreContainer, req, rsp);
+ final var deleteResponse =
+ deleteConfigSetAPI.deleteConfigSet(req.getParams().required().get(NAME));
+ V2ApiUtils.squashIntoSolrResponseWithoutHeader(rsp, deleteResponse);
break;
case UPLOAD:
- final SolrQueryRequest v2UploadReq =
- new DelegatingSolrQueryRequest(req) {
- @Override
- public Map getPathTemplateValues() {
- final Map templateValsByName = new HashMap<>();
-
- templateValsByName.put(
- UploadConfigSetAPI.CONFIGSET_NAME_PLACEHOLDER,
- req.getParams().required().get(NAME));
- if (!req.getParams().get(ConfigSetParams.FILE_PATH, "").isEmpty()) {
- templateValsByName.put(
- FILEPATH_PLACEHOLDER, req.getParams().get(ConfigSetParams.FILE_PATH));
- }
- return templateValsByName;
- }
-
- // Set the v1 default vals where they differ from v2's
- @Override
- public SolrParams getParams() {
- final ModifiableSolrParams v1Defaults = new ModifiableSolrParams();
- v1Defaults.add(ConfigSetParams.OVERWRITE, "false");
- v1Defaults.add(ConfigSetParams.CLEANUP, "false");
- return new DefaultSolrParams(super.getParams(), v1Defaults);
- }
- };
+ final var uploadApi = new UploadConfigSet(coreContainer, req, rsp);
+ final var configSetName = req.getParams().required().get(NAME);
+ final var overwrite = req.getParams().getBool(ConfigSetParams.OVERWRITE, false);
+ final var cleanup = req.getParams().getBool(ConfigSetParams.CLEANUP, false);
+ final var configSetData = ConfigSetAPIBase.ensureNonEmptyInputStream(req);
+ SolrJerseyResponse uploadResponse;
if (req.getParams()
.get(ConfigSetParams.FILE_PATH, "")
.isEmpty()) { // Uploading a whole configset
- new UploadConfigSetAPI(coreContainer).uploadConfigSet(v2UploadReq, rsp);
+ uploadResponse =
+ uploadApi.uploadConfigSet(configSetName, overwrite, cleanup, configSetData);
} else { // Uploading a single file
- new UploadConfigSetFileAPI(coreContainer).updateConfigSetFile(v2UploadReq, rsp);
+ final var filePath = req.getParams().get(ConfigSetParams.FILE_PATH);
+ uploadResponse =
+ uploadApi.uploadConfigSetFile(
+ configSetName, filePath, overwrite, cleanup, configSetData);
}
+ V2ApiUtils.squashIntoSolrResponseWithoutHeader(rsp, uploadResponse);
break;
case LIST:
final ListConfigSets listConfigSetsAPI = new ListConfigSets(coreContainer);
@@ -153,12 +126,14 @@ public SolrParams getParams() {
}
// Map v1 parameters into v2 format and process request
- final CreateConfigPayload createPayload = new CreateConfigPayload();
- createPayload.name = newConfigSetName;
+ final var requestBody = new CloneConfigsetRequestBody();
+ requestBody.name = newConfigSetName;
if (req.getParams().get(ConfigSetCmds.BASE_CONFIGSET) != null) {
- createPayload.baseConfigSet = req.getParams().get(ConfigSetCmds.BASE_CONFIGSET);
+ requestBody.baseConfigSet = req.getParams().get(ConfigSetCmds.BASE_CONFIGSET);
+ } else {
+ requestBody.baseConfigSet = "_default";
}
- createPayload.properties = new HashMap<>();
+ requestBody.properties = new HashMap<>();
req.getParams().stream()
.filter(entry -> entry.getKey().startsWith(ConfigSetCmds.CONFIG_SET_PROPERTY_PREFIX))
.forEach(
@@ -167,10 +142,11 @@ public SolrParams getParams() {
entry.getKey().substring(ConfigSetCmds.CONFIG_SET_PROPERTY_PREFIX.length());
final Object value =
(entry.getValue().length == 1) ? entry.getValue()[0] : entry.getValue();
- createPayload.properties.put(newKey, value);
+ requestBody.properties.put(newKey, value);
});
- final CreateConfigSetAPI createConfigSetAPI = new CreateConfigSetAPI(coreContainer);
- createConfigSetAPI.create(new PayloadObj<>("create", null, createPayload, req, rsp));
+ final CloneConfigSet createConfigSetAPI = new CloneConfigSet(coreContainer, req, rsp);
+ final var createResponse = createConfigSetAPI.cloneExistingConfigSet(requestBody);
+ V2ApiUtils.squashIntoSolrResponseWithoutHeader(rsp, createResponse);
break;
default:
throw new IllegalStateException("Unexpected ConfigSetAction detected: " + action);
@@ -207,18 +183,13 @@ public Boolean registerV2() {
@Override
public Collection<Api> getApis() {
- final List<Api> apis = new ArrayList<>();
- apis.addAll(AnnotatedApi.getApis(new CreateConfigSetAPI(coreContainer)));
- apis.addAll(AnnotatedApi.getApis(new DeleteConfigSetAPI(coreContainer)));
- apis.addAll(AnnotatedApi.getApis(new UploadConfigSetAPI(coreContainer)));
- apis.addAll(AnnotatedApi.getApis(new UploadConfigSetFileAPI(coreContainer)));
-
- return apis;
+ return new ArrayList<>();
}
@Override
public Collection<Class<? extends JerseyResource>> getJerseyResources() {
- return List.of(ListConfigSets.class);
+ return List.of(
+ ListConfigSets.class, CloneConfigSet.class, DeleteConfigSet.class, UploadConfigSet.class);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
index 93cdf071a1c..fd1378b6597 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/SegmentsInfoRequestHandler.java
@@ -16,57 +16,20 @@
*/
package org.apache.solr.handler.admin;
-import static org.apache.lucene.index.IndexOptions.DOCS;
-import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS;
-import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
-import static org.apache.solr.common.params.CommonParams.NAME;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
+import java.util.Collection;
import java.util.List;
-import java.util.stream.Collectors;
-import org.apache.lucene.index.DocValuesType;
-import org.apache.lucene.index.FieldInfo;
-import org.apache.lucene.index.FieldInfos;
-import org.apache.lucene.index.FilterLeafReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.LeafMetaData;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.MergePolicy.MergeSpecification;
-import org.apache.lucene.index.MergePolicy.OneMerge;
-import org.apache.lucene.index.MergeTrigger;
-import org.apache.lucene.index.SegmentCommitInfo;
-import org.apache.lucene.index.SegmentInfos;
-import org.apache.lucene.index.SegmentReader;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.apache.lucene.util.Version;
-import org.apache.solr.common.luke.FieldFlag;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.core.SolrCore;
+import org.apache.solr.api.JerseyResource;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
+import org.apache.solr.common.params.SolrParams;
import org.apache.solr.handler.RequestHandlerBase;
+import org.apache.solr.handler.admin.api.GetSegmentData;
+import org.apache.solr.handler.api.V2ApiUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.SchemaField;
-import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.security.AuthorizationContext;
-import org.apache.solr.update.SolrIndexWriter;
-import org.apache.solr.util.RefCounted;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/** This handler exposes information about last commit generation segments */
public class SegmentsInfoRequestHandler extends RequestHandlerBase {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
public static final String FIELD_INFO_PARAM = "fieldInfo";
public static final String CORE_INFO_PARAM = "coreInfo";
@@ -76,385 +39,22 @@ public class SegmentsInfoRequestHandler extends RequestHandlerBase {
public static final String RAW_SIZE_DETAILS_PARAM = "rawSizeDetails";
public static final String RAW_SIZE_SAMPLING_PERCENT_PARAM = "rawSizeSamplingPercent";
- private static final List<String> FI_LEGEND;
-
- static {
- FI_LEGEND =
- Arrays.asList(
- FieldFlag.INDEXED.toString(),
- FieldFlag.DOC_VALUES.toString(),
- "xxx - DocValues type",
- FieldFlag.TERM_VECTOR_STORED.toString(),
- FieldFlag.OMIT_NORMS.toString(),
- FieldFlag.OMIT_TF.toString(),
- FieldFlag.OMIT_POSITIONS.toString(),
- FieldFlag.STORE_OFFSETS_WITH_POSITIONS.toString(),
- "p - field has payloads",
- "s - field uses soft deletes",
- ":x:x:x - point data dim : index dim : num bytes");
- }
-
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- getSegmentsInfo(req, rsp);
- rsp.setHttpCaching(false);
- }
-
- private static final double GB = 1024.0 * 1024.0 * 1024.0;
-
- private void getSegmentsInfo(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- boolean withFieldInfo = req.getParams().getBool(FIELD_INFO_PARAM, false);
- boolean withCoreInfo = req.getParams().getBool(CORE_INFO_PARAM, false);
- boolean withSizeInfo = req.getParams().getBool(SIZE_INFO_PARAM, false);
- boolean withRawSizeInfo = req.getParams().getBool(RAW_SIZE_PARAM, false);
- boolean withRawSizeSummary = req.getParams().getBool(RAW_SIZE_SUMMARY_PARAM, false);
- boolean withRawSizeDetails = req.getParams().getBool(RAW_SIZE_DETAILS_PARAM, false);
- if (withRawSizeSummary || withRawSizeDetails) {
- withRawSizeInfo = true;
- }
- SolrIndexSearcher searcher = req.getSearcher();
-
- SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory());
-
- SimpleOrderedMap segmentInfos = new SimpleOrderedMap<>();
-
- SolrCore core = req.getCore();
- SimpleOrderedMap infosInfo = new SimpleOrderedMap<>();
- Version minVersion = infos.getMinSegmentLuceneVersion();
- if (minVersion != null) {
- infosInfo.add("minSegmentLuceneVersion", minVersion.toString());
- }
- Version commitVersion = infos.getCommitLuceneVersion();
- if (commitVersion != null) {
- infosInfo.add("commitLuceneVersion", commitVersion.toString());
- }
- infosInfo.add("numSegments", infos.size());
- infosInfo.add("segmentsFileName", infos.getSegmentsFileName());
- infosInfo.add("totalMaxDoc", infos.totalMaxDoc());
- infosInfo.add("userData", infos.userData);
- if (withCoreInfo) {
- SimpleOrderedMap coreInfo = new SimpleOrderedMap<>();
- infosInfo.add("core", coreInfo);
- coreInfo.add(
- "startTime", core.getStartTimeStamp().getTime() + "(" + core.getStartTimeStamp() + ")");
- coreInfo.add("dataDir", core.getDataDir());
- coreInfo.add("indexDir", core.getIndexDir());
- coreInfo.add("sizeInGB", (double) core.getIndexSize() / GB);
-
- RefCounted iwRef = core.getSolrCoreState().getIndexWriter(core);
- if (iwRef != null) {
- try {
- IndexWriter iw = iwRef.get();
- String iwConfigStr = iw.getConfig().toString();
- SimpleOrderedMap iwConfig = new SimpleOrderedMap<>();
- // meh ...
- String[] lines = iwConfigStr.split("\\n");
- for (String line : lines) {
- String[] parts = line.split("=");
- if (parts.length < 2) {
- continue;
- }
- iwConfig.add(parts[0], parts[1]);
- }
- coreInfo.add("indexWriterConfig", iwConfig);
- } finally {
- iwRef.decref();
- }
- }
- }
- SimpleOrderedMap segmentInfo;
- List sortable = new ArrayList<>(infos.asList());
- // Order by the number of live docs. The display is logarithmic so it is a little jumbled
- // visually
- sortable.sort(
- (s1, s2) -> (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount()));
-
- List mergeCandidates = new ArrayList<>();
- SimpleOrderedMap runningMerges = getMergeInformation(req, infos, mergeCandidates);
- List leafContexts = searcher.getIndexReader().leaves();
- IndexSchema schema = req.getSchema();
- for (SegmentCommitInfo segmentCommitInfo : sortable) {
- segmentInfo =
- getSegmentInfo(segmentCommitInfo, withSizeInfo, withFieldInfo, leafContexts, schema);
- if (mergeCandidates.contains(segmentCommitInfo.info.name)) {
- segmentInfo.add("mergeCandidate", true);
- }
- segmentInfos.add((String) segmentInfo.get(NAME), segmentInfo);
- }
-
- rsp.add("info", infosInfo);
- if (runningMerges.size() > 0) {
- rsp.add("runningMerges", runningMerges);
- }
- if (withFieldInfo) {
- rsp.add("fieldInfoLegend", FI_LEGEND);
- }
- rsp.add("segments", segmentInfos);
- if (withRawSizeInfo) {
- IndexSizeEstimator estimator =
- new IndexSizeEstimator(
- searcher.getRawReader(), 20, 100, withRawSizeSummary, withRawSizeDetails);
- Object samplingPercentVal = req.getParams().get(RAW_SIZE_SAMPLING_PERCENT_PARAM);
- if (samplingPercentVal != null) {
- estimator.setSamplingPercent(Float.parseFloat(String.valueOf(samplingPercentVal)));
- }
- IndexSizeEstimator.Estimate estimate = estimator.estimate();
- SimpleOrderedMap estimateMap = new SimpleOrderedMap<>();
- // make the units more user-friendly
- estimateMap.add(IndexSizeEstimator.FIELDS_BY_SIZE, estimate.getHumanReadableFieldsBySize());
- estimateMap.add(IndexSizeEstimator.TYPES_BY_SIZE, estimate.getHumanReadableTypesBySize());
- if (estimate.getSummary() != null) {
- estimateMap.add(IndexSizeEstimator.SUMMARY, estimate.getSummary());
- }
- if (estimate.getDetails() != null) {
- estimateMap.add(IndexSizeEstimator.DETAILS, estimate.getDetails());
- }
- rsp.add("rawSize", estimateMap);
- }
- }
-
- private SimpleOrderedMap getSegmentInfo(
- SegmentCommitInfo segmentCommitInfo,
- boolean withSizeInfo,
- boolean withFieldInfos,
- List leafContexts,
- IndexSchema schema)
- throws IOException {
- SimpleOrderedMap segmentInfoMap = new SimpleOrderedMap<>();
-
- segmentInfoMap.add(NAME, segmentCommitInfo.info.name);
- segmentInfoMap.add("delCount", segmentCommitInfo.getDelCount());
- segmentInfoMap.add("softDelCount", segmentCommitInfo.getSoftDelCount());
- segmentInfoMap.add("hasFieldUpdates", segmentCommitInfo.hasFieldUpdates());
- segmentInfoMap.add("sizeInBytes", segmentCommitInfo.sizeInBytes());
- segmentInfoMap.add("size", segmentCommitInfo.info.maxDoc());
- Long timestamp = Long.parseLong(segmentCommitInfo.info.getDiagnostics().get("timestamp"));
- segmentInfoMap.add("age", new Date(timestamp));
- segmentInfoMap.add("source", segmentCommitInfo.info.getDiagnostics().get("source"));
- segmentInfoMap.add("version", segmentCommitInfo.info.getVersion().toString());
- // don't open a new SegmentReader - try to find the right one from the leaf contexts
- SegmentReader seg = null;
- for (LeafReaderContext lrc : leafContexts) {
- LeafReader leafReader = lrc.reader();
- leafReader = FilterLeafReader.unwrap(leafReader);
- if (leafReader instanceof SegmentReader sr) {
- if (sr.getSegmentInfo().info.equals(segmentCommitInfo.info)) {
- seg = sr;
- break;
- }
- }
- }
- if (seg != null) {
- LeafMetaData metaData = seg.getMetaData();
- if (metaData != null) {
- segmentInfoMap.add("createdVersionMajor", metaData.getCreatedVersionMajor());
- segmentInfoMap.add("minVersion", metaData.getMinVersion().toString());
- if (metaData.getSort() != null) {
- segmentInfoMap.add("sort", metaData.getSort().toString());
- }
- }
- }
- if (!segmentCommitInfo.info.getDiagnostics().isEmpty()) {
- segmentInfoMap.add("diagnostics", segmentCommitInfo.info.getDiagnostics());
- }
- if (!segmentCommitInfo.info.getAttributes().isEmpty()) {
- segmentInfoMap.add("attributes", segmentCommitInfo.info.getAttributes());
- }
- if (withSizeInfo) {
- Directory dir = segmentCommitInfo.info.dir;
- List> files =
- segmentCommitInfo.files().stream()
- .map(
- f -> {
- long size = -1;
- try {
- size = dir.fileLength(f);
- } catch (IOException e) {
- }
- return new Pair(f, size);
- })
- .sorted(
- (p1, p2) -> {
- if (p1.second() > p2.second()) {
- return -1;
- } else if (p1.second() < p2.second()) {
- return 1;
- } else {
- return 0;
- }
- })
- .collect(Collectors.toList());
- if (!files.isEmpty()) {
- SimpleOrderedMap topFiles = new SimpleOrderedMap<>();
- for (int i = 0; i < Math.min(files.size(), 5); i++) {
- Pair p = files.get(i);
- topFiles.add(p.first(), RamUsageEstimator.humanReadableUnits(p.second()));
- }
- segmentInfoMap.add("largestFiles", topFiles);
- }
- }
- if (withFieldInfos) {
- if (seg == null) {
- log.debug(
- "Skipping segment info - not available as a SegmentReader: {}", segmentCommitInfo);
- } else {
- FieldInfos fis = seg.getFieldInfos();
- SimpleOrderedMap fields = new SimpleOrderedMap<>();
- for (FieldInfo fi : fis) {
- fields.add(fi.name, getFieldInfo(seg, fi, schema));
- }
- segmentInfoMap.add("fields", fields);
- }
- }
-
- return segmentInfoMap;
- }
+ final SolrParams params = req.getParams();
+ final GetSegmentData segmentDataApi = new GetSegmentData(req.getCore(), req, rsp);
+ final SolrJerseyResponse response =
+ segmentDataApi.getSegmentData(
+ params.getBool(CORE_INFO_PARAM),
+ params.getBool(FIELD_INFO_PARAM),
+ params.getBool(RAW_SIZE_PARAM),
+ params.getBool(RAW_SIZE_SUMMARY_PARAM),
+ params.getBool(RAW_SIZE_DETAILS_PARAM),
+ params.getFloat(RAW_SIZE_SAMPLING_PERCENT_PARAM),
+ params.getBool(SIZE_INFO_PARAM));
+ V2ApiUtils.squashIntoSolrResponseWithoutHeader(rsp, response);
- private SimpleOrderedMap getFieldInfo(
- SegmentReader reader, FieldInfo fi, IndexSchema schema) {
- SimpleOrderedMap fieldFlags = new SimpleOrderedMap<>();
- StringBuilder flags = new StringBuilder();
- IndexOptions opts = fi.getIndexOptions();
- flags.append((opts != IndexOptions.NONE) ? FieldFlag.INDEXED.getAbbreviation() : '-');
- DocValuesType dvt = fi.getDocValuesType();
- if (dvt != DocValuesType.NONE) {
- flags.append(FieldFlag.DOC_VALUES.getAbbreviation());
- switch (dvt) {
- case NUMERIC:
- flags.append("num");
- break;
- case BINARY:
- flags.append("bin");
- break;
- case SORTED:
- flags.append("srt");
- break;
- case SORTED_NUMERIC:
- flags.append("srn");
- break;
- case SORTED_SET:
- flags.append("srs");
- break;
- default:
- flags.append("???"); // should not happen
- }
- } else {
- flags.append("----");
- }
- flags.append((fi.hasVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-');
- flags.append((fi.omitsNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-');
-
- flags.append((DOCS == opts) ? FieldFlag.OMIT_TF.getAbbreviation() : '-');
-
- flags.append((DOCS_AND_FREQS == opts) ? FieldFlag.OMIT_POSITIONS.getAbbreviation() : '-');
-
- flags.append(
- (DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS == opts)
- ? FieldFlag.STORE_OFFSETS_WITH_POSITIONS.getAbbreviation()
- : '-');
-
- flags.append((fi.hasPayloads() ? "p" : "-"));
- flags.append((fi.isSoftDeletesField() ? "s" : "-"));
- if (fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0) {
- flags.append(":");
- flags.append(fi.getPointDimensionCount()).append(':');
- flags.append(fi.getPointIndexDimensionCount()).append(':');
- flags.append(fi.getPointNumBytes());
- }
-
- fieldFlags.add("flags", flags.toString());
- try {
- Terms terms = reader.terms(fi.name);
- if (terms != null) {
- fieldFlags.add("docCount", terms.getDocCount());
- fieldFlags.add("termCount", terms.size());
- fieldFlags.add("sumDocFreq", terms.getSumDocFreq());
- fieldFlags.add("sumTotalTermFreq", terms.getSumTotalTermFreq());
- }
- } catch (Exception e) {
- log.debug("Exception retrieving term stats for field {}", fi.name, e);
- }
-
- // probably too much detail?
- // Map attributes = fi.attributes();
- // if (!attributes.isEmpty()) {
- // fieldFlags.add("attributes", attributes);
- // }
-
- // check compliance of the index with the current schema
- SchemaField sf = schema.getFieldOrNull(fi.name);
- boolean hasPoints = fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0;
-
- if (sf != null) {
- fieldFlags.add("schemaType", sf.getType().getTypeName());
- SimpleOrderedMap nonCompliant = new SimpleOrderedMap<>();
- if (sf.hasDocValues()
- && fi.getDocValuesType() == DocValuesType.NONE
- && fi.getIndexOptions() != IndexOptions.NONE) {
- nonCompliant.add(
- "docValues", "schema=" + sf.getType().getUninversionType(sf) + ", segment=false");
- }
- if (!sf.hasDocValues() && fi.getDocValuesType() != DocValuesType.NONE) {
- nonCompliant.add("docValues", "schema=false, segment=" + fi.getDocValuesType().toString());
- }
- if (!sf.isPolyField()) { // difficult to find all sub-fields in a general way
- if (sf.indexed() != ((fi.getIndexOptions() != IndexOptions.NONE) || hasPoints)) {
- nonCompliant.add(
- "indexed", "schema=" + sf.indexed() + ", segment=" + fi.getIndexOptions());
- }
- }
- if (!hasPoints && (sf.omitNorms() != fi.omitsNorms())) {
- nonCompliant.add("omitNorms", "schema=" + sf.omitNorms() + ", segment=" + fi.omitsNorms());
- }
- if (sf.storeTermVector() != fi.hasVectors()) {
- nonCompliant.add(
- "termVectors", "schema=" + sf.storeTermVector() + ", segment=" + fi.hasVectors());
- }
- if (sf.storeOffsetsWithPositions()
- != (fi.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)) {
- nonCompliant.add(
- "storeOffsetsWithPositions",
- "schema=" + sf.storeOffsetsWithPositions() + ", segment=" + fi.getIndexOptions());
- }
-
- if (nonCompliant.size() > 0) {
- nonCompliant.add("schemaField", sf.toString());
- fieldFlags.add("nonCompliant", nonCompliant);
- }
- } else {
- fieldFlags.add("schemaType", "(UNKNOWN)");
- }
- return fieldFlags;
- }
-
- // returns a map of currently running merges, and populates a list of candidate segments for merge
- private SimpleOrderedMap getMergeInformation(
- SolrQueryRequest req, SegmentInfos infos, List mergeCandidates) throws IOException {
- SimpleOrderedMap result = new SimpleOrderedMap<>();
- RefCounted refCounted =
- req.getCore().getSolrCoreState().getIndexWriter(req.getCore());
- try {
- IndexWriter indexWriter = refCounted.get();
- if (indexWriter instanceof SolrIndexWriter) {
- result.addAll(((SolrIndexWriter) indexWriter).getRunningMerges());
- }
- // get chosen merge policy
- MergePolicy mp = indexWriter.getConfig().getMergePolicy();
- // Find merges
- MergeSpecification findMerges = mp.findMerges(MergeTrigger.EXPLICIT, infos, indexWriter);
- if (findMerges != null && findMerges.merges != null && findMerges.merges.size() > 0) {
- for (OneMerge merge : findMerges.merges) {
- // TODO: add merge grouping
- for (SegmentCommitInfo mergeSegmentInfo : merge.segments) {
- mergeCandidates.add(mergeSegmentInfo.info.name);
- }
- }
- }
-
- return result;
- } finally {
- refCounted.decref();
- }
+ rsp.setHttpCaching(false);
}
@Override
@@ -471,4 +71,14 @@ public Category getCategory() {
public Name getPermissionName(AuthorizationContext request) {
return Name.METRICS_READ_PERM;
}
+
+ @Override
+ public Boolean registerV2() {
+ return Boolean.TRUE;
+ }
+
+ @Override
+ public Collection<Class<? extends JerseyResource>> getJerseyResources() {
+ return List.of(GetSegmentData.class);
+ }
}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java
new file mode 100644
index 00000000000..f80b6363071
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatus.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin.api;
+
+import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+
+import jakarta.inject.Inject;
+import org.apache.solr.client.api.endpoint.CollectionStatusApi;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
+import org.apache.solr.common.cloud.ZkNodeProps;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.handler.admin.ColStatus;
+import org.apache.solr.jersey.PermissionName;
+import org.apache.solr.jersey.SolrJacksonMapper;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.PermissionNameProvider;
+
+/** V2 API implementation for {@link CollectionStatusApi}. */
+public class CollectionStatus extends AdminAPIBase implements CollectionStatusApi {
+
+ @Inject
+ public CollectionStatus(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
+ super(coreContainer, solrQueryRequest, solrQueryResponse);
+ }
+
+ @Override
+ @PermissionName(PermissionNameProvider.Name.COLL_READ_PERM)
+ public CollectionStatusResponse getCollectionStatus(
+ String collectionName,
+ Boolean coreInfo,
+ Boolean segments,
+ Boolean fieldInfo,
+ Boolean rawSize,
+ Boolean rawSizeSummary,
+ Boolean rawSizeDetails,
+ Float rawSizeSamplingPercent,
+ Boolean sizeInfo)
+ throws Exception {
+ recordCollectionForLogAndTracing(collectionName, solrQueryRequest);
+
+ final var params = new ModifiableSolrParams();
+ params.set(COLLECTION_PROP, collectionName);
+ params.setNonNull(ColStatus.CORE_INFO_PROP, coreInfo);
+ params.setNonNull(ColStatus.SEGMENTS_PROP, segments);
+ params.setNonNull(ColStatus.FIELD_INFO_PROP, fieldInfo);
+ params.setNonNull(ColStatus.RAW_SIZE_PROP, rawSize);
+ params.setNonNull(ColStatus.RAW_SIZE_SUMMARY_PROP, rawSizeSummary);
+ params.setNonNull(ColStatus.RAW_SIZE_DETAILS_PROP, rawSizeDetails);
+ params.setNonNull(ColStatus.RAW_SIZE_SAMPLING_PERCENT_PROP, rawSizeSamplingPercent);
+ params.setNonNull(ColStatus.SIZE_INFO_PROP, sizeInfo);
+
+ final var nlResponse = new NamedList<>();
+ populateColStatusData(coreContainer, new ZkNodeProps(params), nlResponse);
+
+ // v2 API does not support requesting the status of multiple collections simultaneously as its
+ // counterpart does, and its response looks slightly different as a result. Primarily, the
+ // v2 response eschews a level of nesting that was necessitated by the multi-collection nature of
+ // v1. These tweaks are made below before returning.
+ final var colStatusResponse =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(nlResponse.get(collectionName), CollectionStatusResponse.class);
+ colStatusResponse.name = collectionName;
+ return colStatusResponse;
+ }
+
+ // TODO Modify ColStatus to produce a CollectionStatusResponse instead of a NL
+ public static void populateColStatusData(
+ CoreContainer coreContainer, ZkNodeProps params, NamedList colStatusSink) {
+ final var colStatusAssembler =
+ new ColStatus(
+ coreContainer.getSolrClientCache(),
+ coreContainer.getZkController().getZkStateReader().getClusterState(),
+ params);
+ colStatusAssembler.getColStatus(colStatusSink);
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java b/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java
deleted file mode 100644
index 4b7eabe7226..00000000000
--- a/solr/core/src/java/org/apache/solr/handler/admin/api/CollectionStatusAPI.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler.admin.api;
-
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
-import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CoreAdminParams.COLLECTION;
-import static org.apache.solr.handler.ClusterAPI.wrapParams;
-import static org.apache.solr.security.PermissionNameProvider.Name.COLL_READ_PERM;
-
-import java.lang.invoke.MethodHandles;
-import org.apache.solr.api.EndPoint;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.handler.admin.CollectionsHandler;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * V2 API for displaying basic information about a single collection.
- *
- * <p>This API (GET /v2/collections/collectionName) is analogous to the v1
- * /admin/collections?action=CLUSTERSTATUS&collection=collectionName command.
- */
-public class CollectionStatusAPI {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private final CollectionsHandler collectionsHandler;
-
- public CollectionStatusAPI(CollectionsHandler collectionsHandler) {
- this.collectionsHandler = collectionsHandler;
- }
-
- @EndPoint(
- path = {"/c/{collection}", "/collections/{collection}"},
- method = GET,
- permission = COLL_READ_PERM)
- public void getCollectionStatus(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- req =
- wrapParams(
- req, // 'req' can have a 'shard' param
- ACTION,
- CollectionParams.CollectionAction.CLUSTERSTATUS.toString(),
- COLLECTION,
- req.getPathTemplateValues().get(ZkStateReader.COLLECTION_PROP));
- collectionsHandler.handleRequestBody(req, rsp);
- }
-}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java b/solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java
new file mode 100644
index 00000000000..ceec55ea33f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/api/GetSegmentData.java
@@ -0,0 +1,501 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.admin.api;
+
+import static org.apache.lucene.index.IndexOptions.DOCS;
+import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS;
+import static org.apache.lucene.index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
+
+import jakarta.inject.Inject;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.lucene.index.DocValuesType;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafMetaData;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.LiveIndexWriterConfig;
+import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.MergeTrigger;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.lucene.util.Version;
+import org.apache.solr.api.JerseyResource;
+import org.apache.solr.client.api.endpoint.SegmentsApi;
+import org.apache.solr.client.api.model.GetSegmentDataResponse;
+import org.apache.solr.common.luke.FieldFlag;
+import org.apache.solr.common.util.Pair;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.handler.admin.IndexSizeEstimator;
+import org.apache.solr.jersey.PermissionName;
+import org.apache.solr.jersey.SolrJacksonMapper;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.security.PermissionNameProvider;
+import org.apache.solr.update.SolrIndexWriter;
+import org.apache.solr.util.RefCounted;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * V2 API implementation for {@link SegmentsApi}
+ *
+ *
+ * <p>Equivalent to the v1 /solr/coreName/admin/segments endpoint.
+ */
+public class GetSegmentData extends JerseyResource implements SegmentsApi {
+
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final double GB = 1024.0 * 1024.0 * 1024.0;
+
+ private static final List<String> FI_LEGEND =
+ Arrays.asList(
+ FieldFlag.INDEXED.toString(),
+ FieldFlag.DOC_VALUES.toString(),
+ "xxx - DocValues type",
+ FieldFlag.TERM_VECTOR_STORED.toString(),
+ FieldFlag.OMIT_NORMS.toString(),
+ FieldFlag.OMIT_TF.toString(),
+ FieldFlag.OMIT_POSITIONS.toString(),
+ FieldFlag.STORE_OFFSETS_WITH_POSITIONS.toString(),
+ "p - field has payloads",
+ "s - field uses soft deletes",
+ ":x:x:x - point data dim : index dim : num bytes");
+
+ protected final SolrCore solrCore;
+ protected final SolrQueryRequest solrQueryRequest;
+ protected final SolrQueryResponse solrQueryResponse;
+
+ @Inject
+ public GetSegmentData(SolrCore solrCore, SolrQueryRequest req, SolrQueryResponse rsp) {
+ this.solrCore = solrCore;
+ this.solrQueryRequest = req;
+ this.solrQueryResponse = rsp;
+ }
+
+ @Override
+ @PermissionName(PermissionNameProvider.Name.METRICS_READ_PERM)
+ public GetSegmentDataResponse getSegmentData(
+ Boolean coreInfo,
+ Boolean fieldInfo,
+ Boolean rawSize,
+ Boolean rawSizeSummary,
+ Boolean rawSizeDetails,
+ Float rawSizeSamplingPercent,
+ Boolean sizeInfo)
+ throws Exception {
+ boolean withFieldInfo = Boolean.TRUE.equals(fieldInfo);
+ boolean withCoreInfo = Boolean.TRUE.equals(coreInfo);
+ boolean withSizeInfo = Boolean.TRUE.equals(sizeInfo);
+ boolean withRawSizeInfo = Boolean.TRUE.equals(rawSize);
+ boolean withRawSizeSummary = Boolean.TRUE.equals(rawSizeSummary);
+ boolean withRawSizeDetails = Boolean.TRUE.equals(rawSizeDetails);
+ if (withRawSizeSummary || withRawSizeDetails) {
+ withRawSizeInfo = true;
+ }
+ SolrIndexSearcher searcher = solrQueryRequest.getSearcher();
+ SolrCore core = solrQueryRequest.getCore();
+
+ final var response = new GetSegmentDataResponse();
+
+ SegmentInfos infos = SegmentInfos.readLatestCommit(searcher.getIndexReader().directory());
+ response.info = new GetSegmentDataResponse.SegmentSummary();
+ Version minVersion = infos.getMinSegmentLuceneVersion();
+ if (minVersion != null) {
+ response.info.minSegmentLuceneVersion = minVersion.toString();
+ }
+ Version commitVersion = infos.getCommitLuceneVersion();
+ if (commitVersion != null) {
+ response.info.commitLuceneVersion = commitVersion.toString();
+ }
+ response.info.numSegments = infos.size();
+ response.info.segmentsFileName = infos.getSegmentsFileName();
+ response.info.totalMaxDoc = infos.totalMaxDoc();
+ response.info.userData = infos.userData;
+
+ if (withCoreInfo) {
+ final var coreSummary = new GetSegmentDataResponse.CoreSummary();
+ response.info.core = coreSummary;
+ coreSummary.startTime =
+ core.getStartTimeStamp().getTime() + "(" + core.getStartTimeStamp() + ")";
+ coreSummary.dataDir = core.getDataDir();
+ coreSummary.indexDir = core.getIndexDir();
+ coreSummary.sizeInGB = (double) core.getIndexSize() / GB;
+
+ RefCounted<IndexWriter> iwRef = core.getSolrCoreState().getIndexWriter(core);
+ if (iwRef != null) {
+ try {
+ IndexWriter iw = iwRef.get();
+ coreSummary.indexWriterConfig = convertIndexWriterConfigToResponse(iw.getConfig());
+ } finally {
+ iwRef.decref();
+ }
+ }
+ }
+
+ List<SegmentCommitInfo> sortable = new ArrayList<>(infos.asList());
+ // Order by the number of live docs. The display is logarithmic so it is a little jumbled
+ // visually
+ sortable.sort(
+ (s1, s2) -> (s2.info.maxDoc() - s2.getDelCount()) - (s1.info.maxDoc() - s1.getDelCount()));
+
+ List<String> mergeCandidates = new ArrayList<>();
+ final var runningMerges = getMergeInformation(solrQueryRequest, infos, mergeCandidates);
+ List<LeafReaderContext> leafContexts = searcher.getIndexReader().leaves();
+ IndexSchema schema = solrQueryRequest.getSchema();
+ response.segments = new HashMap<>();
+ for (SegmentCommitInfo segmentCommitInfo : sortable) {
+ final var singleSegmentData =
+ getSegmentInfo(segmentCommitInfo, withSizeInfo, withFieldInfo, leafContexts, schema);
+ if (mergeCandidates.contains(segmentCommitInfo.info.name)) {
+ singleSegmentData.mergeCandidate = true;
+ }
+ response.segments.put(singleSegmentData.name, singleSegmentData);
+ }
+
+ if (runningMerges.size() > 0) {
+ response.runningMerges = runningMerges;
+ }
+ if (withFieldInfo) {
+ response.fieldInfoLegend = FI_LEGEND;
+ }
+
+ if (withRawSizeInfo) {
+ IndexSizeEstimator estimator =
+ new IndexSizeEstimator(
+ searcher.getRawReader(), 20, 100, withRawSizeSummary, withRawSizeDetails);
+ if (rawSizeSamplingPercent != null) {
+ estimator.setSamplingPercent(rawSizeSamplingPercent);
+ }
+ IndexSizeEstimator.Estimate estimate = estimator.estimate();
+ final var rawSizeResponse = new GetSegmentDataResponse.RawSize();
+ // make the units more user-friendly
+ rawSizeResponse.fieldsBySize = estimate.getHumanReadableFieldsBySize();
+ rawSizeResponse.typesBySize = estimate.getHumanReadableTypesBySize();
+ if (estimate.getSummary() != null) {
+ rawSizeResponse.summary = estimate.getSummary();
+ }
+ if (estimate.getDetails() != null) {
+ rawSizeResponse.details = estimate.getDetails();
+ }
+ response.rawSize = rawSizeResponse;
+ }
+
+ return response;
+ }
+
+ /**
+ * Converts Lucene's IndexWriter configuration object into a response type fit for serialization
+ *
+ * <p>Based on {@link LiveIndexWriterConfig#toString()} for legacy reasons.
+ *
+ * @param iwConfig the Lucene configuration object to convert
+ */
+ private GetSegmentDataResponse.IndexWriterConfigSummary convertIndexWriterConfigToResponse(
+ LiveIndexWriterConfig iwConfig) {
+ final var iwConfigResponse = new GetSegmentDataResponse.IndexWriterConfigSummary();
+ iwConfigResponse.analyzer =
+ iwConfig.getAnalyzer() != null ? iwConfig.getAnalyzer().getClass().getName() : "null";
+ iwConfigResponse.ramBufferSizeMB = iwConfig.getRAMBufferSizeMB();
+ iwConfigResponse.maxBufferedDocs = iwConfig.getMaxBufferedDocs();
+ iwConfigResponse.mergedSegmentWarmer = String.valueOf(iwConfig.getMergedSegmentWarmer());
+ iwConfigResponse.delPolicy = iwConfig.getIndexDeletionPolicy().getClass().getName();
+ iwConfigResponse.commit = String.valueOf(iwConfig.getIndexCommit());
+ iwConfigResponse.openMode = String.valueOf(iwConfig.getOpenMode());
+ iwConfigResponse.similarity = iwConfig.getSimilarity().getClass().getName();
+ iwConfigResponse.mergeScheduler = String.valueOf(iwConfig.getMergeScheduler());
+ iwConfigResponse.codec = String.valueOf(iwConfig.getCodec());
+ iwConfigResponse.infoStream = iwConfig.getInfoStream().getClass().getName();
+ iwConfigResponse.mergePolicy = String.valueOf(iwConfig.getMergePolicy());
+ iwConfigResponse.readerPooling = iwConfig.getReaderPooling();
+ iwConfigResponse.perThreadHardLimitMB = iwConfig.getRAMPerThreadHardLimitMB();
+ iwConfigResponse.useCompoundFile = iwConfig.getUseCompoundFile();
+ iwConfigResponse.commitOnClose = iwConfig.getCommitOnClose();
+ iwConfigResponse.indexSort = String.valueOf(iwConfig.getIndexSort());
+ iwConfigResponse.checkPendingFlushOnUpdate = iwConfig.isCheckPendingFlushOnUpdate();
+ iwConfigResponse.softDeletesField = iwConfig.getSoftDeletesField();
+ iwConfigResponse.maxFullFlushMergeWaitMillis = iwConfig.getMaxFullFlushMergeWaitMillis();
+ iwConfigResponse.leafSorter = String.valueOf(iwConfig.getLeafSorter());
+ iwConfigResponse.eventListener = String.valueOf(iwConfig.getIndexWriterEventListener());
+ iwConfigResponse.parentField = iwConfig.getParentField();
+ return iwConfigResponse;
+ }
+
+ // returns a map of currently running merges, and populates a list of candidate segments for merge
+ private Map<String, Object> getMergeInformation(
+ SolrQueryRequest req, SegmentInfos infos, List<String> mergeCandidates) throws IOException {
+ final var result = new HashMap<String, Object>();
+ RefCounted<IndexWriter> refCounted =
+ req.getCore().getSolrCoreState().getIndexWriter(req.getCore());
+ try {
+ IndexWriter indexWriter = refCounted.get();
+ if (indexWriter instanceof SolrIndexWriter) {
+ result.putAll(((SolrIndexWriter) indexWriter).getRunningMerges());
+ }
+ // get chosen merge policy
+ MergePolicy mp = indexWriter.getConfig().getMergePolicy();
+ // Find merges
+ MergePolicy.MergeSpecification findMerges =
+ mp.findMerges(MergeTrigger.EXPLICIT, infos, indexWriter);
+ if (findMerges != null && findMerges.merges != null && findMerges.merges.size() > 0) {
+ for (MergePolicy.OneMerge merge : findMerges.merges) {
+ // TODO: add merge grouping
+ for (SegmentCommitInfo mergeSegmentInfo : merge.segments) {
+ mergeCandidates.add(mergeSegmentInfo.info.name);
+ }
+ }
+ }
+
+ return result;
+ } finally {
+ refCounted.decref();
+ }
+ }
+
+ private GetSegmentDataResponse.SingleSegmentData getSegmentInfo(
+ SegmentCommitInfo segmentCommitInfo,
+ boolean withSizeInfo,
+ boolean withFieldInfos,
+ List leafContexts,
+ IndexSchema schema)
+ throws IOException {
+ final var segmentInfo = new GetSegmentDataResponse.SingleSegmentData();
+ segmentInfo.name = segmentCommitInfo.info.name;
+ segmentInfo.delCount = segmentCommitInfo.getDelCount();
+ segmentInfo.softDelCount = segmentCommitInfo.getSoftDelCount();
+ segmentInfo.hasFieldUpdates = segmentCommitInfo.hasFieldUpdates();
+ segmentInfo.sizeInBytes = segmentCommitInfo.sizeInBytes();
+ segmentInfo.size = segmentCommitInfo.info.maxDoc();
+ Long timestamp = Long.parseLong(segmentCommitInfo.info.getDiagnostics().get("timestamp"));
+ segmentInfo.age = new Date(timestamp);
+ segmentInfo.source = segmentCommitInfo.info.getDiagnostics().get("source");
+ segmentInfo.version = segmentCommitInfo.info.getVersion().toString();
+
+ // don't open a new SegmentReader - try to find the right one from the leaf contexts
+ SegmentReader seg = null;
+ for (LeafReaderContext lrc : leafContexts) {
+ LeafReader leafReader = lrc.reader();
+ leafReader = FilterLeafReader.unwrap(leafReader);
+ if (leafReader instanceof SegmentReader sr) {
+ if (sr.getSegmentInfo().info.equals(segmentCommitInfo.info)) {
+ seg = sr;
+ break;
+ }
+ }
+ }
+ if (seg != null) {
+ LeafMetaData metaData = seg.getMetaData();
+ if (metaData != null) {
+ segmentInfo.createdVersionMajor = metaData.getCreatedVersionMajor();
+ segmentInfo.minVersion = metaData.getMinVersion().toString();
+ if (metaData.getSort() != null) {
+ segmentInfo.sort = metaData.getSort().toString();
+ }
+ }
+ }
+
+ if (!segmentCommitInfo.info.getDiagnostics().isEmpty()) {
+ segmentInfo.diagnostics =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(
+ segmentCommitInfo.info.getDiagnostics(),
+ GetSegmentDataResponse.SegmentDiagnosticInfo.class);
+ }
+ if (!segmentCommitInfo.info.getAttributes().isEmpty()) {
+ segmentInfo.attributes = segmentCommitInfo.info.getAttributes();
+ }
+ if (withSizeInfo) {
+ Directory dir = segmentCommitInfo.info.dir;
+ List<Pair<String, Long>> files =
+ segmentCommitInfo.files().stream()
+ .map(
+ f -> {
+ long size = -1;
+ try {
+ size = dir.fileLength(f);
+ } catch (IOException e) {
+ }
+ return new Pair<>(f, size);
+ })
+ .sorted(
+ (p1, p2) -> {
+ if (p1.second() > p2.second()) {
+ return -1;
+ } else if (p1.second() < p2.second()) {
+ return 1;
+ } else {
+ return 0;
+ }
+ })
+ .collect(Collectors.toList());
+ if (!files.isEmpty()) {
+ final var topFiles = new HashMap<String, String>();
+ for (int i = 0; i < Math.min(files.size(), 5); i++) {
+ Pair<String, Long> p = files.get(i);
+ topFiles.put(p.first(), RamUsageEstimator.humanReadableUnits(p.second()));
+ }
+ segmentInfo.largestFilesByName = topFiles;
+ }
+ }
+
+ if (withFieldInfos) {
+ if (seg == null) {
+ log.debug(
+ "Skipping segment info - not available as a SegmentReader: {}", segmentCommitInfo);
+ } else {
+ FieldInfos fis = seg.getFieldInfos();
+ final var fields = new HashMap<String, GetSegmentDataResponse.SegmentSingleFieldInfo>();
+ for (FieldInfo fi : fis) {
+ fields.put(fi.name, getFieldInfo(seg, fi, schema));
+ }
+ segmentInfo.fields = fields;
+ }
+ }
+
+ return segmentInfo;
+ }
+
+ private GetSegmentDataResponse.SegmentSingleFieldInfo getFieldInfo(
+ SegmentReader reader, FieldInfo fi, IndexSchema schema) {
+ final var responseFieldInfo = new GetSegmentDataResponse.SegmentSingleFieldInfo();
+ StringBuilder flags = new StringBuilder();
+ IndexOptions opts = fi.getIndexOptions();
+ flags.append((opts != IndexOptions.NONE) ? FieldFlag.INDEXED.getAbbreviation() : '-');
+ DocValuesType dvt = fi.getDocValuesType();
+ if (dvt != DocValuesType.NONE) {
+ flags.append(FieldFlag.DOC_VALUES.getAbbreviation());
+ switch (dvt) {
+ case NUMERIC:
+ flags.append("num");
+ break;
+ case BINARY:
+ flags.append("bin");
+ break;
+ case SORTED:
+ flags.append("srt");
+ break;
+ case SORTED_NUMERIC:
+ flags.append("srn");
+ break;
+ case SORTED_SET:
+ flags.append("srs");
+ break;
+ default:
+ flags.append("???"); // should not happen
+ }
+ } else {
+ flags.append("----");
+ }
+ flags.append((fi.hasVectors()) ? FieldFlag.TERM_VECTOR_STORED.getAbbreviation() : '-');
+ flags.append((fi.omitsNorms()) ? FieldFlag.OMIT_NORMS.getAbbreviation() : '-');
+
+ flags.append((DOCS == opts) ? FieldFlag.OMIT_TF.getAbbreviation() : '-');
+
+ flags.append((DOCS_AND_FREQS == opts) ? FieldFlag.OMIT_POSITIONS.getAbbreviation() : '-');
+
+ flags.append(
+ (DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS == opts)
+ ? FieldFlag.STORE_OFFSETS_WITH_POSITIONS.getAbbreviation()
+ : '-');
+
+ flags.append((fi.hasPayloads() ? "p" : "-"));
+ flags.append((fi.isSoftDeletesField() ? "s" : "-"));
+ if (fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0) {
+ flags.append(":");
+ flags.append(fi.getPointDimensionCount()).append(':');
+ flags.append(fi.getPointIndexDimensionCount()).append(':');
+ flags.append(fi.getPointNumBytes());
+ }
+
+ responseFieldInfo.flags = flags.toString();
+
+ try {
+ Terms terms = reader.terms(fi.name);
+ if (terms != null) {
+ responseFieldInfo.docCount = terms.getDocCount();
+ responseFieldInfo.termCount = terms.size();
+ responseFieldInfo.sumDocFreq = terms.getSumDocFreq();
+ responseFieldInfo.sumTotalTermFreq = terms.getSumTotalTermFreq();
+ }
+ } catch (Exception e) {
+ log.debug("Exception retrieving term stats for field {}", fi.name, e);
+ }
+
+ // check compliance of the index with the current schema
+ SchemaField sf = schema.getFieldOrNull(fi.name);
+ boolean hasPoints = fi.getPointDimensionCount() > 0 || fi.getPointIndexDimensionCount() > 0;
+
+ if (sf != null) {
+ responseFieldInfo.schemaType = sf.getType().getTypeName();
+ final var nonCompliant = new HashMap<String, Object>();
+ if (sf.hasDocValues()
+ && fi.getDocValuesType() == DocValuesType.NONE
+ && fi.getIndexOptions() != IndexOptions.NONE) {
+ nonCompliant.put(
+ "docValues", "schema=" + sf.getType().getUninversionType(sf) + ", segment=false");
+ }
+ if (!sf.hasDocValues() && fi.getDocValuesType() != DocValuesType.NONE) {
+ nonCompliant.put("docValues", "schema=false, segment=" + fi.getDocValuesType().toString());
+ }
+ if (!sf.isPolyField()) { // difficult to find all sub-fields in a general way
+ if (sf.indexed() != ((fi.getIndexOptions() != IndexOptions.NONE) || hasPoints)) {
+ nonCompliant.put(
+ "indexed", "schema=" + sf.indexed() + ", segment=" + fi.getIndexOptions());
+ }
+ }
+ if (!hasPoints && (sf.omitNorms() != fi.omitsNorms())) {
+ nonCompliant.put("omitNorms", "schema=" + sf.omitNorms() + ", segment=" + fi.omitsNorms());
+ }
+ if (sf.storeTermVector() != fi.hasVectors()) {
+ nonCompliant.put(
+ "termVectors", "schema=" + sf.storeTermVector() + ", segment=" + fi.hasVectors());
+ }
+ if (sf.storeOffsetsWithPositions()
+ != (fi.getIndexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)) {
+ nonCompliant.put(
+ "storeOffsetsWithPositions",
+ "schema=" + sf.storeOffsetsWithPositions() + ", segment=" + fi.getIndexOptions());
+ }
+
+ if (nonCompliant.size() > 0) {
+ nonCompliant.put("schemaField", sf.toString());
+ responseFieldInfo.nonCompliant = nonCompliant;
+ }
+ } else {
+ responseFieldInfo.schemaType = "(UNKNOWN)";
+ }
+
+ return responseFieldInfo;
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java b/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
index 9a96b34afc0..22f492abf92 100644
--- a/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
+++ b/solr/core/src/java/org/apache/solr/handler/api/V2ApiUtils.java
@@ -91,6 +91,9 @@ public static void squashIntoNamedListWithoutHeader(
}
public static String getMediaTypeFromWtParam(SolrParams params, String defaultMediaType) {
+ if (params == null) {
+ return defaultMediaType;
+ }
final String wtParam = params.get(WT);
if (StrUtils.isBlank(wtParam)) return defaultMediaType;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
index 320a5fe70d7..7592eed86fc 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java
@@ -114,7 +114,7 @@ public class HttpShardHandler extends ShardHandler {
protected AtomicInteger pending;
private final Map> shardToURLs;
- protected LBHttp2SolrClient lbClient;
+ protected LBHttp2SolrClient lbClient;
public HttpShardHandler(HttpShardHandlerFactory httpShardHandlerFactory) {
this.httpShardHandlerFactory = httpShardHandlerFactory;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
index 1437dee63ea..ac7dc0cf8e0 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandlerFactory.java
@@ -83,9 +83,8 @@ public class HttpShardHandlerFactory extends ShardHandlerFactory
protected ExecutorService commExecutor;
protected volatile Http2SolrClient defaultClient;
- protected Http2SolrClient.Builder httpSolrClientBuilder;
protected InstrumentedHttpListenerFactory httpListenerFactory;
- protected LBHttp2SolrClient loadbalancer;
+ protected LBHttp2SolrClient loadbalancer;
int corePoolSize = 0;
int maximumPoolSize = Integer.MAX_VALUE;
@@ -306,16 +305,16 @@ public void init(PluginInfo info) {
sb);
int soTimeout =
getParameter(args, HttpClientUtil.PROP_SO_TIMEOUT, HttpClientUtil.DEFAULT_SO_TIMEOUT, sb);
- this.httpSolrClientBuilder =
+
+ this.defaultClient =
new Http2SolrClient.Builder()
.withConnectionTimeout(connectionTimeout, TimeUnit.MILLISECONDS)
.withIdleTimeout(soTimeout, TimeUnit.MILLISECONDS)
.withExecutor(commExecutor)
.withMaxConnectionsPerHost(maxConnectionsPerHost)
- .addListenerFactory(this.httpListenerFactory);
- this.defaultClient = httpSolrClientBuilder.build();
-
- this.loadbalancer = new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder).build();
+ .build();
+ this.defaultClient.addListenerFactory(this.httpListenerFactory);
+ this.loadbalancer = new LBHttp2SolrClient.Builder(defaultClient).build();
initReplicaListTransformers(getParameter(args, "replicaRouting", null, sb));
@@ -325,7 +324,7 @@ public void init(PluginInfo info) {
@Override
public void setSecurityBuilder(HttpClientBuilderPlugin clientBuilderPlugin) {
if (clientBuilderPlugin != null) {
- clientBuilderPlugin.setup(httpSolrClientBuilder, defaultClient);
+ clientBuilderPlugin.setup(defaultClient);
}
}
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/CreateConfigSetAPI.java b/solr/core/src/java/org/apache/solr/handler/configsets/CloneConfigSet.java
similarity index 53%
rename from solr/core/src/java/org/apache/solr/handler/configsets/CreateConfigSetAPI.java
rename to solr/core/src/java/org/apache/solr/handler/configsets/CloneConfigSet.java
index 796903ff73c..e55c74e04fa 100644
--- a/solr/core/src/java/org/apache/solr/handler/configsets/CreateConfigSetAPI.java
+++ b/solr/core/src/java/org/apache/solr/handler/configsets/CloneConfigSet.java
@@ -17,54 +17,55 @@
package org.apache.solr.handler.configsets;
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
import static org.apache.solr.common.params.CommonParams.NAME;
import static org.apache.solr.handler.admin.ConfigSetsHandler.DISABLE_CREATE_AUTH_CHECKS;
import static org.apache.solr.security.PermissionNameProvider.Name.CONFIG_EDIT_PERM;
+import jakarta.inject.Inject;
import java.util.HashMap;
import java.util.Map;
-import org.apache.solr.api.Command;
-import org.apache.solr.api.EndPoint;
-import org.apache.solr.api.PayloadObj;
-import org.apache.solr.client.solrj.request.beans.CreateConfigPayload;
+import org.apache.solr.client.api.endpoint.ConfigsetsApi;
+import org.apache.solr.client.api.model.CloneConfigsetRequestBody;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
import org.apache.solr.cloud.ConfigSetCmds;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ConfigSetParams;
import org.apache.solr.core.CoreContainer;
+import org.apache.solr.jersey.PermissionName;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
-/**
- * V2 API for creating a new configset as a copy of an existing one.
- *
- * This API (POST /v2/cluster/configs {"create": {...}}) is analogous to the v1
- * /admin/configs?action=CREATE command.
- */
-@EndPoint(method = POST, path = "/cluster/configs", permission = CONFIG_EDIT_PERM)
-public class CreateConfigSetAPI extends ConfigSetAPIBase {
+/** V2 API implementation for ConfigsetsApi.Clone */
+public class CloneConfigSet extends ConfigSetAPIBase implements ConfigsetsApi.Clone {
- public CreateConfigSetAPI(CoreContainer coreContainer) {
- super(coreContainer);
+ @Inject
+ public CloneConfigSet(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
+ super(coreContainer, solrQueryRequest, solrQueryResponse);
}
- @Command(name = "create")
- public void create(PayloadObj<CreateConfigPayload> obj) throws Exception {
- final CreateConfigPayload createConfigPayload = obj.get();
- if (configSetService.checkConfigExists(createConfigPayload.name)) {
+ @Override
+ @PermissionName(CONFIG_EDIT_PERM)
+ public SolrJerseyResponse cloneExistingConfigSet(CloneConfigsetRequestBody requestBody)
+ throws Exception {
+ final var response = instantiateJerseyResponse(SolrJerseyResponse.class);
+ if (configSetService.checkConfigExists(requestBody.name)) {
throw new SolrException(
- SolrException.ErrorCode.BAD_REQUEST,
- "ConfigSet already exists: " + createConfigPayload.name);
+ SolrException.ErrorCode.BAD_REQUEST, "ConfigSet already exists: " + requestBody.name);
}
// is there a base config that already exists
- if (!configSetService.checkConfigExists(createConfigPayload.baseConfigSet)) {
+ if (!configSetService.checkConfigExists(requestBody.baseConfigSet)) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
- "Base ConfigSet does not exist: " + createConfigPayload.baseConfigSet);
+ "Base ConfigSet does not exist: " + requestBody.baseConfigSet);
}
if (!DISABLE_CREATE_AUTH_CHECKS
- && !isTrusted(obj.getRequest().getUserPrincipal(), coreContainer.getAuthenticationPlugin())
- && configSetService.isConfigSetTrusted(createConfigPayload.baseConfigSet)) {
+ && !isTrusted(solrQueryRequest.getUserPrincipal(), coreContainer.getAuthenticationPlugin())
+ && configSetService.isConfigSetTrusted(requestBody.baseConfigSet)) {
throw new SolrException(
SolrException.ErrorCode.UNAUTHORIZED,
"Can't create a configset with an unauthenticated request from a trusted "
@@ -72,16 +73,17 @@ public void create(PayloadObj<CreateConfigPayload> obj) throws Exception {
}
final Map<String, Object> configsetCommandMsg = new HashMap<>();
- configsetCommandMsg.put(NAME, createConfigPayload.name);
- configsetCommandMsg.put(ConfigSetCmds.BASE_CONFIGSET, createConfigPayload.baseConfigSet);
- if (createConfigPayload.properties != null) {
- for (Map.Entry<String, Object> e : createConfigPayload.properties.entrySet()) {
+ configsetCommandMsg.put(NAME, requestBody.name);
+ configsetCommandMsg.put(ConfigSetCmds.BASE_CONFIGSET, requestBody.baseConfigSet);
+ if (requestBody.properties != null) {
+ for (Map.Entry<String, Object> e : requestBody.properties.entrySet()) {
configsetCommandMsg.put(
ConfigSetCmds.CONFIG_SET_PROPERTY_PREFIX + e.getKey(), e.getValue());
}
}
runConfigSetCommand(
- obj.getResponse(), ConfigSetParams.ConfigSetAction.CREATE, configsetCommandMsg);
+ solrQueryResponse, ConfigSetParams.ConfigSetAction.CREATE, configsetCommandMsg);
+ return response;
}
}
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/ConfigSetAPIBase.java b/solr/core/src/java/org/apache/solr/handler/configsets/ConfigSetAPIBase.java
index 3f401e31bd8..f6e99167f30 100644
--- a/solr/core/src/java/org/apache/solr/handler/configsets/ConfigSetAPIBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/configsets/ConfigSetAPIBase.java
@@ -28,6 +28,7 @@
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
+import org.apache.solr.api.JerseyResource;
import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.cloud.OverseerSolrResponseSerializer;
import org.apache.solr.cloud.OverseerTaskQueue;
@@ -53,18 +54,26 @@
* Contains utilities for tasks common in configset manipulation, including running configset
* "commands" and checking configset "trusted-ness".
*/
-public class ConfigSetAPIBase {
+public class ConfigSetAPIBase extends JerseyResource {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
protected final CoreContainer coreContainer;
+
+ protected final SolrQueryRequest solrQueryRequest;
+ protected final SolrQueryResponse solrQueryResponse;
protected final Optional<DistributedCollectionConfigSetCommandRunner>
distributedCollectionConfigSetCommandRunner;
-
protected final ConfigSetService configSetService;
- public ConfigSetAPIBase(CoreContainer coreContainer) {
+ public ConfigSetAPIBase(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
this.coreContainer = coreContainer;
+ this.solrQueryRequest = solrQueryRequest;
+ this.solrQueryResponse = solrQueryResponse;
+
this.distributedCollectionConfigSetCommandRunner =
coreContainer.getDistributedCollectionCommandRunner();
this.configSetService = coreContainer.getConfigSetService();
@@ -96,7 +105,7 @@ protected void ensureConfigSetUploadEnabled() {
}
}
- protected InputStream ensureNonEmptyInputStream(SolrQueryRequest req) throws IOException {
+ public static InputStream ensureNonEmptyInputStream(SolrQueryRequest req) throws IOException {
Iterator<ContentStream> contentStreamsIterator = req.getContentStreams().iterator();
if (!contentStreamsIterator.hasNext()) {
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSetAPI.java b/solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSet.java
similarity index 64%
rename from solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSetAPI.java
rename to solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSet.java
index 4867dd160fd..1a4b363a833 100644
--- a/solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSetAPI.java
+++ b/solr/core/src/java/org/apache/solr/handler/configsets/DeleteConfigSet.java
@@ -16,40 +16,37 @@
*/
package org.apache.solr.handler.configsets;
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE;
import static org.apache.solr.common.params.CommonParams.NAME;
import static org.apache.solr.security.PermissionNameProvider.Name.CONFIG_EDIT_PERM;
+import jakarta.inject.Inject;
import java.util.HashMap;
import java.util.Map;
-import org.apache.solr.api.EndPoint;
+import org.apache.solr.client.api.endpoint.ConfigsetsApi;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ConfigSetParams;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.CoreContainer;
+import org.apache.solr.jersey.PermissionName;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
-/**
- * V2 API for deleting an existing configset
- *
- * This API (DELETE /v2/cluster/configs/configsetName) is analogous to the v1
- * /admin/configs?action=DELETE command.
- */
-public class DeleteConfigSetAPI extends ConfigSetAPIBase {
-
- public static final String CONFIGSET_NAME_PLACEHOLDER = "name";
+/** V2 API implementation for ConfigsetsApi.Delete */
+public class DeleteConfigSet extends ConfigSetAPIBase implements ConfigsetsApi.Delete {
- public DeleteConfigSetAPI(CoreContainer coreContainer) {
- super(coreContainer);
+ @Inject
+ public DeleteConfigSet(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
+ super(coreContainer, solrQueryRequest, solrQueryResponse);
}
- @EndPoint(
- method = DELETE,
- path = "/cluster/configs/{" + CONFIGSET_NAME_PLACEHOLDER + "}",
- permission = CONFIG_EDIT_PERM)
- public void deleteConfigSet(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- final String configSetName = req.getPathTemplateValues().get("name");
+ @Override
+ @PermissionName(CONFIG_EDIT_PERM)
+ public SolrJerseyResponse deleteConfigSet(String configSetName) throws Exception {
+ final var response = instantiateJerseyResponse(SolrJerseyResponse.class);
if (StrUtils.isNullOrEmpty(configSetName)) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST, "No configset name provided to delete");
@@ -57,6 +54,8 @@ public void deleteConfigSet(SolrQueryRequest req, SolrQueryResponse rsp) throws
final Map<String, Object> configsetCommandMsg = new HashMap<>();
configsetCommandMsg.put(NAME, configSetName);
- runConfigSetCommand(rsp, ConfigSetParams.ConfigSetAction.DELETE, configsetCommandMsg);
+ runConfigSetCommand(
+ solrQueryResponse, ConfigSetParams.ConfigSetAction.DELETE, configsetCommandMsg);
+ return response;
}
}
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/ListConfigSets.java b/solr/core/src/java/org/apache/solr/handler/configsets/ListConfigSets.java
index 5f5d28adcfc..5b45fa38f22 100644
--- a/solr/core/src/java/org/apache/solr/handler/configsets/ListConfigSets.java
+++ b/solr/core/src/java/org/apache/solr/handler/configsets/ListConfigSets.java
@@ -22,7 +22,7 @@
import jakarta.ws.rs.core.Context;
import jakarta.ws.rs.core.HttpHeaders;
import org.apache.solr.api.JerseyResource;
-import org.apache.solr.client.api.endpoint.ListConfigsetsApi;
+import org.apache.solr.client.api.endpoint.ConfigsetsApi;
import org.apache.solr.client.api.model.ListConfigsetsResponse;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.jersey.PermissionName;
@@ -30,9 +30,9 @@
/**
* V2 API implementation for listing all available configsets.
*
- * This API (GET /v2/cluster/configs) is analogous to the v1 /admin/configs?action=LIST command.
+ * <p>This API (GET /v2/configsets) is analogous to the v1 /admin/configs?action=LIST command.
*/
-public class ListConfigSets extends JerseyResource implements ListConfigsetsApi {
+public class ListConfigSets extends JerseyResource implements ConfigsetsApi.List {
@Context public HttpHeaders headers;
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetAPI.java b/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSet.java
similarity index 53%
rename from solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetAPI.java
rename to solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSet.java
index 79d1b34d5ca..d42adf94999 100644
--- a/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetAPI.java
+++ b/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSet.java
@@ -6,7 +6,7 @@
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,9 +16,9 @@
*/
package org.apache.solr.handler.configsets;
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.PUT;
import static org.apache.solr.security.PermissionNameProvider.Name.CONFIG_EDIT_PERM;
+import jakarta.inject.Inject;
import java.io.IOException;
import java.io.InputStream;
import java.lang.invoke.MethodHandles;
@@ -27,46 +27,47 @@
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
-import org.apache.solr.api.EndPoint;
+import org.apache.solr.client.api.endpoint.ConfigsetsApi;
+import org.apache.solr.client.api.model.SolrJerseyResponse;
import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.ConfigSetParams;
+import org.apache.solr.common.cloud.ZkMaintenanceUtils;
import org.apache.solr.core.ConfigSetService;
import org.apache.solr.core.CoreContainer;
+import org.apache.solr.jersey.PermissionName;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.util.FileTypeMagicUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-/**
- * V2 API for uploading a new configset (or overwriting an existing one).
- *
- * <p>This API (PUT /v2/cluster/configs/configsetName) is analogous to the v1
- * /admin/configs?action=UPLOAD command.
- */
-public class UploadConfigSetAPI extends ConfigSetAPIBase {
-
- public static final String CONFIGSET_NAME_PLACEHOLDER = "name";
+public class UploadConfigSet extends ConfigSetAPIBase implements ConfigsetsApi.Upload {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
- public UploadConfigSetAPI(CoreContainer coreContainer) {
- super(coreContainer);
+ @Inject
+ public UploadConfigSet(
+ CoreContainer coreContainer,
+ SolrQueryRequest solrQueryRequest,
+ SolrQueryResponse solrQueryResponse) {
+ super(coreContainer, solrQueryRequest, solrQueryResponse);
}
- @EndPoint(method = PUT, path = "/cluster/configs/{name}", permission = CONFIG_EDIT_PERM)
- public void uploadConfigSet(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
+ @Override
+ @PermissionName(CONFIG_EDIT_PERM)
+ public SolrJerseyResponse uploadConfigSet(
+ String configSetName, Boolean overwrite, Boolean cleanup, InputStream requestBody)
+ throws IOException {
+ final var response = instantiateJerseyResponse(SolrJerseyResponse.class);
ensureConfigSetUploadEnabled();
- final String configSetName = req.getPathTemplateValues().get("name");
boolean overwritesExisting = configSetService.checkConfigExists(configSetName);
boolean requestIsTrusted =
- isTrusted(req.getUserPrincipal(), coreContainer.getAuthenticationPlugin());
+ isTrusted(solrQueryRequest.getUserPrincipal(), coreContainer.getAuthenticationPlugin());
// Get upload parameters
- boolean allowOverwrite = req.getParams().getBool(ConfigSetParams.OVERWRITE, true);
- boolean cleanup = req.getParams().getBool(ConfigSetParams.CLEANUP, false);
- final InputStream inputStream = ensureNonEmptyInputStream(req);
+ if (overwrite == null) overwrite = true;
+ if (cleanup == null) cleanup = false;
- if (overwritesExisting && !allowOverwrite) {
+ if (overwritesExisting && !overwrite) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
"The configuration " + configSetName + " already exists");
@@ -84,7 +85,7 @@ public void uploadConfigSet(SolrQueryRequest req, SolrQueryResponse rsp) throws
// singleFilePath is not passed.
createBaseNode(configSetService, overwritesExisting, requestIsTrusted, configSetName);
- try (ZipInputStream zis = new ZipInputStream(inputStream, StandardCharsets.UTF_8)) {
+ try (ZipInputStream zis = new ZipInputStream(requestBody, StandardCharsets.UTF_8)) {
boolean hasEntry = false;
ZipEntry zipEntry;
while ((zipEntry = zis.getNextEntry()) != null) {
@@ -111,6 +112,60 @@ public void uploadConfigSet(SolrQueryRequest req, SolrQueryResponse rsp) throws
&& !configSetService.isConfigSetTrusted(configSetName)) {
configSetService.setConfigSetTrust(configSetName, true);
}
+ return response;
+ }
+
+ @Override
+ @PermissionName(CONFIG_EDIT_PERM)
+ public SolrJerseyResponse uploadConfigSetFile(
+ String configSetName,
+ String filePath,
+ Boolean overwrite,
+ Boolean cleanup,
+ InputStream requestBody)
+ throws IOException {
+ final var response = instantiateJerseyResponse(SolrJerseyResponse.class);
+ ensureConfigSetUploadEnabled();
+
+ boolean overwritesExisting = configSetService.checkConfigExists(configSetName);
+ boolean requestIsTrusted =
+ isTrusted(solrQueryRequest.getUserPrincipal(), coreContainer.getAuthenticationPlugin());
+
+ // Get upload parameters
+
+ String singleFilePath = filePath != null ? filePath : "";
+ if (overwrite == null) overwrite = true;
+ if (cleanup == null) cleanup = false;
+
+ String fixedSingleFilePath = singleFilePath;
+ if (fixedSingleFilePath.charAt(0) == '/') {
+ fixedSingleFilePath = fixedSingleFilePath.substring(1);
+ }
+ byte[] data = requestBody.readAllBytes();
+ if (fixedSingleFilePath.isEmpty()) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "The file path provided for upload, '" + singleFilePath + "', is not valid.");
+ } else if (ZkMaintenanceUtils.isFileForbiddenInConfigSets(fixedSingleFilePath)
+ || FileTypeMagicUtil.isFileForbiddenInConfigset(data)) {
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "The file type provided for upload, '"
+ + singleFilePath
+ + "', is forbidden for use in configSets.");
+ } else if (cleanup) {
+ // Cleanup is not allowed while using singleFilePath upload
+ throw new SolrException(
+ SolrException.ErrorCode.BAD_REQUEST,
+ "ConfigSet uploads do not allow cleanup=true when file path is used.");
+ } else {
+ // Create a node for the configuration in config
+ // For creating the baseNode, the cleanup parameter is only allowed to be true when
+ // singleFilePath is not passed.
+ createBaseNode(configSetService, overwritesExisting, requestIsTrusted, configSetName);
+ configSetService.uploadFileToConfig(configSetName, fixedSingleFilePath, data, overwrite);
+ }
+ return response;
}
private void deleteUnusedFiles(
diff --git a/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetFileAPI.java b/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetFileAPI.java
deleted file mode 100644
index 2380a79a92b..00000000000
--- a/solr/core/src/java/org/apache/solr/handler/configsets/UploadConfigSetFileAPI.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.handler.configsets;
-
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.PUT;
-import static org.apache.solr.security.PermissionNameProvider.Name.CONFIG_EDIT_PERM;
-
-import java.io.InputStream;
-import org.apache.solr.api.EndPoint;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ZkMaintenanceUtils;
-import org.apache.solr.common.params.ConfigSetParams;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.util.FileTypeMagicUtil;
-
-/**
- * V2 API for adding or updating a single file within a configset.
- *
- * <p>This API (PUT /v2/cluster/configs/configsetName/someFilePath) is analogous to the v1
- * /admin/configs?action=UPLOAD&filePath=someFilePath command.
- */
-public class UploadConfigSetFileAPI extends ConfigSetAPIBase {
-
- public static final String CONFIGSET_NAME_PLACEHOLDER =
- UploadConfigSetAPI.CONFIGSET_NAME_PLACEHOLDER;
- public static final String FILEPATH_PLACEHOLDER = "*";
-
- private static final String API_PATH =
- "/cluster/configs/{" + CONFIGSET_NAME_PLACEHOLDER + "}/" + FILEPATH_PLACEHOLDER;
-
- public UploadConfigSetFileAPI(CoreContainer coreContainer) {
- super(coreContainer);
- }
-
- @EndPoint(method = PUT, path = API_PATH, permission = CONFIG_EDIT_PERM)
- public void updateConfigSetFile(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
- ensureConfigSetUploadEnabled();
-
- final String configSetName = req.getPathTemplateValues().get("name");
- boolean overwritesExisting = configSetService.checkConfigExists(configSetName);
- boolean requestIsTrusted =
- isTrusted(req.getUserPrincipal(), coreContainer.getAuthenticationPlugin());
-
- // Get upload parameters
-
- String singleFilePath = req.getPathTemplateValues().getOrDefault(FILEPATH_PLACEHOLDER, "");
- boolean allowOverwrite = req.getParams().getBool(ConfigSetParams.OVERWRITE, true);
- boolean cleanup = req.getParams().getBool(ConfigSetParams.CLEANUP, false);
- final InputStream inputStream = ensureNonEmptyInputStream(req);
-
- String fixedSingleFilePath = singleFilePath;
- if (fixedSingleFilePath.charAt(0) == '/') {
- fixedSingleFilePath = fixedSingleFilePath.substring(1);
- }
- byte[] data = inputStream.readAllBytes();
- if (fixedSingleFilePath.isEmpty()) {
- throw new SolrException(
- SolrException.ErrorCode.BAD_REQUEST,
- "The file path provided for upload, '" + singleFilePath + "', is not valid.");
- } else if (ZkMaintenanceUtils.isFileForbiddenInConfigSets(fixedSingleFilePath)
- || FileTypeMagicUtil.isFileForbiddenInConfigset(data)) {
- throw new SolrException(
- SolrException.ErrorCode.BAD_REQUEST,
- "The file type provided for upload, '"
- + singleFilePath
- + "', is forbidden for use in configSets.");
- } else if (cleanup) {
- // Cleanup is not allowed while using singleFilePath upload
- throw new SolrException(
- SolrException.ErrorCode.BAD_REQUEST,
- "ConfigSet uploads do not allow cleanup=true when file path is used.");
- } else {
- // Create a node for the configuration in config
- // For creating the baseNode, the cleanup parameter is only allowed to be true when
- // singleFilePath is not passed.
- createBaseNode(configSetService, overwritesExisting, requestIsTrusted, configSetName);
- configSetService.uploadFileToConfig(configSetName, fixedSingleFilePath, data, allowOverwrite);
- }
- }
-}
diff --git a/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java b/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
index 1110880529c..3760bfc4590 100644
--- a/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
+++ b/solr/core/src/java/org/apache/solr/jersey/CatchAllExceptionMapper.java
@@ -63,6 +63,7 @@ public Response toResponse(Exception exception) {
// success/failure for AuditLogging, and other logic.
final SolrQueryResponse solrQueryResponse =
(SolrQueryResponse) containerRequestContext.getProperty(SOLR_QUERY_RESPONSE);
+
final SolrQueryRequest solrQueryRequest =
(SolrQueryRequest) containerRequestContext.getProperty(SOLR_QUERY_REQUEST);
if (exception instanceof WebApplicationException wae) {
diff --git a/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java b/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
index e5a7f7150cc..44c08bff03e 100644
--- a/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
+++ b/solr/core/src/java/org/apache/solr/jersey/MediaTypeOverridingFilter.java
@@ -63,9 +63,9 @@ public void filter(
final SolrQueryRequest solrQueryRequest =
(SolrQueryRequest) requestContext.getProperty(SOLR_QUERY_REQUEST);
- final String mediaType =
- V2ApiUtils.getMediaTypeFromWtParam(
- solrQueryRequest.getParams(), MediaType.APPLICATION_JSON);
+ // TODO Is it valid for SQRequest to be null?
+ final var params = (solrQueryRequest != null) ? solrQueryRequest.getParams() : null;
+ final String mediaType = V2ApiUtils.getMediaTypeFromWtParam(params, MediaType.APPLICATION_JSON);
if (mediaType != null) {
responseContext.getHeaders().putSingle(CONTENT_TYPE, mediaType);
}
diff --git a/solr/core/src/java/org/apache/solr/security/HttpClientBuilderPlugin.java b/solr/core/src/java/org/apache/solr/security/HttpClientBuilderPlugin.java
index 206fb3d0886..b43d5f22190 100644
--- a/solr/core/src/java/org/apache/solr/security/HttpClientBuilderPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/HttpClientBuilderPlugin.java
@@ -34,9 +34,4 @@ public interface HttpClientBuilderPlugin {
public SolrHttpClientBuilder getHttpClientBuilder(SolrHttpClientBuilder builder);
public default void setup(Http2SolrClient client) {}
-
- /** TODO: Ideally, we only pass the builder here. */
- public default void setup(Http2SolrClient.Builder builder, Http2SolrClient client) {
- setup(client);
- }
}
diff --git a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
index 93ecdcc9d68..b1f6e6b6eed 100644
--- a/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/PKIAuthenticationPlugin.java
@@ -376,11 +376,6 @@ PublicKey fetchPublicKeyFromRemote(String nodename) {
@Override
public void setup(Http2SolrClient client) {
- setup(null, client);
- }
-
- @Override
- public void setup(Http2SolrClient.Builder builder, Http2SolrClient client) {
final HttpListenerFactory.RequestResponseListener listener =
new HttpListenerFactory.RequestResponseListener() {
private static final String CACHED_REQUEST_USER_KEY = "cachedRequestUser";
@@ -436,12 +431,7 @@ private Optional getUserFromJettyRequest(Request request) {
(String) request.getAttributes().get(CACHED_REQUEST_USER_KEY));
}
};
- if (client != null) {
- client.addListenerFactory(() -> listener);
- }
- if (builder != null) {
- builder.addListenerFactory(() -> listener);
- }
+ client.addListenerFactory(() -> listener);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/servlet/CoreContainerProvider.java b/solr/core/src/java/org/apache/solr/servlet/CoreContainerProvider.java
index 8b8bc3c927d..5a2a053c090 100644
--- a/solr/core/src/java/org/apache/solr/servlet/CoreContainerProvider.java
+++ b/solr/core/src/java/org/apache/solr/servlet/CoreContainerProvider.java
@@ -215,13 +215,12 @@ private void init(ServletContext servletContext) {
}
// Do initial logs for experimental Lucene classes.
- // TODO: Use "MethodHandles.lookup().ensureClassInitialized()" instead of "Class.forName()"
- // once JDK 15+ is mandatory
+ final var lookup = MethodHandles.lookup();
Stream.of(MMapDirectory.class, VectorUtil.class)
.forEach(
cls -> {
try {
- Class.forName(cls.getName());
+ lookup.ensureInitialized(cls);
} catch (ReflectiveOperationException re) {
throw new SolrException(
ErrorCode.SERVER_ERROR, "Could not load Lucene class: " + cls.getName());
diff --git a/solr/core/src/test/org/apache/solr/cli/PostToolTest.java b/solr/core/src/test/org/apache/solr/cli/PostToolTest.java
index 9eb3e783a2d..758ae9ccd51 100644
--- a/solr/core/src/test/org/apache/solr/cli/PostToolTest.java
+++ b/solr/core/src/test/org/apache/solr/cli/PostToolTest.java
@@ -76,7 +76,6 @@ public void testBasicRun() throws Exception {
withBasicAuth(CollectionAdminRequest.createCollection(collection, "conf1", 1, 1, 0, 0))
.processAndWait(cluster.getSolrClient(), 10);
- waitForState("creating", collection, activeClusterShape(1, 1));
File jsonDoc = File.createTempFile("temp", ".json");
diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
index 0fe1b755d62..5239deeaeac 100644
--- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPISolrJTest.java
@@ -21,6 +21,9 @@
import static org.apache.solr.common.cloud.ZkStateReader.NUM_SHARDS_PROP;
import static org.apache.solr.common.params.CollectionAdminParams.COLLECTION;
import static org.apache.solr.common.params.CollectionAdminParams.DEFAULTS;
+import static org.hamcrest.Matchers.emptyString;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
@@ -29,6 +32,7 @@
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
@@ -39,11 +43,14 @@
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.lucene.tests.util.TestUtil;
+import org.apache.lucene.util.Version;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrResponse;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.CollectionsApi;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.CoreStatus;
import org.apache.solr.client.solrj.request.V2Request;
@@ -569,14 +576,7 @@ private void checkCollectionProperty(String collection, String propertyName, Str
fail("Timed out waiting for cluster property value");
}
- @Test
- public void testColStatus() throws Exception {
- String collectionName = getSaferTestName();
- CollectionAdminRequest.createCollection(collectionName, "conf2", 2, 2)
- .process(cluster.getSolrClient());
-
- cluster.waitForActiveCollection(collectionName, 2, 4);
-
+ private void indexSomeDocs(String collectionName) throws SolrServerException, IOException {
SolrClient client = cluster.getSolrClient();
byte[] binData = collectionName.getBytes(StandardCharsets.UTF_8);
// index some docs
@@ -602,13 +602,97 @@ public void testColStatus() throws Exception {
client.add(collectionName, doc);
}
client.commit(collectionName);
+ }
+
+ private void assertRspPathNull(SolrResponse rsp, String... pathSegments) {
+ assertNull(Utils.getObjectByPath(rsp.getResponse(), false, Arrays.asList(pathSegments)));
+ }
+
+ private void assertRspPathNotNull(SolrResponse rsp, String... pathSegments) {
+ assertNotNull(Utils.getObjectByPath(rsp.getResponse(), false, Arrays.asList(pathSegments)));
+ }
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testColStatus() throws Exception {
+ String collectionName = getSaferTestName();
+ CollectionAdminRequest.createCollection(collectionName, "conf2", 2, 2)
+ .process(cluster.getSolrClient());
+
+ cluster.waitForActiveCollection(collectionName, 2, 4);
+ indexSomeDocs(collectionName);
+
+ // Returns basic info if no additional flags are set
CollectionAdminRequest.ColStatus req = CollectionAdminRequest.collectionStatus(collectionName);
+ CollectionAdminResponse rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertNotNull(rsp.getResponse().get(collectionName));
+ assertNotNull(rsp.getResponse().findRecursive(collectionName, "properties"));
+ final var collPropMap =
+ (Map<String, Object>) rsp.getResponse().findRecursive(collectionName, "properties");
+ assertEquals("conf2", collPropMap.get("configName"));
+ assertEquals(2L, collPropMap.get("nrtReplicas"));
+ assertEquals("0", collPropMap.get("tlogReplicas"));
+ assertEquals("0", collPropMap.get("pullReplicas"));
+ assertEquals(
+ 2, ((NamedList<Object>) rsp.getResponse().findRecursive(collectionName, "shards")).size());
+ assertNotNull(rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader"));
+ // Ensure more advanced info is not returned
+ assertNull(
+ rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "leader", "segInfos"));
+
+ // Returns segment metadata iff requested
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSegments(true);
+ rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertNotNull(rsp.getResponse().get(collectionName));
+ assertRspPathNotNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0");
+ // Ensure field, size, etc. information isn't returned if only segment data was requested
+ assertRspPathNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0", "fields");
+ assertRspPathNull(
+ rsp,
+ collectionName,
+ "shards",
+ "shard1",
+ "leader",
+ "segInfos",
+ "segments",
+ "_0",
+ "largestFiles");
+
+ // Returns segment metadata and file-size info iff requested
+ // (Note that 'sizeInfo=true' should implicitly enable segments=true)
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSizeInfo(true);
+ rsp = req.process(cluster.getSolrClient());
+ assertEquals(0, rsp.getStatus());
+ assertRspPathNotNull(rsp, collectionName);
+ assertRspPathNotNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0");
+ assertRspPathNotNull(
+ rsp,
+ collectionName,
+ "shards",
+ "shard1",
+ "leader",
+ "segInfos",
+ "segments",
+ "_0",
+ "largestFiles");
+ // Ensure field, etc. information isn't returned if only segment+size data was requested
+ assertRspPathNull(
+ rsp, collectionName, "shards", "shard1", "leader", "segInfos", "segments", "_0", "fields");
+
+ // Set all flags and ensure everything is returned as expected
+ req = CollectionAdminRequest.collectionStatus(collectionName);
+ req.setWithSegments(true);
req.setWithFieldInfo(true);
req.setWithCoreInfo(true);
- req.setWithSegments(true);
req.setWithSizeInfo(true);
- CollectionAdminResponse rsp = req.process(cluster.getSolrClient());
+ rsp = req.process(cluster.getSolrClient());
assertEquals(0, rsp.getStatus());
@SuppressWarnings({"unchecked"})
List nonCompliant =
@@ -616,14 +700,22 @@ public void testColStatus() throws Exception {
assertEquals(nonCompliant.toString(), 1, nonCompliant.size());
assertTrue(nonCompliant.toString(), nonCompliant.contains("(NONE)"));
@SuppressWarnings({"unchecked"})
- NamedList segInfos =
- (NamedList)
- rsp.getResponse()
- .findRecursive(collectionName, "shards", "shard1", "leader", "segInfos");
- assertNotNull(Utils.toJSONString(rsp), segInfos.findRecursive("info", "core", "startTime"));
- assertNotNull(Utils.toJSONString(rsp), segInfos.get("fieldInfoLegend"));
+ final var segInfos =
+ (Map)
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collectionName, "shards", "shard1", "leader", "segInfos"));
assertNotNull(
- Utils.toJSONString(rsp), segInfos.findRecursive("segments", "_0", "fields", "id", "flags"));
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("info", "core", "startTime")));
+ assertNotNull(
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("fieldInfoLegend")));
+ assertNotNull(
+ Utils.toJSONString(rsp),
+ Utils.getObjectByPath(segInfos, false, List.of("segments", "_0", "fields", "id", "flags")));
+
// test for replicas not active - SOLR-13882
DocCollection coll = cluster.getSolrClient().getClusterState().getCollection(collectionName);
Replica firstReplica = coll.getSlice("shard1").getReplicas().iterator().next();
@@ -637,7 +729,10 @@ public void testColStatus() throws Exception {
assertEquals(0, rsp.getStatus());
Number down =
(Number)
- rsp.getResponse().findRecursive(collectionName, "shards", "shard1", "replicas", "down");
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collectionName, "shards", "shard1", "replicas", "down"));
assertTrue(
"should be some down replicas, but there were none in shard1:" + rsp, down.intValue() > 0);
@@ -652,10 +747,8 @@ public void testColStatus() throws Exception {
req = CollectionAdminRequest.collectionStatus(implicitColl);
rsp = req.process(cluster.getSolrClient());
assertNotNull(rsp.getResponse().get(implicitColl));
- assertNotNull(
- rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardA"));
- assertNotNull(
- rsp.toString(), rsp.getResponse().findRecursive(implicitColl, "shards", "shardB"));
+ assertRspPathNotNull(rsp, implicitColl, "shards", "shardA");
+ assertRspPathNotNull(rsp, implicitColl, "shards", "shardB");
}
@Test
@@ -697,6 +790,69 @@ public void testColStatusCollectionName() throws Exception {
assertNotNull(rsp.getResponse().get(collectionNames[0]));
}
+ /**
+ * Unit test for the v2 API: GET /api/collections/$collName
+ *
+ * Uses the OAS-generated SolrRequest/SolrResponse API binding.
+ */
+ @Test
+ public void testV2BasicCollectionStatus() throws Exception {
+ final String simpleCollName = "simpleCollection";
+ CollectionAdminRequest.createCollection(simpleCollName, "conf2", 2, 1, 1, 1)
+ .process(cluster.getSolrClient());
+ cluster.waitForActiveCollection(simpleCollName, 2, 6);
+ indexSomeDocs(simpleCollName);
+
+ final var simpleResponse =
+ new CollectionsApi.GetCollectionStatus(simpleCollName)
+ .process(cluster.getSolrClient())
+ .getParsed();
+ assertEquals(simpleCollName, simpleResponse.name);
+ assertEquals(2, simpleResponse.shards.size());
+ assertEquals(Integer.valueOf(2), simpleResponse.activeShards);
+ assertEquals(Integer.valueOf(0), simpleResponse.inactiveShards);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.nrtReplicas);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.replicationFactor);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.pullReplicas);
+ assertEquals(Integer.valueOf(1), simpleResponse.properties.tlogReplicas);
+ assertNotNull(simpleResponse.shards.get("shard1").leader);
+ assertNull(simpleResponse.shards.get("shard1").leader.segInfos);
+
+ // Ensure segment data present when request sets 'segments=true' flag
+ final var segmentDataRequest = new CollectionsApi.GetCollectionStatus(simpleCollName);
+ segmentDataRequest.setSegments(true);
+ final var segmentDataResponse = segmentDataRequest.process(cluster.getSolrClient()).getParsed();
+ var segmentData = segmentDataResponse.shards.get("shard1").leader.segInfos;
+ assertNotNull(segmentData);
+ assertTrue(segmentData.info.numSegments > 0); // Expect at least one segment
+ assertEquals(segmentData.info.numSegments.intValue(), segmentData.segments.size());
+ assertEquals(Version.LATEST.toString(), segmentData.info.commitLuceneVersion);
+ // Ensure field, size, etc. data not provided
+ assertNull(segmentData.segments.get("_0").fields);
+ assertNull(segmentData.segments.get("_0").largestFilesByName);
+
+ // Ensure file-size data present when request sets sizeInfo flag
+ final var segmentFileSizeRequest = new CollectionsApi.GetCollectionStatus(simpleCollName);
+ segmentFileSizeRequest.setSizeInfo(true);
+ final var segmentFileSizeResponse =
+ segmentFileSizeRequest.process(cluster.getSolrClient()).getParsed();
+ segmentData = segmentFileSizeResponse.shards.get("shard1").leader.segInfos;
+ assertNotNull(segmentData);
+ final var largeFileList = segmentData.segments.get("_0").largestFilesByName;
+ assertNotNull(largeFileList);
+ // Hard to assert what the largest index files should be, but:
+ // - there should be at least 1 entry and...
+ // - all keys/values should be non-empty
+ assertTrue(largeFileList.size() > 0);
+ largeFileList.forEach(
+ (fileName, size) -> {
+ assertThat(fileName, is(not(emptyString())));
+ assertThat(size, is(not(emptyString())));
+ });
+ // Ensure field, etc. data not provided
+ assertNull(segmentData.segments.get("_0").fields);
+ }
+
private static final int NUM_DOCS = 10;
@Test
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
index 27039870857..0b748a0a25f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTaskQueueTest.java
@@ -21,6 +21,7 @@
import java.util.Map;
import org.apache.solr.cloud.api.collections.CollectionHandlingUtils;
import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CollectionAdminParams;
import org.apache.solr.common.params.CommonAdminParams;
@@ -73,8 +74,9 @@ public void testContainsTaskWithRequestId() throws Exception {
String watchID = tq.createResponseNode();
String requestId2 = "baz";
+ // Append the async request id to the props, then create the request node
tq.createRequestNode(
- Utils.toJSON(props.append(ew -> ew.put(CommonAdminParams.ASYNC, requestId2))), watchID);
+ Utils.toJSON(new ZkNodeProps(props).plus(CommonAdminParams.ASYNC, requestId2)), watchID);
// Set a SolrResponse as the response node by removing the QueueEvent, as done in
// OverseerTaskProcessor
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index e611902898f..bad8d58d021 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -1945,16 +1945,17 @@ public Void answer(InvocationOnMock invocation) {
}
private SolrCloudManager getCloudDataProvider(ZkStateReader zkStateReader) {
- var httpSolrClientBuilder =
+ var httpSolrClient =
new Http2SolrClient.Builder()
.withIdleTimeout(30000, TimeUnit.MILLISECONDS)
- .withConnectionTimeout(15000, TimeUnit.MILLISECONDS);
+ .withConnectionTimeout(15000, TimeUnit.MILLISECONDS)
+ .build();
var cloudSolrClient =
new CloudHttp2SolrClient.Builder(new ZkClientClusterStateProvider(zkStateReader))
- .withInternalClientBuilder(httpSolrClientBuilder)
+ .withHttpClient(httpSolrClient)
.build();
solrClients.add(cloudSolrClient);
- solrClients.add(httpSolrClientBuilder.build());
+ solrClients.add(httpSolrClient);
SolrClientCloudManager sccm = new SolrClientCloudManager(cloudSolrClient, null);
sccm.getClusterStateProvider().connect();
return sccm;
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index 886ffb6afca..baa659b7e68 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -1543,7 +1543,7 @@ private long uploadGivenConfigSet(
final ByteBuffer fileBytes =
TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false);
final String uriEnding =
- "/cluster/configs/"
+ "/configsets/"
+ configSetName
+ suffix
+ (!overwrite ? "?overwrite=false" : "")
@@ -1594,11 +1594,13 @@ private long uploadSingleConfigSetFile(
final ByteBuffer sampleConfigFile =
TestSolrConfigHandler.getFileContent(file.getAbsolutePath(), false);
+ if (uploadPath != null && !uploadPath.startsWith("/")) {
+ uploadPath = "/" + uploadPath;
+ }
final String uriEnding =
- "/cluster/configs/"
+ "/configsets/"
+ configSetName
+ suffix
- + "/"
+ uploadPath
+ (!overwrite ? "?overwrite=false" : "")
+ (cleanup ? "?cleanup=true" : "");
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
index 5523ad23c15..54aa6394902 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/IndexSizeEstimatorTest.java
@@ -19,6 +19,7 @@
import java.lang.invoke.MethodHandles;
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -32,6 +33,7 @@
import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.solr.client.api.model.CollectionStatusResponse;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -39,10 +41,11 @@
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.embedded.JettySolrRunner;
+import org.apache.solr.jersey.SolrJacksonMapper;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.TimeOut;
@@ -177,56 +180,42 @@ public void testIntegration() throws Exception {
assertEquals(0, sampledRsp.getStatus());
for (int i : Arrays.asList(1, 2)) {
@SuppressWarnings({"unchecked"})
- NamedList segInfos =
- (NamedList)
- rsp.getResponse()
- .findRecursive(collection, "shards", "shard" + i, "leader", "segInfos");
- @SuppressWarnings({"unchecked"})
- NamedList rawSize = (NamedList) segInfos.get("rawSize");
+ final var segInfosRaw =
+ Utils.getObjectByPath(
+ rsp.getResponse(),
+ false,
+ List.of(collection, "shards", "shard" + i, "leader", "segInfos"));
+ final var segInfos =
+ SolrJacksonMapper.getObjectMapper()
+ .convertValue(segInfosRaw, CollectionStatusResponse.SegmentInfo.class);
+
+ final var rawSize = segInfos.rawSize;
assertNotNull("rawSize missing", rawSize);
- @SuppressWarnings({"unchecked"})
- Map rawSizeMap = rawSize.asMap(10);
- @SuppressWarnings({"unchecked"})
- Map fieldsBySize =
- (Map) rawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE);
+ Map fieldsBySize = rawSize.fieldsBySize;
assertNotNull("fieldsBySize missing", fieldsBySize);
assertEquals(fieldsBySize.toString(), fields.size(), fieldsBySize.size());
fields.forEach(field -> assertNotNull("missing field " + field, fieldsBySize.get(field)));
- @SuppressWarnings({"unchecked"})
- Map typesBySize =
- (Map) rawSizeMap.get(IndexSizeEstimator.TYPES_BY_SIZE);
+ Map typesBySize = rawSize.typesBySize;
assertNotNull("typesBySize missing", typesBySize);
assertTrue("expected at least 8 types: " + typesBySize, typesBySize.size() >= 8);
- @SuppressWarnings({"unchecked"})
- Map summary =
- (Map) rawSizeMap.get(IndexSizeEstimator.SUMMARY);
+ Map summary = rawSize.summary;
assertNotNull("summary missing", summary);
assertEquals(summary.toString(), fields.size(), summary.size());
fields.forEach(field -> assertNotNull("missing field " + field, summary.get(field)));
@SuppressWarnings({"unchecked"})
- Map details =
- (Map) rawSizeMap.get(IndexSizeEstimator.DETAILS);
+ Map details = (Map) rawSize.details;
assertNotNull("details missing", summary);
assertEquals(details.keySet().toString(), 6, details.size());
// compare with sampled
- @SuppressWarnings({"unchecked"})
- NamedList sampledRawSize =
- (NamedList)
- rsp.getResponse()
- .findRecursive(
- collection, "shards", "shard" + i, "leader", "segInfos", "rawSize");
+ final var sampledRawSize = rawSize;
assertNotNull("sampled rawSize missing", sampledRawSize);
- @SuppressWarnings({"unchecked"})
- Map sampledRawSizeMap = rawSize.asMap(10);
- @SuppressWarnings({"unchecked"})
- Map sampledFieldsBySize =
- (Map) sampledRawSizeMap.get(IndexSizeEstimator.FIELDS_BY_SIZE);
+ Map sampledFieldsBySize = sampledRawSize.fieldsBySize;
assertNotNull("sampled fieldsBySize missing", sampledFieldsBySize);
fieldsBySize.forEach(
(k, v) -> {
- double size = fromHumanReadableUnits((String) v);
- double sampledSize = fromHumanReadableUnits((String) sampledFieldsBySize.get(k));
+ double size = fromHumanReadableUnits(v);
+ double sampledSize = fromHumanReadableUnits(sampledFieldsBySize.get(k));
double delta = size * 0.5;
assertEquals("sampled size of " + k + " is wildly off", size, sampledSize, delta);
});
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
index 265968b23c8..8600ed8236b 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
@@ -114,7 +114,6 @@ public void testFramework() {
methodNames.add(rsp.getValues()._getStr("/spec[1]/methods[0]", null));
methodNames.add(rsp.getValues()._getStr("/spec[2]/methods[0]", null));
assertTrue(methodNames.contains("POST"));
- assertTrue(methodNames.contains("GET"));
methodNames = new HashSet<>();
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
index 98d000773c7..1a61b6516fd 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/api/V2CollectionAPIMappingTest.java
@@ -21,9 +21,7 @@
import static org.apache.solr.common.params.CollectionAdminParams.COLL_CONF;
import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
import static org.apache.solr.common.params.CommonParams.ACTION;
-import static org.apache.solr.common.params.CoreAdminParams.SHARD;
-import java.util.Map;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.SolrParams;
@@ -56,7 +54,6 @@ public void populateApiBag() {
apiBag.registerObject(new ModifyCollectionAPI(collectionsHandler));
apiBag.registerObject(new MoveReplicaAPI(collectionsHandler));
apiBag.registerObject(new RebalanceLeadersAPI(collectionsHandler));
- apiBag.registerObject(new CollectionStatusAPI(collectionsHandler));
}
@Override
@@ -69,17 +66,6 @@ public boolean isCoreSpecific() {
return false;
}
- @Test
- public void testGetCollectionStatus() throws Exception {
- final SolrParams v1Params =
- captureConvertedV1Params(
- "/collections/collName", "GET", Map.of(SHARD, new String[] {"shard2"}));
-
- assertEquals(CollectionParams.CollectionAction.CLUSTERSTATUS.toString(), v1Params.get(ACTION));
- assertEquals("collName", v1Params.get(COLLECTION));
- assertEquals("shard2", v1Params.get(SHARD));
- }
-
@Test
public void testModifyCollectionAllProperties() throws Exception {
final SolrParams v1Params =
diff --git a/solr/core/src/test/org/apache/solr/handler/configsets/ListConfigSetsAPITest.java b/solr/core/src/test/org/apache/solr/handler/configsets/ListConfigSetsAPITest.java
index 40100be48ac..776d0800e03 100644
--- a/solr/core/src/test/org/apache/solr/handler/configsets/ListConfigSetsAPITest.java
+++ b/solr/core/src/test/org/apache/solr/handler/configsets/ListConfigSetsAPITest.java
@@ -61,7 +61,7 @@ public void testSuccessfulListConfigsets() throws Exception {
}
/**
- * Test the v2 to v1 response mapping for /cluster/configs
+ * Test the v2 to v1 response mapping for GET /configsets
*
* {@link org.apache.solr.handler.admin.ConfigSetsHandler} uses {@link ListConfigSets} (and its
* response class {@link ListConfigsetsResponse}) internally to serve the v1 version of this
diff --git a/solr/packaging/test/test_zk.bats b/solr/packaging/test/test_zk.bats
index 240fb072b31..d516637cfb8 100644
--- a/solr/packaging/test/test_zk.bats
+++ b/solr/packaging/test/test_zk.bats
@@ -123,7 +123,7 @@ teardown() {
refute_output --partial "ERROR"
sleep 1
- run curl "http://localhost:${SOLR_PORT}/api/cluster/configs?omitHeader=true"
+ run curl "http://localhost:${SOLR_PORT}/api/configsets"
assert_output --partial '"configSets":["_default","techproducts2"]'
}
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
index 1a5dc43fe48..eeb48138a20 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
@@ -35,7 +35,7 @@ NOTE: This API can only be used with Solr running in SolrCloud mode.
If you are not running Solr in SolrCloud mode but would still like to use shared configurations, please see the section xref:config-sets.adoc[].
The API works by passing commands to the `configs` endpoint.
-The path to the endpoint varies depending on the API being used: the v1 API uses `solr/admin/configs`, while the v2 API uses `api/cluster/configs`.
+The path to the endpoint varies depending on the API being used: the v1 API uses `/solr/admin/configs`, while the v2 API uses `/api/configsets`.
Examples of both types are provided below.
[[configsets-list]]
@@ -64,7 +64,7 @@ With the v2 API, the `list` command is implied when there is no data sent with t
[source,bash]
----
-http://localhost:8983/api/cluster/configs?omitHeader=true
+http://localhost:8983/api/configsets?omitHeader=true
----
====
======
@@ -181,7 +181,7 @@ With the v2 API, the name of the configset to upload is provided as a path param
$ (cd solr/server/solr/configsets/sample_techproducts_configs/conf && zip -r - *) > myconfigset.zip
$ curl -X PUT --header "Content-Type:application/octet-stream" --data-binary @myconfigset.zip
- "http://localhost:8983/api/cluster/configs/myConfigSet"
+ "http://localhost:8983/api/configsets/myConfigSet"
----
With this API, the default behavior is to overwrite the configset if it already exists.
@@ -211,14 +211,14 @@ V2 API::
+
====
With the v2 API, the name of the configset and file are both provided in the URL.
-They can be substituted in `/cluster/configs/__config_name__/__file_name__`.
+They can be substituted in `/configsets/__config_name__/__file_name__`.
The filename may be nested and include `/` characters.
[source,bash]
----
curl -X PUT --header "Content-Type:application/octet-stream"
--data-binary @solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
- "http://localhost:8983/api/cluster/configs/myConfigSet/solrconfig.xml"
+ "http://localhost:8983/api/configsets/myConfigSet/solrconfig.xml"
----
With this API, the default behavior is to overwrite the file if it already exists within the configset.
@@ -280,30 +280,18 @@ http://localhost:8983/solr/admin/configs?action=CREATE&name=myConfigSet&baseConf
V2 API::
+
====
-With the v2 API, the `create` command is provided as part of the JSON data that contains the required parameters:
+With the v2 API, the `create` command is implicit and parameters are specified in a `POST` request body.
[source,bash]
----
curl -X POST -H 'Content-type: application/json' -d '{
- "create":{
- "name": "myConfigSet",
- "baseConfigSet": "predefinedTemplate",
- "configSetProp.immutable": "false"}}'
- http://localhost:8983/api/cluster/configs?omitHeader=true
-----
-
-With the v2 API, configset properties can also be provided via the `properties` map:
-
-[source,bash]
-----
-curl -X POST -H 'Content-type: application/json' -d '{
- "create":{
- "name": "myConfigSet",
- "baseConfigSet": "predefinedTemplate",
- "properties": {
- "immutable": "false"
- }}}'
- http://localhost:8983/api/cluster/configs?omitHeader=true
+ "name": "myConfigSet",
+ "baseConfigSet": "predefinedTemplate",
+ "properties": {
+ "immutable": "false"
+ }
+}'
+ http://localhost:8983/api/configsets?omitHeader=true
----
====
======
@@ -359,7 +347,7 @@ The name of the configset to delete is provided as a path parameter:
[source,bash]
----
-curl -X DELETE http://localhost:8983/api/cluster/configs/myConfigSet?omitHeader=true
+curl -X DELETE http://localhost:8983/api/configsets/myConfigSet?omitHeader=true
----
====
======
diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
index f4811158ef7..6f89932a3a6 100644
--- a/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
+++ b/solr/solr-ref-guide/modules/deployment-guide/pages/collection-management.adoc
@@ -1050,11 +1050,9 @@ http://localhost:8983/solr/admin/collections?action=COLSTATUS&collection=techpro
V2 API::
+
====
-The closest V2 API is this one, but doesn't support all the features of the V1 equivalent.
-
[source,bash]
----
-curl -X GET http://localhost:8983/api/collections/techproducts_v2
+curl -X GET "http://localhost:8983/api/collections/techproducts_v2?coreInfo=true&segments=true&fieldInfo=true&sizeInfo=true"
----
====
======
@@ -1072,7 +1070,8 @@ Such incompatibilities may result from incompatible schema changes or after migr
|===
+
Collection name.
-If missing then it means all collections.
+Provided as a query parameter in v1 requests, and as a path parameter in v2.
+If missing, information is returned about all collections (supported by v1 requests only).
`coreInfo`::
+
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClientFunction.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClientFunction.java
index 68246001a40..0adb49471dc 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClientFunction.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClientFunction.java
@@ -18,11 +18,7 @@
import java.io.IOException;
-/**
- * A lambda intended for invoking SolrClient operations
- *
- * @lucene.experimental
- */
+/** A lambda intended for invoking SolrClient operations */
@FunctionalInterface
public interface SolrClientFunction {
R apply(C c) throws IOException, SolrServerException;
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java
index 18aa318f8ba..ade1ebe433f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java
@@ -40,8 +40,9 @@
public class CloudHttp2SolrClient extends CloudSolrClient {
private final ClusterStateProvider stateProvider;
- private final LBHttp2SolrClient lbClient;
+ private final LBHttp2SolrClient lbClient;
private final Http2SolrClient myClient;
+ private final boolean clientIsInternal;
/**
* Create a new client object that connects to Zookeeper and is always aware of the SolrCloud
@@ -53,8 +54,8 @@ public class CloudHttp2SolrClient extends CloudSolrClient {
*/
protected CloudHttp2SolrClient(Builder builder) {
super(builder.shardLeadersOnly, builder.parallelUpdates, builder.directUpdatesToLeadersOnly);
- var httpSolrClientBuilder = createOrGetHttpClientBuilder(builder);
- this.myClient = httpSolrClientBuilder.build();
+ this.clientIsInternal = builder.httpClient == null;
+ this.myClient = createOrGetHttpClientFromBuilder(builder);
this.stateProvider = createClusterStateProvider(builder);
this.retryExpiryTimeNano = builder.retryExpiryTimeNano;
this.defaultCollection = builder.defaultCollection;
@@ -72,14 +73,16 @@ protected CloudHttp2SolrClient(Builder builder) {
// locks.
this.locks = objectList(builder.parallelCacheRefreshesLocks);
- this.lbClient = new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder).build();
+ this.lbClient = new LBHttp2SolrClient.Builder(myClient).build();
}
- private Http2SolrClient.Builder createOrGetHttpClientBuilder(Builder builder) {
- if (builder.internalClientBuilder != null) {
- return builder.internalClientBuilder;
+ private Http2SolrClient createOrGetHttpClientFromBuilder(Builder builder) {
+ if (builder.httpClient != null) {
+ return builder.httpClient;
+ } else if (builder.internalClientBuilder != null) {
+ return builder.internalClientBuilder.build();
} else {
- return new Http2SolrClient.Builder();
+ return new Http2SolrClient.Builder().build();
}
}
@@ -126,7 +129,7 @@ private ClusterStateProvider createHttp2ClusterStateProvider(
private void closeMyClientIfNeeded() {
try {
- if (myClient != null) {
+ if (clientIsInternal && myClient != null) {
myClient.close();
}
} catch (Exception e) {
@@ -145,7 +148,7 @@ public void close() throws IOException {
}
@Override
- public LBHttp2SolrClient getLbClient() {
+ public LBHttp2SolrClient getLbClient() {
return lbClient;
}
@@ -168,6 +171,7 @@ public static class Builder {
protected Collection zkHosts = new ArrayList<>();
protected List solrUrls = new ArrayList<>();
protected String zkChroot;
+ protected Http2SolrClient httpClient;
protected boolean shardLeadersOnly = true;
protected boolean directUpdatesToLeadersOnly = false;
protected boolean parallelUpdates = true;
@@ -400,6 +404,23 @@ public Builder withCollectionCacheTtl(long timeToLive, TimeUnit unit) {
return this;
}
+ /**
+ * Set the internal http client.
+ *
+ * Note: closing the httpClient instance is the responsibility of the caller.
+ *
+ * @param httpClient http client
+ * @return this
+ */
+ public Builder withHttpClient(Http2SolrClient httpClient) {
+ if (this.internalClientBuilder != null) {
+ throw new IllegalStateException(
+ "The builder can't accept an httpClient AND an internalClientBuilder, only one of those can be provided");
+ }
+ this.httpClient = httpClient;
+ return this;
+ }
+
/**
* If provided, the CloudHttp2SolrClient will build it's internal Http2SolrClient using this
* builder (instead of the empty default one). Providing this builder allows users to configure
@@ -409,6 +430,10 @@ public Builder withCollectionCacheTtl(long timeToLive, TimeUnit unit) {
* @return this
*/
public Builder withInternalClientBuilder(Http2SolrClient.Builder internalClientBuilder) {
+ if (this.httpClient != null) {
+ throw new IllegalStateException(
+ "The builder can't accept an httpClient AND an internalClientBuilder, only one of those can be provided");
+ }
this.internalClientBuilder = internalClientBuilder;
return this;
}
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
index fddc4e2daa6..fb2eb1a123f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java
@@ -140,7 +140,9 @@ protected Http2SolrClient(String serverBaseUrl, Builder builder) {
this.httpClient = createHttpClient(builder);
this.closeClient = true;
}
- this.listenerFactory.addAll(builder.listenerFactories);
+ if (builder.listenerFactory != null) {
+ this.listenerFactory.addAll(builder.listenerFactory);
+ }
updateDefaultMimeTypeForParser();
this.httpClient.setFollowRedirects(Boolean.TRUE.equals(builder.followRedirects));
@@ -569,7 +571,6 @@ public final R requestWithBaseUrl(
* @param clientFunction a Function that consumes a Http2SolrClient and returns an arbitrary value
* @return the value returned after invoking 'clientFunction'
* @param the type returned by the provided function (and by this method)
- * @lucene.experimental
*/
public R requestWithBaseUrl(
String baseUrl, SolrClientFunction clientFunction)
@@ -905,7 +906,7 @@ public static class Builder
protected Long keyStoreReloadIntervalSecs;
- private List listenerFactories = new ArrayList<>(0);
+ private List listenerFactory;
public Builder() {
super();
@@ -931,27 +932,8 @@ public Builder(String baseSolrUrl) {
this.baseSolrUrl = baseSolrUrl;
}
- /**
- * specify a listener factory, which will be appended to any existing values.
- *
- * @param listenerFactory a HttpListenerFactory
- * @return This Builder
- */
- public Http2SolrClient.Builder addListenerFactory(HttpListenerFactory listenerFactory) {
- this.listenerFactories.add(listenerFactory);
- return this;
- }
-
- /**
- * Specify listener factories, which will replace any existing values.
- *
- * @param listenerFactories a list of HttpListenerFactory instances
- * @return This Builder
- */
- public Http2SolrClient.Builder withListenerFactories(
- List listenerFactories) {
- this.listenerFactories.clear();
- this.listenerFactories.addAll(listenerFactories);
+ public Http2SolrClient.Builder withListenerFactory(List listenerFactory) {
+ this.listenerFactory = listenerFactory;
return this;
}
@@ -1127,9 +1109,9 @@ public Builder withHttpClient(Http2SolrClient http2SolrClient) {
if (this.urlParamNames == null) {
this.urlParamNames = http2SolrClient.urlParamNames;
}
- if (this.listenerFactories.isEmpty()) {
- this.listenerFactories.clear();
- http2SolrClient.listenerFactory.forEach(this.listenerFactories::add);
+ if (this.listenerFactory == null) {
+ this.listenerFactory = new ArrayList();
+ http2SolrClient.listenerFactory.forEach(this.listenerFactory::add);
}
if (this.executor == null) {
this.executor = http2SolrClient.executor;
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java
index c26d13606b6..1127b3fd1a1 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java
@@ -138,7 +138,7 @@ protected HttpJdkSolrClient(String serverBaseUrl, HttpJdkSolrClient.Builder buil
public CompletableFuture> requestAsync(
final SolrRequest> solrRequest, String collection) {
try {
- PreparedRequest pReq = prepareRequest(solrRequest, collection);
+ PreparedRequest pReq = prepareRequest(solrRequest, collection, null);
return httpClient
.sendAsync(pReq.reqb.build(), HttpResponse.BodyHandlers.ofInputStream())
.thenApply(
@@ -157,10 +157,10 @@ public CompletableFuture> requestAsync(
}
}
- @Override
- public NamedList request(SolrRequest> solrRequest, String collection)
+ protected NamedList requestWithBaseUrl(
+ String baseUrl, SolrRequest> solrRequest, String collection)
throws SolrServerException, IOException {
- PreparedRequest pReq = prepareRequest(solrRequest, collection);
+ PreparedRequest pReq = prepareRequest(solrRequest, collection, baseUrl);
HttpResponse response = null;
try {
response = httpClient.send(pReq.reqb.build(), HttpResponse.BodyHandlers.ofInputStream());
@@ -192,13 +192,25 @@ public NamedList request(SolrRequest> solrRequest, String collection)
}
}
- private PreparedRequest prepareRequest(SolrRequest> solrRequest, String collection)
+ @Override
+ public NamedList request(SolrRequest> solrRequest, String collection)
+ throws SolrServerException, IOException {
+ return requestWithBaseUrl(null, solrRequest, collection);
+ }
+
+ private PreparedRequest prepareRequest(
+ SolrRequest> solrRequest, String collection, String overrideBaseUrl)
throws SolrServerException, IOException {
checkClosed();
if (ClientUtils.shouldApplyDefaultCollection(collection, solrRequest)) {
collection = defaultCollection;
}
- String url = getRequestUrl(solrRequest, collection);
+ String url;
+ if (overrideBaseUrl != null) {
+ url = ClientUtils.buildRequestUrl(solrRequest, overrideBaseUrl, collection);
+ } else {
+ url = getRequestUrl(solrRequest, collection);
+ }
ResponseParser parserToUse = responseParser(solrRequest);
ModifiableSolrParams queryParams = initializeSolrParams(solrRequest, parserToUse);
var reqb = HttpRequest.newBuilder();
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttp2SolrClient.java
index 4c0d46b13db..2c926a26261 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttp2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttp2SolrClient.java
@@ -23,25 +23,22 @@
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.Arrays;
-import java.util.Collections;
-import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.solr.client.solrj.ResponseParser;
+import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.IsUpdateRequest;
import org.apache.solr.client.solrj.request.RequestWriter;
import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.slf4j.MDC;
/**
- * This "LoadBalanced Http Solr Client" is a load balancer for multiple Http Solr Clients. This is
- * useful when you have multiple Solr endpoints and requests need to be Load Balanced among them.
+ * This "LoadBalanced Http Solr Client" is a load balancing wrapper around a Http Solr Client. This
+ * is useful when you have multiple Solr endpoints and requests need to be Load Balanced among them.
*
* Do NOT use this class for indexing in leader/follower scenarios since documents must be
* sent to the correct leader; no inter-node routing is done.
@@ -59,7 +56,7 @@
*
*
*
- * SolrClient client = new LBHttp2SolrClient.Builder(http2SolrClientBuilder,
+ * SolrClient client = new LBHttp2SolrClient.Builder(http2SolrClient,
* new LBSolrClient.Endpoint("http://host1:8080/solr"), new LBSolrClient.Endpoint("http://host2:8080/solr"))
* .build();
*
@@ -72,7 +69,7 @@
*
*
*
- * SolrClient client = new LBHttp2SolrClient.Builder(http2SolrClientBuilder,
+ * SolrClient client = new LBHttp2SolrClient.Builder(http2SolrClient,
* new LBSolrClient.Endpoint("http://host1:8080/solr", "coreA"),
* new LBSolrClient.Endpoint("http://host2:8080/solr", "coreB"))
* .build();
@@ -97,63 +94,35 @@
*
* @since solr 8.0
*/
-public class LBHttp2SolrClient> extends LBSolrClient {
+public class LBHttp2SolrClient extends LBSolrClient {
- private final Map urlToClient;
- private final Set urlParamNames;
-
- // must synchronize on this when building
- private final HttpSolrClientBuilderBase, ?> solrClientBuilder;
+ protected final C solrClient;
+ @SuppressWarnings("unchecked")
private LBHttp2SolrClient(Builder> builder) {
super(Arrays.asList(builder.solrEndpoints));
- this.solrClientBuilder = builder.solrClientBuilder;
-
+ this.solrClient = (C) builder.solrClient;
this.aliveCheckIntervalMillis = builder.aliveCheckIntervalMillis;
this.defaultCollection = builder.defaultCollection;
-
- if (builder.solrClientBuilder.urlParamNames == null) {
- this.urlParamNames = Collections.emptySet();
- } else {
- this.urlParamNames = Set.copyOf(builder.solrClientBuilder.urlParamNames);
- }
-
- this.urlToClient = new ConcurrentHashMap<>();
- for (LBSolrClient.Endpoint endpoint : builder.solrEndpoints) {
- getClient(endpoint);
- }
}
@Override
- protected HttpSolrClientBase getClient(final Endpoint endpoint) {
- return urlToClient.computeIfAbsent(
- endpoint.getBaseUrl(),
- url -> {
- synchronized (solrClientBuilder) {
- solrClientBuilder.baseSolrUrl = url;
- return solrClientBuilder.build();
- }
- });
+ protected SolrClient getClient(Endpoint endpoint) {
+ return solrClient;
}
@Override
public ResponseParser getParser() {
- return urlToClient.isEmpty() ? null : urlToClient.values().iterator().next().getParser();
+ return solrClient.getParser();
}
@Override
public RequestWriter getRequestWriter() {
- return urlToClient.isEmpty() ? null : urlToClient.values().iterator().next().getRequestWriter();
+ return solrClient.getRequestWriter();
}
public Set getUrlParamNames() {
- return urlParamNames;
- }
-
- @Override
- public void close() {
- urlToClient.values().forEach(IOUtils::closeQuietly);
- super.close();
+ return solrClient.getUrlParamNames();
}
/**
@@ -241,18 +210,23 @@ private CompletableFuture> doAsyncRequest(
RetryListener listener) {
String baseUrl = endpoint.toString();
rsp.server = baseUrl;
- final var client = getClient(endpoint);
- CompletableFuture> future =
- client.requestAsync(req.getRequest(), endpoint.getCore());
- future.whenComplete(
- (result, throwable) -> {
- if (!future.isCompletedExceptionally()) {
- onSuccessfulRequest(result, endpoint, rsp, isZombie, listener);
- } else if (!future.isCancelled()) {
- onFailedRequest(throwable, endpoint, isNonRetryable, isZombie, listener);
- }
- });
- return future;
+ final var client = (Http2SolrClient) getClient(endpoint);
+ try {
+ CompletableFuture> future =
+ client.requestWithBaseUrl(baseUrl, (c) -> c.requestAsync(req.getRequest()));
+ future.whenComplete(
+ (result, throwable) -> {
+ if (!future.isCompletedExceptionally()) {
+ onSuccessfulRequest(result, endpoint, rsp, isZombie, listener);
+ } else if (!future.isCancelled()) {
+ onFailedRequest(throwable, endpoint, isNonRetryable, isZombie, listener);
+ }
+ });
+ return future;
+ } catch (SolrServerException | IOException e) {
+ // Unreachable, since 'requestWithBaseUrl' above is running the request asynchronously
+ throw new RuntimeException(e);
+ }
}
private void onSuccessfulRequest(
@@ -316,28 +290,16 @@ private void onFailedRequest(
}
}
- public static class Builder> {
-
- private final B solrClientBuilder;
+ public static class Builder {
+ private final C solrClient;
private final LBSolrClient.Endpoint[] solrEndpoints;
private long aliveCheckIntervalMillis =
TimeUnit.MILLISECONDS.convert(60, TimeUnit.SECONDS); // 1 minute between checks
protected String defaultCollection;
- /**
- * Use this Builder to configure an LBHttp2SolrClient. The passed-in Solr Client Builder will be
- * used to generate an internal client per Endpoint.
- *
- * Implementation Note: LBHttp2SolrClient will modify the passed-in Builder's {@link
- * HttpSolrClientBuilderBase#baseSolrUrl} whenever it needs to generate a new Http Solr Client.
- *
- * @param solrClientBuilder A Builder like {@link Http2SolrClient.Builder} used to generate the
- * internal clients
- * @param endpoints the initial Solr Endpoints to load balance
- */
- public Builder(B solrClientBuilder, Endpoint... endpoints) {
- this.solrClientBuilder = solrClientBuilder;
+ public Builder(C solrClient, Endpoint... endpoints) {
+ this.solrClient = solrClient;
this.solrEndpoints = endpoints;
}
@@ -347,7 +309,7 @@ public Builder(B solrClientBuilder, Endpoint... endpoints) {
*
* @param aliveCheckInterval how often to ping for aliveness
*/
- public Builder setAliveCheckInterval(int aliveCheckInterval, TimeUnit unit) {
+ public Builder setAliveCheckInterval(int aliveCheckInterval, TimeUnit unit) {
if (aliveCheckInterval <= 0) {
throw new IllegalArgumentException(
"Alive check interval must be " + "positive, specified value = " + aliveCheckInterval);
@@ -357,13 +319,13 @@ public Builder setAliveCheckInterval(int aliveCheckInterval, TimeUnit unit) {
}
/** Sets a default for core or collection based requests. */
- public Builder withDefaultCollection(String defaultCoreOrCollection) {
+ public Builder withDefaultCollection(String defaultCoreOrCollection) {
this.defaultCollection = defaultCoreOrCollection;
return this;
}
- public LBHttp2SolrClient build() {
- return new LBHttp2SolrClient(this);
+ public LBHttp2SolrClient build() {
+ return new LBHttp2SolrClient(this);
}
}
}
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
index 31c75662b48..64201b03c13 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
@@ -497,7 +497,25 @@ private static boolean isTimeExceeded(long timeAllowedNano, long timeOutTime) {
private NamedList doRequest(Endpoint endpoint, SolrRequest> solrRequest)
throws SolrServerException, IOException {
final var solrClient = getClient(endpoint);
- return solrClient.request(solrRequest, endpoint.getCore());
+ return doRequest(solrClient, endpoint.getBaseUrl(), endpoint.getCore(), solrRequest);
+ }
+
+ // TODO SOLR-17541 should remove the need for the special-casing below; remove as a part of that
+ // ticket.
+ private NamedList doRequest(
+ SolrClient solrClient, String baseUrl, String collection, SolrRequest> solrRequest)
+ throws SolrServerException, IOException {
+ // Some implementations of LBSolrClient.getClient(...) return a Http2SolrClient that may not be
+ // pointed at the desired URL (or any URL for that matter). We special case that here to ensure
+ // the appropriate URL is provided.
+ if (solrClient instanceof Http2SolrClient httpSolrClient) {
+ return httpSolrClient.requestWithBaseUrl(baseUrl, (c) -> c.request(solrRequest, collection));
+ } else if (solrClient instanceof HttpJdkSolrClient) {
+ return ((HttpJdkSolrClient) solrClient).requestWithBaseUrl(baseUrl, solrRequest, collection);
+ }
+
+ // Assume provided client already uses 'baseUrl'
+ return solrClient.request(solrRequest, collection);
}
protected Exception doRequest(
@@ -607,7 +625,12 @@ private void checkAZombieServer(EndpointWrapper zombieServer) {
// First the one on the endpoint, then the default collection
final String effectiveCollection =
Objects.requireNonNullElse(zombieEndpoint.getCore(), getDefaultCollection());
- final var responseRaw = getClient(zombieEndpoint).request(queryRequest, effectiveCollection);
+ final var responseRaw =
+ doRequest(
+ getClient(zombieEndpoint),
+ zombieEndpoint.getBaseUrl(),
+ effectiveCollection,
+ queryRequest);
QueryResponse resp = new QueryResponse();
resp.setResponse(responseRaw);
@@ -711,7 +734,7 @@ public NamedList request(
// Choose the endpoint's core/collection over any specified by the user
final var effectiveCollection =
endpoint.getCore() == null ? collection : endpoint.getCore();
- return getClient(endpoint).request(request, effectiveCollection);
+ return doRequest(getClient(endpoint), endpoint.getBaseUrl(), effectiveCollection, request);
} catch (SolrException e) {
// Server is alive but the request was malformed or invalid
throw e;
@@ -752,7 +775,8 @@ public NamedList request(
++numServersTried;
final String effectiveCollection =
endpoint.getCore() == null ? collection : endpoint.getCore();
- NamedList rsp = getClient(endpoint).request(request, effectiveCollection);
+ NamedList rsp =
+ doRequest(getClient(endpoint), endpoint.getBaseUrl(), effectiveCollection, request);
// remove from zombie list *before* adding to the alive list to avoid a race that could lose
// a server
zombieServers.remove(endpoint.getUrl());
diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
index 0bbe2dc50a0..ed9da582fdc 100644
--- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
@@ -33,6 +33,9 @@
*/
public interface MapWriter extends MapSerializable, NavigableObject, JSONWriter.Writable {
+ /** Writes this object's entries out to {@code ew}. */
+ void writeMap(EntryWriter ew) throws IOException;
+
default String jsonStr() {
return Utils.toJSONString(this);
}
@@ -42,6 +45,7 @@ default Map toMap(Map map) {
return Utils.convertToMap(this, map);
}
+ /** For implementing Noggit {@link org.noggit.JSONWriter.Writable}. */
@Override
default void write(JSONWriter writer) {
writer.startObject();
@@ -70,16 +74,6 @@ public MapWriter.EntryWriter put(CharSequence k, Object v) {
writer.endObject();
}
- void writeMap(EntryWriter ew) throws IOException;
-
- default MapWriter append(MapWriter another) {
- MapWriter m = this;
- return ew -> {
- m.writeMap(ew);
- another.writeMap(ew);
- };
- }
-
/**
* An interface to push one entry at a time to the output. The order of the keys is not defined,
* but we assume they are distinct -- don't call {@code put} more than once for the same key.
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
index 77e7186c9de..7373632da28 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java
@@ -103,6 +103,7 @@ public class JavaBinCodec implements PushWriter {
MAP_ENTRY = 19,
UUID = 20, // This is reserved to be used only in LogCodec
// types that combine tag + length (or other info) in a single byte
+ PRIMITIVE_ARR = 21,
TAG_AND_LEN = (byte) (1 << 5),
STR = (byte) (1 << 5),
SINT = (byte) (2 << 5),
@@ -348,6 +349,8 @@ protected Object readObject(DataInputInputStream dis) throws IOException {
return readMapEntry(dis);
case MAP_ENTRY_ITER:
return readMapIter(dis);
+ case PRIMITIVE_ARR:
+ return readPrimitiveArray(dis);
}
throw new RuntimeException("Unknown type " + tagByte);
@@ -438,9 +441,150 @@ public boolean writeKnownType(Object val) throws IOException {
writeBoolean(((AtomicBoolean) val).get());
return true;
}
+ if (val instanceof float[] ff) {
+ writeFloatArr(ff);
+ return true;
+ }
+ if (val instanceof int[] ii) {
+ writeIntArr(ii);
+ return true;
+ }
+ if (val instanceof long[] ll) {
+ writeLongArr(ll);
+ return true;
+ }
+ if (val instanceof double[] dd) {
+ writeDoubleArr(dd);
+ return true;
+ }
+ if (val instanceof short[] ss) {
+ writeShortArr(ss);
+ return true;
+ }
+ if (val instanceof boolean[] bb) {
+ writeBoolArr(bb);
+ return true;
+ }
return false;
}
+ public Object readPrimitiveArray(DataInputInputStream dis) throws IOException {
+ tagByte = dis.readByte();
+ int len = readVInt(dis);
+ switch (tagByte) {
+ case FLOAT:
+ {
+ float[] v = new float[len];
+ for (int i = 0; i < len; i++) {
+ v[i] = dis.readFloat();
+ }
+ return v;
+ }
+ case INT:
+ {
+ int[] v = new int[len];
+ for (int i = 0; i < len; i++) {
+ v[i] = dis.readInt();
+ }
+ return v;
+ }
+
+ case LONG:
+ {
+ long[] v = new long[len];
+ for (int i = 0; i < len; i++) {
+ v[i] = dis.readLong();
+ }
+ return v;
+ }
+ case DOUBLE:
+ {
+ double[] v = new double[len];
+ for (int i = 0; i < len; i++) {
+ v[i] = dis.readDouble();
+ }
+ return v;
+ }
+ case SHORT:
+ {
+ short[] v = new short[len];
+ for (int i = 0; i < len; i++) {
+ v[i] = dis.readShort();
+ }
+ return v;
+ }
+ case BOOL_TRUE:
+ case BOOL_FALSE:
+ {
+ boolean[] v = new boolean[len];
+ for (int i = 0; i < len; i++) {
+ byte b = dis.readByte();
+ v[i] = b == BOOL_FALSE ? false : true;
+ }
+ return v;
+ }
+ case BYTE:
+ {
+ // it should be possible to serialize byte[] in the new format as well
+ byte[] v = new byte[len];
+ dis.readFully(v);
+ return v;
+ }
+ default:
+ {
+ throw new RuntimeException("Invalid type : " + tagByte);
+ }
+ }
+ }
+
+ public void writePrimitiveArrHeader(byte tag, int len) throws IOException {
+ writeTag(PRIMITIVE_ARR);
+ writeTag(tag);
+ writeVInt(len, daos);
+ }
+
+ public void writeFloatArr(float[] vals) throws IOException {
+ writePrimitiveArrHeader(FLOAT, vals.length);
+ for (float f : vals) {
+ daos.writeFloat(f);
+ }
+ }
+
+ public void writeIntArr(int[] vals) throws IOException {
+ writePrimitiveArrHeader(INT, vals.length);
+ for (int i : vals) {
+ daos.writeInt(i);
+ }
+ }
+
+ public void writeDoubleArr(double[] vals) throws IOException {
+ writePrimitiveArrHeader(DOUBLE, vals.length);
+ for (double d : vals) {
+ daos.writeDouble(d);
+ }
+ }
+
+ public void writeLongArr(long[] vals) throws IOException {
+ writePrimitiveArrHeader(LONG, vals.length);
+ for (long l : vals) {
+ daos.writeLong(l);
+ }
+ }
+
+ public void writeBoolArr(boolean[] vals) throws IOException {
+ writePrimitiveArrHeader(BOOL_TRUE, vals.length);
+ for (boolean b : vals) {
+ writeBoolean(b);
+ }
+ }
+
+ public void writeShortArr(short[] vals) throws IOException {
+ writePrimitiveArrHeader(SHORT, vals.length);
+ for (short l : vals) {
+ daos.writeShort(l);
+ }
+ }
+
public class BinEntryWriter implements MapWriter.EntryWriter {
@Override
public MapWriter.EntryWriter put(CharSequence k, Object v) throws IOException {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBuilderTest.java
index 46b6883f755..0846dfefc6c 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBuilderTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientBuilderTest.java
@@ -120,6 +120,26 @@ public void testIsDirectUpdatesToLeadersOnlyDefault() throws IOException {
}
}
+ @Test
+ public void testExternalClientAndInternalBuilderTogether() {
+ expectThrows(
+ IllegalStateException.class,
+ () ->
+ new CloudHttp2SolrClient.Builder(
+ Collections.singletonList(ANY_ZK_HOST), Optional.of(ANY_CHROOT))
+ .withHttpClient(mock(Http2SolrClient.class))
+ .withInternalClientBuilder(mock(Http2SolrClient.Builder.class))
+ .build());
+ expectThrows(
+ IllegalStateException.class,
+ () ->
+ new CloudHttp2SolrClient.Builder(
+ Collections.singletonList(ANY_ZK_HOST), Optional.of(ANY_CHROOT))
+ .withInternalClientBuilder(mock(Http2SolrClient.Builder.class))
+ .withHttpClient(mock(Http2SolrClient.class))
+ .build());
+ }
+
@Test
public void testProvideInternalBuilder() throws IOException {
Http2SolrClient http2Client = mock(Http2SolrClient.class);
@@ -139,6 +159,20 @@ public void testProvideInternalBuilder() throws IOException {
verify(http2Client, times(1)).close();
}
+ @Test
+ public void testProvideExternalClient() throws IOException {
+ Http2SolrClient http2Client = mock(Http2SolrClient.class);
+ CloudHttp2SolrClient.Builder clientBuilder =
+ new CloudHttp2SolrClient.Builder(
+ Collections.singletonList(ANY_ZK_HOST), Optional.of(ANY_CHROOT))
+ .withHttpClient(http2Client);
+ try (CloudHttp2SolrClient client = clientBuilder.build()) {
+ assertEquals(http2Client, client.getHttpClient());
+ }
+ // it's external, should be NOT closed when closing CloudSolrClient
+ verify(http2Client, never()).close();
+ }
+
@Test
public void testDefaultCollectionPassedFromBuilderToClient() throws IOException {
try (CloudHttp2SolrClient createdClient =
@@ -161,19 +195,29 @@ public void testDefaultCollectionPassedFromBuilderToClient() throws IOException
public void testHttpClientPreservedInHttp2ClusterStateProvider() throws IOException {
List solrUrls = List.of(cluster.getJettySolrRunner(0).getBaseUrl().toString());
- // without internalClientBuilder
- testHttpClientConsistency(solrUrls, null);
+ // No httpClient - No internalClientBuilder
+ testHttpClientConsistency(solrUrls, null, null);
+
+ // httpClient - No internalClientBuilder
+ try (Http2SolrClient httpClient = new Http2SolrClient.Builder().build()) {
+ testHttpClientConsistency(solrUrls, httpClient, null);
+ }
- // with internalClientBuilder
+ // No httpClient - internalClientBuilder
Http2SolrClient.Builder internalClientBuilder = new Http2SolrClient.Builder();
- testHttpClientConsistency(solrUrls, internalClientBuilder);
+ testHttpClientConsistency(solrUrls, null, internalClientBuilder);
}
private void testHttpClientConsistency(
- List solrUrls, Http2SolrClient.Builder internalClientBuilder) throws IOException {
+ List solrUrls,
+ Http2SolrClient httpClient,
+ Http2SolrClient.Builder internalClientBuilder)
+ throws IOException {
CloudHttp2SolrClient.Builder clientBuilder = new CloudHttp2SolrClient.Builder(solrUrls);
- if (internalClientBuilder != null) {
+ if (httpClient != null) {
+ clientBuilder.withHttpClient(httpClient);
+ } else if (internalClientBuilder != null) {
clientBuilder.withInternalClientBuilder(internalClientBuilder);
}
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpJdkSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpJdkSolrClientTest.java
index 1dbfc0e998b..b3980ad44bc 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpJdkSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpJdkSolrClientTest.java
@@ -34,6 +34,7 @@
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.RequestWriter;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.common.params.CommonParams;
@@ -153,6 +154,25 @@ protected void testQuerySetup(SolrRequest.METHOD method, ResponseParser rp) thro
}
}
+ @Test
+ public void testRequestWithBaseUrl() throws Exception {
+ DebugServlet.clear();
+ DebugServlet.addResponseHeader("Content-Type", "application/octet-stream");
+ DebugServlet.responseBodyByQueryFragment.put("", javabinResponse());
+ String someOtherUrl = getBaseUrl() + "/some/other/base/url";
+ String intendedUrl = getBaseUrl() + DEBUG_SERVLET_PATH;
+ SolrQuery q = new SolrQuery("foo");
+ q.setParam("a", MUST_ENCODE);
+
+ HttpJdkSolrClient.Builder b =
+ builder(someOtherUrl).withResponseParser(new BinaryResponseParser());
+ try (HttpJdkSolrClient client = b.build()) {
+ client.requestWithBaseUrl(intendedUrl, new QueryRequest(q, SolrRequest.METHOD.GET), null);
+ assertEquals(
+ client.getParser().getVersion(), DebugServlet.parameters.get(CommonParams.VERSION)[0]);
+ }
+ }
+
@Test
public void testGetById() throws Exception {
DebugServlet.clear();
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java
index 9c2f79ff0a5..61504a052b8 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientIntegrationTest.java
@@ -18,6 +18,7 @@
import java.io.File;
import java.io.IOException;
+import java.io.UncheckedIOException;
import java.lang.invoke.MethodHandles;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -117,28 +118,30 @@ public void tearDown() throws Exception {
private LBClientHolder client(LBSolrClient.Endpoint... baseSolrEndpoints) {
if (random().nextBoolean()) {
- var delegateClientBuilder =
+ var delegateClient =
new Http2SolrClient.Builder()
.withConnectionTimeout(1000, TimeUnit.MILLISECONDS)
- .withIdleTimeout(2000, TimeUnit.MILLISECONDS);
+ .withIdleTimeout(2000, TimeUnit.MILLISECONDS)
+ .build();
var lbClient =
- new LBHttp2SolrClient.Builder<>(delegateClientBuilder, baseSolrEndpoints)
+ new LBHttp2SolrClient.Builder<>(delegateClient, baseSolrEndpoints)
.withDefaultCollection(solr[0].getDefaultCollection())
.setAliveCheckInterval(500, TimeUnit.MILLISECONDS)
.build();
- return new LBClientHolder(lbClient, delegateClientBuilder);
+ return new LBClientHolder(lbClient, delegateClient);
} else {
- var delegateClientBuilder =
+ var delegateClient =
new HttpJdkSolrClient.Builder()
.withConnectionTimeout(1000, TimeUnit.MILLISECONDS)
.withIdleTimeout(2000, TimeUnit.MILLISECONDS)
- .withSSLContext(MockTrustManager.ALL_TRUSTING_SSL_CONTEXT);
+ .withSSLContext(MockTrustManager.ALL_TRUSTING_SSL_CONTEXT)
+ .build();
var lbClient =
- new LBHttp2SolrClient.Builder<>(delegateClientBuilder, baseSolrEndpoints)
+ new LBHttp2SolrClient.Builder<>(delegateClient, baseSolrEndpoints)
.withDefaultCollection(solr[0].getDefaultCollection())
.setAliveCheckInterval(500, TimeUnit.MILLISECONDS)
.build();
- return new LBClientHolder(lbClient, delegateClientBuilder);
+ return new LBClientHolder(lbClient, delegateClient);
}
}
@@ -314,9 +317,9 @@ public void startJetty() throws Exception {
private static class LBClientHolder implements AutoCloseable {
final LBHttp2SolrClient> lbClient;
- final HttpSolrClientBuilderBase, ?> delegate;
+ final HttpSolrClientBase delegate;
- LBClientHolder(LBHttp2SolrClient> lbClient, HttpSolrClientBuilderBase, ?> delegate) {
+ LBClientHolder(LBHttp2SolrClient> lbClient, HttpSolrClientBase delegate) {
this.lbClient = lbClient;
this.delegate = delegate;
}
@@ -324,6 +327,11 @@ private static class LBClientHolder implements AutoCloseable {
@Override
public void close() {
lbClient.close();
+ try {
+ delegate.close();
+ } catch (IOException ioe) {
+ throw new UncheckedIOException(ioe);
+ }
}
}
}
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java
index 30cee0804b5..9d2019309b0 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java
@@ -18,8 +18,6 @@
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@@ -30,6 +28,7 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.solr.SolrTestCase;
+import org.apache.solr.client.solrj.SolrClientFunction;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.QueryRequest;
@@ -52,12 +51,11 @@ public void testLBHttp2SolrClientWithTheseParamNamesInTheUrl() {
Set urlParamNames = new HashSet<>(2);
urlParamNames.add("param1");
- var httpSolrClientBuilder =
- new Http2SolrClient.Builder(url).withTheseParamNamesInTheUrl(urlParamNames);
- var endpoint = new LBSolrClient.Endpoint(url);
- try (var testClient =
- new LBHttp2SolrClient.Builder(httpSolrClientBuilder, endpoint)
- .build()) {
+ try (Http2SolrClient http2SolrClient =
+ new Http2SolrClient.Builder(url).withTheseParamNamesInTheUrl(urlParamNames).build();
+ LBHttp2SolrClient testClient =
+ new LBHttp2SolrClient.Builder<>(http2SolrClient, new LBSolrClient.Endpoint(url))
+ .build()) {
assertArrayEquals(
"Wrong urlParamNames found in lb client.",
@@ -66,7 +64,7 @@ public void testLBHttp2SolrClientWithTheseParamNamesInTheUrl() {
assertArrayEquals(
"Wrong urlParamNames found in base client.",
urlParamNames.toArray(),
- testClient.getClient(endpoint).getUrlParamNames().toArray());
+ http2SolrClient.getUrlParamNames().toArray());
}
}
@@ -76,11 +74,12 @@ public void testSynchronous() throws Exception {
LBSolrClient.Endpoint ep2 = new LBSolrClient.Endpoint("http://endpoint.two");
List endpointList = List.of(ep1, ep2);
- var httpSolrClientBuilder =
- new MockHttpSolrClientBuilder().withConnectionTimeout(10, TimeUnit.SECONDS);
-
- try (LBHttp2SolrClient testClient =
- new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder, ep1, ep2).build()) {
+ Http2SolrClient.Builder b =
+ new Http2SolrClient.Builder("http://base.url").withConnectionTimeout(10, TimeUnit.SECONDS);
+ try (MockHttpSolrClient client = new MockHttpSolrClient("http://base.url", b);
+ LBHttp2SolrClient testClient =
+ new LBHttp2SolrClient.Builder<>(client, ep1, ep2).build()) {
String lastEndpoint = null;
for (int i = 0; i < 10; i++) {
@@ -104,14 +103,15 @@ public void testSynchronousWithFalures() throws Exception {
LBSolrClient.Endpoint ep2 = new LBSolrClient.Endpoint("http://endpoint.two");
List endpointList = List.of(ep1, ep2);
- var httpSolrClientBuilder =
- new MockHttpSolrClientBuilder().withConnectionTimeout(10, TimeUnit.SECONDS);
-
- try (LBHttp2SolrClient testClient =
- new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder, ep1, ep2).build()) {
+ Http2SolrClient.Builder b =
+ new Http2SolrClient.Builder("http://base.url").withConnectionTimeout(10, TimeUnit.SECONDS);
+ try (MockHttpSolrClient client = new MockHttpSolrClient("http://base.url", b);
+ LBHttp2SolrClient testClient =
+ new LBHttp2SolrClient.Builder<>(client, ep1, ep2).build()) {
- setEndpointToFail(testClient, ep1);
- setEndpointToSucceed(testClient, ep2);
+ client.basePathToFail = ep1.getBaseUrl();
+ String basePathToSucceed = ep2.getBaseUrl();
String qValue = "First time";
for (int i = 0; i < 5; i++) {
@@ -121,13 +121,13 @@ public void testSynchronousWithFalures() throws Exception {
LBSolrClient.Rsp response = testClient.request(req);
assertEquals(
"The healthy node 'endpoint two' should have served the request: " + i,
- ep2.getBaseUrl(),
+ basePathToSucceed,
response.server);
checkSynchonousResponseContent(response, qValue);
}
- setEndpointToFail(testClient, ep2);
- setEndpointToSucceed(testClient, ep1);
+ client.basePathToFail = ep2.getBaseUrl();
+ basePathToSucceed = ep1.getBaseUrl();
qValue = "Second time";
for (int i = 0; i < 5; i++) {
@@ -137,13 +137,21 @@ public void testSynchronousWithFalures() throws Exception {
LBSolrClient.Rsp response = testClient.request(req);
assertEquals(
"The healthy node 'endpoint one' should have served the request: " + i,
- ep1.getBaseUrl(),
+ basePathToSucceed,
response.server);
checkSynchonousResponseContent(response, qValue);
}
}
}
+ private void checkSynchonousResponseContent(LBSolrClient.Rsp response, String qValue) {
+ assertEquals("There should be one element in the response.", 1, response.getResponse().size());
+ assertEquals(
+ "The response key 'response' should echo the query.",
+ qValue,
+ response.getResponse().get("response"));
+ }
+
@Test
public void testAsyncWithFailures() {
@@ -154,35 +162,28 @@ public void testAsyncWithFailures() {
LBSolrClient.Endpoint ep2 = new LBSolrClient.Endpoint("http://endpoint.two");
List endpointList = List.of(ep1, ep2);
- var httpSolrClientBuilder =
- new MockHttpSolrClientBuilder().withConnectionTimeout(10, TimeUnit.SECONDS);
-
- try (LBHttp2SolrClient testClient =
- new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder, ep1, ep2).build()) {
+ Http2SolrClient.Builder b =
+ new Http2SolrClient.Builder("http://base.url").withConnectionTimeout(10, TimeUnit.SECONDS);
+ try (MockHttpSolrClient client = new MockHttpSolrClient("http://base.url", b);
+ LBHttp2SolrClient testClient =
+ new LBHttp2SolrClient.Builder<>(client, ep1, ep2).build()) {
for (int j = 0; j < 2; j++) {
// first time Endpoint One will return error code 500.
// second time Endpoint One will be healthy
- LBSolrClient.Endpoint endpointToSucceed;
- LBSolrClient.Endpoint endpointToFail;
+ String basePathToSucceed;
if (j == 0) {
- setEndpointToFail(testClient, ep1);
- setEndpointToSucceed(testClient, ep2);
- endpointToSucceed = ep2;
- endpointToFail = ep1;
+ client.basePathToFail = ep1.getBaseUrl();
+ basePathToSucceed = ep2.getBaseUrl();
} else {
- setEndpointToFail(testClient, ep2);
- setEndpointToSucceed(testClient, ep1);
- endpointToSucceed = ep1;
- endpointToFail = ep2;
+ client.basePathToFail = ep2.getBaseUrl();
+ basePathToSucceed = ep1.getBaseUrl();
}
- List successEndpointLastBasePaths =
- basePathsForEndpoint(testClient, endpointToSucceed);
- List failEndpointLastBasePaths = basePathsForEndpoint(testClient, endpointToFail);
for (int i = 0; i < 10; i++) {
- // i: we'll try 10 times. It should behave the same with iter 2-10. .
+ // i: we'll try 10 times to see if it behaves the same every time.
QueryRequest queryRequest = new QueryRequest(new MapSolrParams(Map.of("q", "" + i)));
LBSolrClient.Req req = new LBSolrClient.Req(queryRequest, endpointList);
@@ -195,26 +196,26 @@ public void testAsyncWithFailures() {
} catch (TimeoutException | ExecutionException e) {
fail(iterMessage + " Response ended in failure: " + e);
}
-
if (i == 0) {
- // When i=0, it must try both endpoints to find success:
+ // When j=0, "endpoint one" fails.
+ // The first time around (i) it tries the first, then the second.
+ //
+ // With j=0 and i>0, it only tries "endpoint two".
//
- // with j=0, endpoint one is tried first because it
- // is first one the list, but it fails.
- // with j=1, endpoint two is tried first because
- // it is the only known healthy node, but
- // now it is failing.
- assertEquals(iterMessage, 1, successEndpointLastBasePaths.size());
- assertEquals(iterMessage, 1, failEndpointLastBasePaths.size());
+ // When j=1 and i=0, "endpoint two" starts failing.
+ // So it tries both it and "endpoint one"
+ //
+ // With j=1 and i>0, it only tries "endpoint one".
+ assertEquals(iterMessage, 2, client.lastBasePaths.size());
+
+ String failedBasePath = client.lastBasePaths.remove(0);
+ assertEquals(iterMessage, client.basePathToFail, failedBasePath);
} else {
- // With i>0,
- // With j=0 and i>0, it only tries "endpoint two".
- // With j=1 and i>0, it only tries "endpoint one".
- assertEquals(iterMessage, 1, successEndpointLastBasePaths.size());
- assertTrue(iterMessage, failEndpointLastBasePaths.isEmpty());
+ // The first endpoint tried succeeds, so there is no retry and only one base path is recorded.
+ assertEquals(iterMessage, 1, client.lastBasePaths.size());
}
- successEndpointLastBasePaths.clear();
- failEndpointLastBasePaths.clear();
+ String successBasePath = client.lastBasePaths.remove(0);
+ assertEquals(iterMessage, basePathToSucceed, successBasePath);
}
}
}
@@ -226,11 +227,11 @@ public void testAsync() {
LBSolrClient.Endpoint ep2 = new LBSolrClient.Endpoint("http://endpoint.two");
List endpointList = List.of(ep1, ep2);
- var httpSolrClientBuilder =
- new MockHttpSolrClientBuilder().withConnectionTimeout(10, TimeUnit.SECONDS);
-
- try (LBHttp2SolrClient testClient =
- new LBHttp2SolrClient.Builder<>(httpSolrClientBuilder, ep1, ep2).build()) {
+ Http2SolrClient.Builder b =
+ new Http2SolrClient.Builder("http://base.url").withConnectionTimeout(10, TimeUnit.SECONDS);
+ try (MockHttpSolrClient client = new MockHttpSolrClient("http://base.url", b);
+ LBHttp2SolrClient testClient =
+ new LBHttp2SolrClient.Builder<>(client, ep1, ep2).build()) {
int limit = 10; // For simplicity use an even limit
List> responses = new ArrayList<>();
@@ -242,17 +243,23 @@ public void testAsync() {
}
QueryRequest[] queryRequests = new QueryRequest[limit];
- List> lastSolrRequests = lastSolrRequests(testClient, ep1, ep2);
- assertEquals(limit, lastSolrRequests.size());
-
+ int numEndpointOne = 0;
+ int numEndpointTwo = 0;
for (int i = 0; i < limit; i++) {
- SolrRequest> lastSolrReq = lastSolrRequests.get(i);
+ SolrRequest> lastSolrReq = client.lastSolrRequests.get(i);
assertTrue(lastSolrReq instanceof QueryRequest);
QueryRequest lastQueryReq = (QueryRequest) lastSolrReq;
int index = Integer.parseInt(lastQueryReq.getParams().get("q"));
assertNull("Found same request twice: " + index, queryRequests[index]);
queryRequests[index] = lastQueryReq;
+ final String lastBasePath = client.lastBasePaths.get(i);
+ if (lastBasePath.equals(ep1.getBaseUrl())) {
+ numEndpointOne++;
+ } else if (lastBasePath.equals(ep2.getBaseUrl())) {
+ numEndpointTwo++;
+ }
+
LBSolrClient.Rsp lastRsp = null;
try {
lastRsp = responses.get(index).get();
@@ -271,55 +278,15 @@ public void testAsync() {
// It is the user's responsibility to shuffle the endpoints when using
// async. LB Http Solr Client will always try the passed-in endpoints
// in order. In this case, endpoint 1 gets all the requests!
- List ep1BasePaths = basePathsForEndpoint(testClient, ep1);
- List ep2BasePaths = basePathsForEndpoint(testClient, ep2);
- assertEquals(limit, basePathsForEndpoint(testClient, ep1).size());
- assertEquals(0, basePathsForEndpoint(testClient, ep2).size());
- }
- }
-
- private void checkSynchonousResponseContent(LBSolrClient.Rsp response, String qValue) {
- assertEquals("There should be one element in the response.", 1, response.getResponse().size());
- assertEquals(
- "The response key 'response' should echo the query.",
- qValue,
- response.getResponse().get("response"));
- }
-
- private void setEndpointToFail(
- LBHttp2SolrClient testClient, LBSolrClient.Endpoint ep) {
- ((MockHttpSolrClient) testClient.getClient(ep)).allRequestsShallFail = true;
- }
+ assertEquals(limit, numEndpointOne);
+ assertEquals(0, numEndpointTwo);
- private void setEndpointToSucceed(
- LBHttp2SolrClient testClient, LBSolrClient.Endpoint ep) {
- ((MockHttpSolrClient) testClient.getClient(ep)).allRequestsShallFail = false;
- }
-
- private List basePathsForEndpoint(
- LBHttp2SolrClient testClient, LBSolrClient.Endpoint ep) {
- return ((MockHttpSolrClient) testClient.getClient(ep)).lastBasePaths;
- }
-
- private List> lastSolrRequests(
- LBHttp2SolrClient testClient, LBSolrClient.Endpoint... endpoints) {
- return Arrays.stream(endpoints)
- .map(testClient::getClient)
- .map(MockHttpSolrClient.class::cast)
- .flatMap(c -> c.lastSolrRequests.stream())
- .toList();
- }
-
- public static class MockHttpSolrClientBuilder
- extends HttpSolrClientBuilderBase {
-
- @Override
- public MockHttpSolrClient build() {
- return new MockHttpSolrClient(baseSolrUrl, this);
+ assertEquals(limit, client.lastSolrRequests.size());
+ assertEquals(limit, client.lastCollections.size());
}
}
- public static class MockHttpSolrClient extends HttpSolrClientBase {
+ public static class MockHttpSolrClient extends Http2SolrClient {
public List> lastSolrRequests = new ArrayList<>();
@@ -327,13 +294,15 @@ public static class MockHttpSolrClient extends HttpSolrClientBase {
public List lastCollections = new ArrayList<>();
- public boolean allRequestsShallFail;
+ public String basePathToFail = null;
public String tmpBaseUrl = null;
- public boolean closeCalled;
+ protected MockHttpSolrClient(String serverBaseUrl, Builder builder) {
- protected MockHttpSolrClient(String serverBaseUrl, MockHttpSolrClientBuilder builder) {
+ // TODO: Consider creating an interface for Http*SolrClient
+ // so mocks can implement, not extend, and not actually need to
+ // build an (unused) client
super(serverBaseUrl, builder);
}
@@ -343,12 +312,25 @@ public NamedList request(final SolrRequest> request, String collection
lastSolrRequests.add(request);
lastBasePaths.add(tmpBaseUrl);
lastCollections.add(collection);
- if (allRequestsShallFail) {
+ if (tmpBaseUrl != null && tmpBaseUrl.equals(basePathToFail)) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "We should retry this.");
}
return generateResponse(request);
}
+ @Override
+ public R requestWithBaseUrl(
+ String baseUrl, SolrClientFunction clientFunction)
+ throws SolrServerException, IOException {
+ // This use of 'tmpBaseUrl' is NOT thread safe, but that's fine for our purposes here.
+ try {
+ tmpBaseUrl = baseUrl;
+ return clientFunction.apply(this);
+ } finally {
+ tmpBaseUrl = null;
+ }
+ }
+
@Override
public CompletableFuture> requestAsync(
final SolrRequest> solrRequest, String collection) {
@@ -356,7 +338,7 @@ public CompletableFuture> requestAsync(
lastSolrRequests.add(solrRequest);
lastBasePaths.add(tmpBaseUrl);
lastCollections.add(collection);
- if (allRequestsShallFail) {
+ if (tmpBaseUrl != null && tmpBaseUrl.equals(basePathToFail)) {
cf.completeExceptionally(
new SolrException(SolrException.ErrorCode.SERVER_ERROR, "We should retry this."));
} else {
@@ -369,32 +351,5 @@ private NamedList generateResponse(SolrRequest> solrRequest) {
String id = solrRequest.getParams().get("q");
return new NamedList<>(Collections.singletonMap("response", id));
}
-
- @Override
- public void close() throws IOException {
- closeCalled = true;
- }
-
- @Override
- protected boolean isFollowRedirects() {
- return false;
- }
-
- @Override
- protected boolean processorAcceptsMimeType(
- Collection processorSupportedContentTypes, String mimeType) {
- return false;
- }
-
- @Override
- protected String allProcessorSupportedContentTypesCommaDelimited(
- Collection processorSupportedContentTypes) {
- return null;
- }
-
- @Override
- protected void updateDefaultMimeTypeForParser() {
- // no-op
- }
}
}
diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
index 6d02cf82185..3a85f4e0f04 100644
--- a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
+++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java
@@ -108,6 +108,20 @@ public static SolrDocument generateSolrDocumentWithChildDocs() {
return parentDocument;
}
+ @Test
+ public void testPrimitiveArrays() throws Exception {
+ List types = new ArrayList<>();
+
+ types.add(new float[] {1.0678f, 4.094565f, 0.000456f});
+ types.add(new double[] {1.0678d, 4.094565d, 0.000456d});
+ types.add(new int[] {145543, 4546354, 9789857});
+ types.add(new long[] {145543L, 4546354L, 9789857L});
+ types.add(new short[] {43, 454, 857});
+ types.add(new boolean[] {true, true, false});
+
+ compareObjects((List>) getObject(getBytes(types)), types);
+ }
+
private List generateAllDataTypes() {
List types = new ArrayList<>();
@@ -223,6 +237,23 @@ private void compareObjects(List> unmarshalledObj, List> matchObj) {
} else if (unmarshalledObj.get(i) instanceof SolrInputField
&& matchObj.get(i) instanceof SolrInputField) {
assertTrue(assertSolrInputFieldEquals(unmarshalledObj.get(i), matchObj.get(i)));
+ } else if (unmarshalledObj.get(i) instanceof float[] a
+ && matchObj.get(i) instanceof float[] e) {
+ assertArrayEquals(e, a, 0.000000f);
+ } else if (unmarshalledObj.get(i) instanceof double[] a
+ && matchObj.get(i) instanceof double[] e) {
+ assertArrayEquals(e, a, 0.000000d);
+ } else if (unmarshalledObj.get(i) instanceof long[] a
+ && matchObj.get(i) instanceof long[] e) {
+ assertArrayEquals(e, a);
+ } else if (unmarshalledObj.get(i) instanceof int[] a && matchObj.get(i) instanceof int[] e) {
+ assertArrayEquals(e, a);
+ } else if (unmarshalledObj.get(i) instanceof short[] a
+ && matchObj.get(i) instanceof short[] e) {
+ assertArrayEquals(e, a);
+ } else if (unmarshalledObj.get(i) instanceof boolean[] a
+ && matchObj.get(i) instanceof boolean[] e) {
+ assertArrayEquals(e, a);
} else {
assertEquals(unmarshalledObj.get(i), matchObj.get(i));
}
diff --git a/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java b/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
index 4b0c7c47529..5a830e35aa8 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/SSLTestConfig.java
@@ -16,7 +16,6 @@
*/
package org.apache.solr.util;
-import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
@@ -27,7 +26,6 @@
import java.security.UnrecoverableKeyException;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
-import java.util.regex.Pattern;
import javax.net.ssl.SSLContext;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
@@ -38,7 +36,6 @@
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.ssl.SSLContexts;
-import org.apache.lucene.util.Constants;
import org.apache.solr.client.solrj.embedded.SSLConfig;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpClientUtil.SocketFactoryRegistryProvider;
@@ -108,10 +105,6 @@ public SSLTestConfig(boolean useSsl, boolean clientAuth, boolean checkPeerName)
this.clientAuth = clientAuth;
this.checkPeerName = checkPeerName;
- if (this.useSsl) {
- assumeSslIsSafeToTest();
- }
-
final String resourceName =
checkPeerName ? TEST_KEYSTORE_LOCALHOST_RESOURCE : TEST_KEYSTORE_BOGUSHOST_RESOURCE;
trustStore = keyStore = Resource.newClassPathResource(resourceName);
@@ -414,50 +407,4 @@ public void reseed(SecureRandomParameters params) {
/* NOOP */
}
}
-
- /**
- * Helper method for sanity checking if it's safe to use SSL on this JVM
- *
- * @see SOLR-12988
- * @throws org.junit.internal.AssumptionViolatedException if this JVM is known to have SSL
- * problems
- */
- public static void assumeSslIsSafeToTest() {
- if (Constants.JVM_NAME.startsWith("OpenJDK")
- || Constants.JVM_NAME.startsWith("Java HotSpot(TM)")) {
- RandomizedTest.assumeFalse(
- "Test (or randomization for this seed) wants to use SSL, "
- + "but SSL is known to fail on your JVM: "
- + Constants.JVM_NAME
- + " / "
- + Constants.JVM_VERSION,
- isOpenJdkJvmVersionKnownToHaveProblems(Constants.JVM_VERSION));
- }
- }
-
- /**
- * package visibility for tests
- *
- * @see Constants#JVM_VERSION
- * @lucene.internal
- */
- static boolean isOpenJdkJvmVersionKnownToHaveProblems(final String jvmVersion) {
- // TODO: would be nice to replace with Runtime.Version once we don't have to
- // worry about java8 support when backporting to branch_8x
- return KNOWN_BAD_OPENJDK_JVMS.matcher(jvmVersion).matches();
- }
-
- private static final Pattern KNOWN_BAD_OPENJDK_JVMS =
- Pattern.compile( // 11 to 11.0.2 were all definitely problematic
- // - https://bugs.openjdk.java.net/browse/JDK-8212885
- // - https://bugs.openjdk.java.net/browse/JDK-8213202
- "(^11(\\.0(\\.0|\\.1|\\.2)?)?($|(\\_|\\+|\\-).*$))|"
- +
- // early (pre-ea) "testing" builds of 11, 12, and 13 were also buggy
- // - https://bugs.openjdk.java.net/browse/JDK-8224829
- "(^(11|12|13).*-testing.*$)|"
- +
- // So far, all 13-ea builds (up to 13-ea-26) have been buggy
- // - https://bugs.openjdk.java.net/browse/JDK-8226338
- "(^13-ea.*$)");
}
diff --git a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java b/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java
deleted file mode 100644
index 319f78923d8..00000000000
--- a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.util;
-
-import java.util.Arrays;
-import java.util.List;
-import org.apache.lucene.util.Constants;
-import org.apache.solr.SolrTestCase;
-
-public class TestSSLTestConfig extends SolrTestCase {
-
- /** Sanity check that our JVM version parsing logic seems correct */
- public void testIsOpenJdkJvmVersionKnownToHaveProblems() {
- final List rel_suffixes = Arrays.asList("", "+42");
- final List ea_suffixes = Arrays.asList("-ea", "-ea+42");
- final List suffixes = Arrays.asList("", "+42", "-ea", "-ea+42");
-
- // as far as we know, any Java 8, 9 or 10 impl should be fine...
- for (String base :
- Arrays.asList(
- "1.8", "1.8.0", "1.8.1", "9", "9.0", "9.1", "9.0.0", "9.1.0", "9.1.1", "10", "10.0",
- "10.1", "10.0.0", "10.1.0", "10.1.1")) {
- for (String suffix : suffixes) {
- final String v = base + suffix;
- assertFalse(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
-
- // Known Problems start with Java 11...
-
- // java 11 releases below 11.0.3 were all bad...
- for (String bad : Arrays.asList("11", "11.0", "11.0.1", "11.0.2")) {
- for (String suffix : suffixes) {
- final String v = bad + suffix;
- assertTrue(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
-
- // ...but 11.0.3 or higher should be ok.
- for (String ok : Arrays.asList("11.0.3", "11.0.42", "11.1", "11.1.42")) {
- for (String suffix : suffixes) {
- final String v = ok + suffix;
- assertFalse(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
-
- // As far as we know/hope, all "official" java 12 and higher impls should be fine...
- for (String major : Arrays.asList("12", "13", "99")) {
- for (String minor : Arrays.asList("", ".0", ".42", ".0.42")) {
- for (String suffix : rel_suffixes) {
- final String v = major + minor + suffix;
- assertFalse(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
- }
-
- // ...but pre EA "testing" builds of 11, 12, and 13 are all definitely problematic...
- for (String major : Arrays.asList("11", "12", "13")) {
- for (String suffix : suffixes) {
- final String v = major + "-testing" + suffix;
- assertTrue(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
-
- // ...and all 13-ea builds (so far) have definitely been problematic.
- for (String suffix : ea_suffixes) {
- final String v = "13" + suffix;
- assertTrue(v, SSLTestConfig.isOpenJdkJvmVersionKnownToHaveProblems(v));
- }
- }
-
- public void testFailIfUserRunsTestsWithJVMThatHasKnownSSLBugs() {
- // NOTE: If there is some future JVM version, where all available "ea" builds are known to be
- // buggy, but we still want to be able to use for running tests (ie: via jenkins) to look for
- // *other* bugs, then those -ea versions can be "white listed" here...
-
- try {
- SSLTestConfig.assumeSslIsSafeToTest();
- } catch (org.junit.AssumptionViolatedException ave) {
- fail(
- "Current JVM ("
- + Constants.JVM_NAME
- + " / "
- + Constants.JVM_VERSION
- + ") is known to have SSL Bugs. Other tests that (explicitly or via randomization) "
- + " use SSL will be SKIPed");
- }
- }
-}