diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a7a5ef5683c..2fec97d6129 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -36,7 +36,8 @@ Improvements
 
 Optimizations
 ---------------------
-(No changes)
+* SOLR-17568: The CLI bin/solr export tool now contacts the appropriate nodes directly for data instead of proxying through a single node.
+ (David Smiley)
 
 Bug Fixes
 ---------------------
@@ -95,6 +96,10 @@ Deprecation Removals
 
 * SOLR-17576: Remove deprecated master/slave option language from ReplicationHandler. (Eric Pugh)
 
+* SOLR-16781: Support for `<lib/>` directives (used in solrconfig.xml to add JARs on a core-by-core basis) has been removed.  Users
+  looking for similar functionality can use Solr's package manager.  Users who don't need to vary JAR access on a per-core basis
+  have many options, including the `<sharedLib/>` tag and directly modifying Solr's classpath prior to JVM startup. (Jason Gerlowski)
+
 Dependency Upgrades
 ---------------------
 (No changes)
@@ -132,6 +137,8 @@ Other Changes
 
 * SOLR-16903: Update CLI tools to use java.nio.file.Path instead of java.io.File (Andrey Bozhko)
 
+* SOLR-17568: SolrCloud no longer reroutes/proxies a core request to another node if not found locally. (David Smiley)
+
 ==================  9.8.0 ==================
 New Features
 ---------------------
@@ -231,6 +238,8 @@ Bug Fixes
 
 * SOLR-17575: Fixed broken backwards compatibility with the legacy "langid.whitelist" config in Solr Langid. (Jan Høydahl, Alexander Zagniotov)
 
+* SOLR-17574: Fix AllowListUrlChecker when liveNodes changes. Remove ClusterState.getHostAllowList (Bruno Roustant, David Smiley)
+
 Dependency Upgrades
 ---------------------
 (No changes)
diff --git a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
index 6912af39510..d156710a675 100644
--- a/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
+++ b/solr/core/src/java/org/apache/solr/api/V2HttpCall.java
@@ -165,7 +165,7 @@ public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
           core = getCoreByCollection(collectionName, isPreferLeader);
           if (core == null) {
             // this collection exists , but this node does not have a replica for that collection
-            extractRemotePath(collectionName, collectionName);
+            extractRemotePath(collectionName);
             if (action == REMOTEQUERY) {
               action = ADMIN_OR_REMOTEQUERY;
               coreUrl = coreUrl.replace("/solr/", "/solr/____v2/c/");
diff --git a/solr/core/src/java/org/apache/solr/cli/ExportTool.java b/solr/core/src/java/org/apache/solr/cli/ExportTool.java
index 59bca949dc1..b5a58377ba3 100644
--- a/solr/core/src/java/org/apache/solr/cli/ExportTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ExportTool.java
@@ -660,8 +660,8 @@ class CoreHandler {
       }
 
       boolean exportDocsFromCore() throws IOException, SolrServerException {
-
-        try (SolrClient client = CLIUtils.getSolrClient(baseurl, credentials)) {
+        // use the replica's own base URL, not the baseurl in scope, which could point to any node
+        try (SolrClient client = CLIUtils.getSolrClient(replica.getBaseUrl(), credentials)) {
           expectedDocs = getDocCount(replica.getCoreName(), client, query);
           QueryRequest request;
           ModifiableSolrParams params = new ModifiableSolrParams();
diff --git a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
index 783d58e0a57..6d7b349911c 100644
--- a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
@@ -647,10 +647,15 @@ protected Map<String, Object> startSolr(
     if (!isWindows && cwdPath.length() > 1 && solrHome.startsWith(cwdPath))
       solrHome = solrHome.substring(cwdPath.length() + 1);
 
+    final var syspropArg =
+        ("techproducts".equals(cli.getOptionValue(EXAMPLE_OPTION)))
+            ? "-Dsolr.modules=clustering,extraction,langid,ltr,scripting -Dsolr.ltr.enabled=true -Dsolr.clustering.enabled=true"
+            : "";
+
     String startCmd =
         String.format(
             Locale.ROOT,
-            "\"%s\" start %s -p %d --solr-home \"%s\" %s %s %s %s %s %s %s",
+            "\"%s\" start %s -p %d --solr-home \"%s\" %s %s %s %s %s %s %s %s",
             callScript,
             cloudModeArg,
             port,
@@ -661,7 +666,8 @@ protected Map<String, Object> startSolr(
             forceArg,
             verboseArg,
             extraArgs,
-            jvmOptsArg);
+            jvmOptsArg,
+            syspropArg);
     startCmd = startCmd.replaceAll("\\s+", " ").trim(); // for pretty printing
 
     echo("\nStarting up Solr on port " + port + " using command:");
diff --git a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
index 53160cd75ee..a9f9b417abf 100644
--- a/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
+++ b/solr/core/src/java/org/apache/solr/core/ConfigSetService.java
@@ -272,7 +272,7 @@ public final ConfigSet loadConfigSet(CoreDescriptor dcore) {
       NamedList<?> properties = loadConfigSetProperties(dcore, coreLoader);
       boolean trusted = isConfigSetTrusted(coreLoader);
 
-      SolrConfig solrConfig = createSolrConfig(dcore, coreLoader, trusted);
+      SolrConfig solrConfig = createSolrConfig(dcore, coreLoader);
       return new ConfigSet(
           configSetName(dcore),
           solrConfig,
@@ -314,13 +314,12 @@ public ConfigSetService(SolrResourceLoader loader, boolean shareSchema) {
    *
    * @param cd the core's CoreDescriptor
    * @param loader the core's resource loader
-   * @param isTrusted is the configset trusted?
    * @return a SolrConfig object
    */
-  protected SolrConfig createSolrConfig(
-      CoreDescriptor cd, SolrResourceLoader loader, boolean isTrusted) throws IOException {
+  protected SolrConfig createSolrConfig(CoreDescriptor cd, SolrResourceLoader loader)
+      throws IOException {
     return SolrConfig.readFromResourceLoader(
-        loader, cd.getConfigName(), isTrusted, cd.getSubstitutableProperties());
+        loader, cd.getConfigName(), cd.getSubstitutableProperties());
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index b3c95a23fe7..e7adaf8d2f6 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -17,7 +17,6 @@
 package org.apache.solr.core;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.common.params.CommonParams.PATH;
 import static org.apache.solr.core.ConfigOverlay.ZNODEVER;
 import static org.apache.solr.core.SolrConfig.PluginOpts.LAZY;
 import static org.apache.solr.core.SolrConfig.PluginOpts.MULTI_OK;
@@ -31,7 +30,6 @@
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.lang.invoke.MethodHandles;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -96,7 +94,7 @@
 
 /**
  * Provides a static reference to a Config object modeling the main configuration data for a Solr
- * instance -- typically found in "solrconfig.xml".
+ * core -- typically found in "solrconfig.xml".
  */
 public class SolrConfig implements MapSerializable {
 
@@ -143,16 +141,13 @@ public enum PluginOpts {
    * @param name the configuration name used by the loader if the stream is null
    */
   public SolrConfig(Path instanceDir, String name) throws IOException {
-    this(new SolrResourceLoader(instanceDir), name, true, null);
+    this(new SolrResourceLoader(instanceDir), name, null);
   }
 
   public static SolrConfig readFromResourceLoader(
-      SolrResourceLoader loader,
-      String name,
-      boolean isConfigsetTrusted,
-      Properties substitutableProperties) {
+      SolrResourceLoader loader, String name, Properties substitutableProperties) {
     try {
-      return new SolrConfig(loader, name, isConfigsetTrusted, substitutableProperties);
+      return new SolrConfig(loader, name, substitutableProperties);
     } catch (Exception e) {
       String resource;
       if (loader instanceof ZkSolrResourceLoader) {
@@ -196,15 +191,9 @@ public InputStream apply(String s) {
    *
    * @param loader the resource loader
    * @param name the configuration name
-   * @param isConfigsetTrusted false if configset was uploaded using unsecured configset upload API,
-   *     true otherwise
    * @param substitutableProperties optional properties to substitute into the XML
    */
-  private SolrConfig(
-      SolrResourceLoader loader,
-      String name,
-      boolean isConfigsetTrusted,
-      Properties substitutableProperties) {
+  private SolrConfig(SolrResourceLoader loader, String name, Properties substitutableProperties) {
     this.resourceLoader = loader;
     this.resourceName = name;
     this.substituteProperties = substitutableProperties;
@@ -237,7 +226,7 @@ private SolrConfig(
       rootDataHashCode = this.root.txt().hashCode();
 
       getRequestParams();
-      initLibs(loader, isConfigsetTrusted);
+      initLibs(loader);
       String val =
           root.child(
                   IndexSchema.LUCENE_MATCH_VERSION_PARAM,
@@ -934,11 +923,10 @@ public PluginInfo getPluginInfo(String type) {
         SolrException.ErrorCode.SERVER_ERROR, "Multiple plugins configured for type: " + type);
   }
 
-  private void initLibs(SolrResourceLoader loader, boolean isConfigsetTrusted) {
+  private void initLibs(SolrResourceLoader loader) {
     // TODO Want to remove SolrResourceLoader.getInstancePath; it can be on a Standalone subclass.
     // For Zk subclass, it's needed for the time being as well.  We could remove that one if we
-    // remove two things in SolrCloud: (1) instancePath/lib  and (2) solrconfig lib directives with
-    // relative paths. Can wait till 9.0.
+    // remove "instancePath/lib" in SolrCloud. Can wait till 9.0.
     Path instancePath = loader.getInstancePath();
     List<URL> urls = new ArrayList<>();
 
@@ -950,48 +938,15 @@ private void initLibs(SolrResourceLoader loader, boolean isConfigsetTrusted) {
         log.warn("Couldn't add files from {} to classpath: {}", libPath, e);
       }
     }
-
-    List<ConfigNode> nodes = root.getAll("lib");
-    if (nodes != null && nodes.size() > 0) {
-      if (!isConfigsetTrusted) {
-        throw new SolrException(
-            ErrorCode.UNAUTHORIZED,
-            "The configset for this collection was uploaded without any authentication in place,"
-                + " and use of <lib> is not available for collections with untrusted configsets. To use this component, re-upload the configset"
-                + " after enabling authentication and authorization.");
-      }
-
-      for (int i = 0; i < nodes.size(); i++) {
-        ConfigNode node = nodes.get(i);
-        String baseDir = node.attr("dir");
-        String path = node.attr(PATH);
-        if (null != baseDir) {
-          // :TODO: add support for a simpler 'glob' mutually exclusive of regex
-          Path dir = instancePath.resolve(baseDir);
-          String regex = node.attr("regex");
-          try {
-            if (regex == null) urls.addAll(SolrResourceLoader.getURLs(dir));
-            else urls.addAll(SolrResourceLoader.getFilteredURLs(dir, regex));
-          } catch (IOException e) {
-            log.warn("Couldn't add files from {} filtered by {} to classpath: {}", dir, regex, e);
-          }
-        } else if (null != path) {
-          final Path dir = instancePath.resolve(path);
-          try {
-            urls.add(dir.toUri().toURL());
-          } catch (MalformedURLException e) {
-            log.warn("Couldn't add file {} to classpath: {}", dir, e);
-          }
-        } else {
-          throw new RuntimeException("lib: missing mandatory attributes: 'dir' or 'path'");
-        }
-      }
-    }
-
     if (!urls.isEmpty()) {
       loader.addToClassLoader(urls);
       loader.reloadLuceneSPI();
     }
+
+    List<ConfigNode> nodes = root.getAll("lib");
+    if (nodes != null && nodes.size() > 0) {
+      log.warn("<lib/> entries no longer supported in solrconfig.xml; ignoring...");
+    }
   }
 
   public int getMultipartUploadLimitKB() {
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 2424addaff8..d059ad38d53 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -76,7 +76,6 @@
 import org.apache.solr.client.api.model.SolrJerseyResponse;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.CoreAdminParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.SolrParams;
@@ -347,14 +346,6 @@ private void getFileStream(SolrParams solrParams, SolrQueryResponse rsp, SolrQue
       return;
     }
 
-    if (solrParams.getParams(CommonParams.WT) == null) {
-      reportErrorOnResponse(
-          rsp,
-          "Missing wt parameter",
-          new SolrException(SolrException.ErrorCode.BAD_REQUEST, "wt not specified in request"));
-      return;
-    }
-
     coreReplicationAPI.fetchFile(
         fileName,
         dirType,
diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
index f34793aa744..1370c775540 100644
--- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
+++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerAPI.java
@@ -254,7 +254,7 @@ public void updateFileContents(SolrQueryRequest req, SolrQueryResponse rsp)
       try {
         InMemoryResourceLoader loader =
             new InMemoryResourceLoader(coreContainer, mutableId, SOLR_CONFIG_XML, data);
-        SolrConfig.readFromResourceLoader(loader, SOLR_CONFIG_XML, requestIsTrusted, null);
+        SolrConfig.readFromResourceLoader(loader, SOLR_CONFIG_XML, null);
       } catch (Exception exc) {
         updateFileError = exc;
       }
diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
index 0b14b0d4cf4..955aa1d98a9 100644
--- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
+++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
@@ -677,9 +677,7 @@ ManagedIndexSchema deleteNestedDocsFieldsIfNeeded(ManagedIndexSchema schema, boo
 
   SolrConfig loadSolrConfig(String configSet) {
     ZkSolrResourceLoader zkLoader = zkLoaderForConfigSet(configSet);
-    boolean trusted = isConfigSetTrusted(configSet);
-
-    return SolrConfig.readFromResourceLoader(zkLoader, SOLR_CONFIG_XML, trusted, null);
+    return SolrConfig.readFromResourceLoader(zkLoader, SOLR_CONFIG_XML, null);
   }
 
   ManagedIndexSchema loadLatestSchema(String configSet) {
diff --git a/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java b/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java
index 9bcede9b060..9fbffc4cdfb 100644
--- a/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java
+++ b/solr/core/src/java/org/apache/solr/security/AllowListUrlChecker.java
@@ -27,6 +27,7 @@
 import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.core.NodeConfig;
@@ -85,6 +86,9 @@ public String toString() {
   /** Allow list of hosts. Elements in the list will be host:port (no protocol or context). */
   private final Set<String> hostAllowList;
 
+  private volatile Set<String> liveHostUrlsCache;
+  private volatile Set<String> liveNodesCache;
+
   /**
    * @param urlAllowList List of allowed URLs. URLs must be well-formed, missing protocol is
    *     tolerated. An empty list means there is no explicit allow-list of URLs, in this case no URL
@@ -136,11 +140,10 @@ public void checkAllowList(List<String> urls) throws MalformedURLException {
    */
   public void checkAllowList(List<String> urls, ClusterState clusterState)
       throws MalformedURLException {
-    Set<String> clusterHostAllowList =
-        clusterState == null ? Collections.emptySet() : clusterState.getHostAllowList();
+    Set<String> liveHostUrls = getLiveHostUrls(clusterState);
     for (String url : urls) {
       String hostPort = parseHostPort(url);
-      if (clusterHostAllowList.stream().noneMatch(hostPort::equalsIgnoreCase)
+      if (liveHostUrls.stream().noneMatch(hostPort::equalsIgnoreCase)
           && hostAllowList.stream().noneMatch(hostPort::equalsIgnoreCase)) {
         throw new SolrException(
             SolrException.ErrorCode.FORBIDDEN,
@@ -154,6 +157,33 @@ public void checkAllowList(List<String> urls, ClusterState clusterState)
     }
   }
 
+  /**
+   * Gets the set of live host URLs (host:port) built from the set of live nodes. The set is
+   * cached and reused until the live nodes change.
+   */
+  private Set<String> getLiveHostUrls(ClusterState clusterState) {
+    if (clusterState == null) {
+      return Set.of();
+    }
+    if (liveHostUrlsCache == null || clusterState.getLiveNodes() != liveNodesCache) {
+      synchronized (this) {
+        Set<String> liveNodes = clusterState.getLiveNodes();
+        if (liveHostUrlsCache == null || liveNodes != liveNodesCache) {
+          liveHostUrlsCache = buildLiveHostUrls(liveNodes);
+          liveNodesCache = liveNodes;
+        }
+      }
+    }
+    return liveHostUrlsCache;
+  }
+
+  @VisibleForTesting
+  Set<String> buildLiveHostUrls(Set<String> liveNodes) {
+    return liveNodes.stream()
+        .map((liveNode) -> liveNode.substring(0, liveNode.indexOf('_')))
+        .collect(Collectors.toSet());
+  }
+
   /** Whether this checker has been created with a non-empty allow-list of URLs. */
   public boolean hasExplicitAllowList() {
     return !hostAllowList.isEmpty();
diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
index 7241be77390..1e6c9f42e29 100644
--- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
+++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
@@ -40,7 +40,6 @@
 import java.lang.invoke.MethodHandles;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Enumeration;
@@ -279,7 +278,7 @@ protected void init() throws Exception {
         } else {
           // if we couldn't find it locally, look on other nodes
           if (idx > 0) {
-            extractRemotePath(collectionName, origCorename);
+            extractRemotePath(collectionName);
             if (action == REMOTEQUERY) {
               path = path.substring(idx);
               return;
@@ -462,10 +461,10 @@ protected void extractHandlerFromURLPath(SolrRequestParsers parser) throws Excep
     }
   }
 
-  protected void extractRemotePath(String collectionName, String origCorename)
+  protected void extractRemotePath(String collectionName)
       throws KeeperException, InterruptedException, SolrException {
     assert core == null;
-    coreUrl = getRemoteCoreUrl(collectionName, origCorename);
+    coreUrl = getRemoteCoreUrl(collectionName);
     // don't proxy for internal update requests
     invalidStates = checkStateVersionsAreValid(queryParams.get(CloudSolrClient.STATE_VERSION));
     if (coreUrl != null
@@ -1090,39 +1089,16 @@ private SolrCore checkProps(ZkNodeProps zkProps) {
     return core;
   }
 
-  private List<Slice> getSlicesForAllCollections(ClusterState clusterState, boolean activeSlices) {
-    // looks across *all* collections
-    if (activeSlices) {
-      return clusterState
-          .collectionStream()
-          .flatMap(coll -> Arrays.stream(coll.getActiveSlicesArr()))
-          .toList();
-    } else {
-      return clusterState.collectionStream().flatMap(coll -> coll.getSlices().stream()).toList();
-    }
-  }
-
-  protected String getRemoteCoreUrl(String collectionName, String origCorename)
-      throws SolrException {
+  protected String getRemoteCoreUrl(String collectionName) throws SolrException {
     ClusterState clusterState = cores.getZkController().getClusterState();
     final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName);
-    Slice[] slices = (docCollection != null) ? docCollection.getActiveSlicesArr() : null;
-    List<Slice> activeSlices;
-    boolean byCoreName = false;
+    if (docCollection == null) {
+      return null;
+    }
+    Collection<Slice> activeSlices = docCollection.getActiveSlices();
 
     int totalReplicas = 0;
 
-    if (slices == null) {
-      byCoreName = true;
-      // all collections!
-      activeSlices = getSlicesForAllCollections(clusterState, true);
-      if (activeSlices.isEmpty()) {
-        activeSlices = getSlicesForAllCollections(clusterState, false);
-      }
-    } else {
-      activeSlices = List.of(slices);
-    }
-
     for (Slice s : activeSlices) {
       totalReplicas += s.getReplicas().size();
     }
@@ -1145,48 +1121,30 @@ protected String getRemoteCoreUrl(String collectionName, String origCorename)
           "No active replicas found for collection: " + collectionName);
     }
 
-    String coreUrl =
-        getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, true);
+    String coreUrl = getCoreUrl(activeSlices, true, clusterState.getLiveNodes());
 
     if (coreUrl == null) {
-      coreUrl =
-          getCoreUrl(collectionName, origCorename, clusterState, activeSlices, byCoreName, false);
+      coreUrl = getCoreUrl(activeSlices, false, clusterState.getLiveNodes());
     }
 
     return coreUrl;
   }
 
   private String getCoreUrl(
-      String collectionName,
-      String origCorename,
-      ClusterState clusterState,
-      List<Slice> slices,
-      boolean byCoreName,
-      boolean activeReplicas) {
-    String coreUrl;
-    Set<String> liveNodes = clusterState.getLiveNodes();
+      Collection<Slice> slices, boolean activeReplicas, Set<String> liveNodes) {
 
-    List<Slice> shuffledSlices;
-    if (slices.size() < 2) {
-      shuffledSlices = slices;
-    } else {
-      shuffledSlices = new ArrayList<>(slices);
-      Collections.shuffle(shuffledSlices, Utils.RANDOM);
-    }
+    Iterator<Slice> shuffledSlices = new RandomIterator<>(Utils.RANDOM, slices);
+    while (shuffledSlices.hasNext()) {
+      Slice slice = shuffledSlices.next();
 
-    for (Slice slice : shuffledSlices) {
-      List<Replica> randomizedReplicas = new ArrayList<>(slice.getReplicas());
-      Collections.shuffle(randomizedReplicas, Utils.RANDOM);
+      Iterator<Replica> shuffledReplicas = new RandomIterator<>(Utils.RANDOM, slice.getReplicas());
+      while (shuffledReplicas.hasNext()) {
+        Replica replica = shuffledReplicas.next();
 
-      for (Replica replica : randomizedReplicas) {
         if (!activeReplicas
             || (liveNodes.contains(replica.getNodeName())
                 && replica.getState() == Replica.State.ACTIVE)) {
 
-          if (byCoreName && !Objects.equals(origCorename, replica.getStr(CORE_NAME_PROP))) {
-            // if it's by core name, make sure they match
-            continue;
-          }
           if (Objects.equals(replica.getBaseUrl(), cores.getZkController().getBaseUrl())) {
             // don't count a local core
             continue;
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml
index 1020db8319e..cbfe9752574 100644
--- a/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-test-misc.xml
@@ -28,12 +28,6 @@
   <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.MockDirectoryFactory}"/>
   <schemaFactory class="ClassicIndexSchemaFactory"/>
 
-  <!-- see TestConfig.testLib() -->
-  <lib dir="../../lib-dirs/a" />
-  <lib dir="../../lib-dirs/b" regex="b." />
-  <lib dir="../../lib-dirs/c" regex="c1" />
-  <lib path="../../lib-dirs/d/d1/" />
-
   <!-- see TestConfig.testJavaProperty -->
   <propTest attr1="${solr.test.sys.prop1}-$${literal}"
             attr2="${non.existent.sys.prop:default-from-config}">prefix-${solr.test.sys.prop2}-suffix</propTest>
diff --git a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml b/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml
deleted file mode 100644
index 25a37e6eee3..00000000000
--- a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/managed-schema.xml
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<schema name="minimal" version="1.7">
- <types>
-  <fieldType name="string" class="solr.StrField"/>
- </types>
- <fields>
-   <dynamicField name="*" type="string" indexed="true" stored="true" />
- </fields>
-</schema>
diff --git a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml b/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml
deleted file mode 100644
index 315bfffbf6c..00000000000
--- a/solr/core/src/test-files/solr/configsets/upload/with-lib-directive/solrconfig.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!-- This is a "kitchen sink" config file that tests can use.
-     When writting a new test, feel free to add *new* items (plugins,
-     config options, etc...) as long as they don't break any existing
-     tests.  if you need to test something esoteric please add a new
-     "solrconfig-your-esoteric-purpose.xml" config file.
-
-     Note in particular that this test is used by MinimalSchemaTest so
-     Anything added to this file needs to work correctly even if there
-     is now uniqueKey or defaultSearch Field.
-  -->
-
-<config>
-
-  <dataDir>${solr.data.dir:}</dataDir>
-
-  <directoryFactory name="DirectoryFactory"
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
-
-  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-
-  <lib dir="${solr.install.dir:../../../..}/modules/ltr/lib/" regex=".*\.jar" />
-
-  <requestHandler name="/select" class="solr.SearchHandler">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <str name="indent">true</str>
-      <str name="df">text</str>
-    </lst>
-
-  </requestHandler>
-
-  <requestHandler name="/update" class="solr.UpdateRequestHandler"  />
-</config>
-
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index b41d698ac4c..01614b7218e 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -1421,48 +1421,6 @@ public void testUploadWithScriptUpdateProcessor() throws Exception {
     scriptRequest("newcollection2");
   }
 
-  @Test
-  public void testUploadWithLibDirective() throws Exception {
-    final String untrustedSuffix = "-untrusted";
-    uploadConfigSetWithAssertions("with-lib-directive", untrustedSuffix, null);
-    // try to create a collection with the uploaded configset
-    ignoreException("without any authentication in place");
-    Throwable thrown =
-        expectThrows(
-            SolrClient.RemoteSolrException.class,
-            () -> {
-              createCollection(
-                  "newcollection3",
-                  "with-lib-directive" + untrustedSuffix,
-                  1,
-                  1,
-                  cluster.getSolrClient());
-            });
-    unIgnoreException("without any authentication in place");
-
-    assertThat(thrown.getMessage(), containsString("Underlying core creation failed"));
-
-    // Authorization on
-    final String trustedSuffix = "-trusted";
-    uploadConfigSetWithAssertions("with-lib-directive", trustedSuffix, "solr");
-    // try to create a collection with the uploaded configset
-    CollectionAdminResponse resp =
-        createCollection(
-            "newcollection3", "with-lib-directive" + trustedSuffix, 1, 1, cluster.getSolrClient());
-
-    SolrInputDocument doc = sdoc("id", "4055", "subject", "Solr");
-    cluster.getSolrClient().add("newcollection3", doc);
-    cluster.getSolrClient().commit("newcollection3");
-    assertEquals(
-        "4055",
-        cluster
-            .getSolrClient()
-            .query("newcollection3", params("q", "*:*"))
-            .getResults()
-            .get(0)
-            .get("id"));
-  }
-
   @Test
   public void testUploadWithForbiddenContent() throws Exception {
     // Uploads a config set containing a script, a class file and jar file, will return 400 error
diff --git a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java
index ee9177b2105..4c348104b35 100644
--- a/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java
+++ b/solr/core/src/test/org/apache/solr/core/TestConfLoadPerf.java
@@ -84,7 +84,7 @@ public InputStream openResource(String resource) throws IOException {
     long startTime = System.currentTimeMillis();
     int numReads = 100;
     for (int i = 0; i < numReads; i++) {
-      allConfigs.add(SolrConfig.readFromResourceLoader(srl, "solrconfig.xml", true, null));
+      allConfigs.add(SolrConfig.readFromResourceLoader(srl, "solrconfig.xml", null));
     }
     assertEquals(numReads, allConfigs.size());
     System.gc();
diff --git a/solr/core/src/test/org/apache/solr/core/TestConfig.java b/solr/core/src/test/org/apache/solr/core/TestConfig.java
index 2c17cbf1e1a..745fa21c4c5 100644
--- a/solr/core/src/test/org/apache/solr/core/TestConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestConfig.java
@@ -45,20 +45,6 @@ public static void beforeClass() throws Exception {
   public void testLib() throws IOException {
     SolrResourceLoader loader = h.getCore().getResourceLoader();
     InputStream data = null;
-    String[] expectedFiles =
-        new String[] {
-          "empty-file-main-lib.txt",
-          "empty-file-a1.txt",
-          "empty-file-a2.txt",
-          "empty-file-b1.txt",
-          "empty-file-b2.txt",
-          "empty-file-c1.txt"
-        };
-    for (String f : expectedFiles) {
-      data = loader.openResource(f);
-      assertNotNull("Should have found file " + f, data);
-      data.close();
-    }
     String[] unexpectedFiles = new String[] {"empty-file-c2.txt", "empty-file-d2.txt"};
     for (String f : unexpectedFiles) {
       data = null;
diff --git a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java
index 1efa74d2d03..b04420664e8 100644
--- a/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java
+++ b/solr/core/src/test/org/apache/solr/core/TestMinimalConfig.java
@@ -29,8 +29,7 @@ public static void beforeClass() throws Exception {
     initCore("solrconfig-minimal.xml", "schema-minimal.xml");
   }
 
-  // Make sure the content of the lib/ core subfolder is loaded even if there is no <lib> node in
-  // the solrconfig
+  // Make sure the content of the lib/ core subfolder is loaded
   @Test
   public void testLib() throws IOException {
     SolrResourceLoader loader = h.getCore().getResourceLoader();
diff --git a/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java b/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java
index 541d2d845f3..970fa7bb48a 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/TestShardHandlerFactory.java
@@ -17,21 +17,16 @@
 package org.apache.solr.handler.component;
 
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.hasItem;
 import static org.hamcrest.CoreMatchers.instanceOf;
-import static org.hamcrest.CoreMatchers.is;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.impl.LBSolrClient;
 import org.apache.solr.client.solrj.request.QueryRequest;
-import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
@@ -155,18 +150,6 @@ public void getShardsAllowList() {
     }
   }
 
-  @Test
-  public void testLiveNodesToHostUrl() {
-    Set<String> liveNodes =
-        new HashSet<>(Arrays.asList("1.2.3.4:8983_solr", "1.2.3.4:9000_", "1.2.3.4:9001_solr-2"));
-    ClusterState cs = new ClusterState(liveNodes, new HashMap<>());
-    Set<String> hostSet = cs.getHostAllowList();
-    assertThat(hostSet.size(), is(3));
-    assertThat(hostSet, hasItem("1.2.3.4:8983"));
-    assertThat(hostSet, hasItem("1.2.3.4:9000"));
-    assertThat(hostSet, hasItem("1.2.3.4:9001"));
-  }
-
   @Test
   public void testXML() {
     Path home = TEST_PATH();
diff --git a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
index bd452178f63..2cc881cecc2 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestManagedSchemaThreadSafety.java
@@ -183,8 +183,7 @@ private Runnable indexSchemaLoader(String configsetName, final ZkController zkCo
       try {
         SolrResourceLoader loader =
             new ZkSolrResourceLoader(loaderPath, configsetName, null, zkController);
-        SolrConfig solrConfig =
-            SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", true, null);
+        SolrConfig solrConfig = SolrConfig.readFromResourceLoader(loader, "solrconfig.xml", null);
 
         ManagedIndexSchemaFactory factory = new ManagedIndexSchemaFactory();
         factory.init(new NamedList<>());
diff --git a/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java b/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java
index b32c2124c15..0a4f57ba5af 100644
--- a/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java
+++ b/solr/core/src/test/org/apache/solr/security/AllowListUrlCheckerTest.java
@@ -24,11 +24,14 @@
 import java.net.MalformedURLException;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterState;
 import org.junit.Test;
 
 /** Tests {@link AllowListUrlChecker}. */
@@ -196,6 +199,51 @@ public void testHostParsingNoProtocol() throws Exception {
         equalTo(AllowListUrlChecker.parseHostPorts(urls("https://abc-1.com:8983/solr"))));
   }
 
+  @Test
+  public void testLiveNodesToHostUrlCache() throws Exception {
+    // Given some live nodes defined in the cluster state.
+    Set<String> liveNodes = Set.of("1.2.3.4:8983_solr", "1.2.3.4:9000_", "1.2.3.4:9001_solr-2");
+    ClusterState clusterState1 = new ClusterState(liveNodes, new HashMap<>());
+
+    // When we call the AllowListUrlChecker.checkAllowList method on both valid and invalid urls.
+    AtomicInteger callCount = new AtomicInteger();
+    AllowListUrlChecker checker =
+        new AllowListUrlChecker(List.of()) {
+          @Override
+          Set<String> buildLiveHostUrls(Set<String> liveNodes) {
+            callCount.incrementAndGet();
+            return super.buildLiveHostUrls(liveNodes);
+          }
+        };
+    for (int i = 0; i < 3; i++) {
+      checker.checkAllowList(
+          List.of("1.2.3.4:8983", "1.2.3.4:9000", "1.2.3.4:9001"), clusterState1);
+      SolrException exception =
+          expectThrows(
+              SolrException.class,
+              () -> checker.checkAllowList(List.of("1.1.3.4:8983"), clusterState1));
+      assertThat(exception.code(), equalTo(SolrException.ErrorCode.FORBIDDEN.code));
+    }
+    // Then we verify that the AllowListUrlChecker caches the live host urls and only builds them
+    // once.
+    assertThat(callCount.get(), equalTo(1));
+
+    // And when the ClusterState live nodes change.
+    liveNodes = Set.of("2.3.4.5:8983_solr", "2.3.4.5:9000_", "2.3.4.5:9001_solr-2");
+    ClusterState clusterState2 = new ClusterState(liveNodes, new HashMap<>());
+    for (int i = 0; i < 3; i++) {
+      checker.checkAllowList(
+          List.of("2.3.4.5:8983", "2.3.4.5:9000", "2.3.4.5:9001"), clusterState2);
+      SolrException exception =
+          expectThrows(
+              SolrException.class,
+              () -> checker.checkAllowList(List.of("1.1.3.4:8983"), clusterState2));
+      assertThat(exception.code(), equalTo(SolrException.ErrorCode.FORBIDDEN.code));
+    }
+    // Then the AllowListUrlChecker rebuilds the cache of live host urls.
+    assertThat(callCount.get(), equalTo(2));
+  }
+
   private static List<String> urls(String... urls) {
     return Arrays.asList(urls);
   }
diff --git a/solr/example/README.md b/solr/example/README.md
index 09bb2dfb277..f0c8e46a4be 100644
--- a/solr/example/README.md
+++ b/solr/example/README.md
@@ -73,18 +73,6 @@ For a list of other tutorials and introductory articles.
 Notes About These Examples
 --------------------------
 
-### References to Jar Files Outside This Directory
-
-Various example SolrHome dirs contained in this directory may use "<lib>"
-statements in the solrconfig.xml file to reference plugin jars outside of
-this directory for loading modules via relative paths.  
-
-If you make a copy of this example server and wish to use the
-ExtractingRequestHandler (SolrCell), the clustering component,
-or any other modules, you will need to
-copy the required jars or update the paths to those jars in your
-solrconfig.xml.
-
 ### Logging
 
 By default, Jetty & Solr will log to the console and logs/solr.log. This can
diff --git a/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml b/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
index b863d61728c..c20ee2026f6 100644
--- a/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
+++ b/solr/modules/ltr/src/test-files/solr/collection1/conf/solrconfig-ltr.xml
@@ -16,9 +16,6 @@
  <directoryFactory name="DirectoryFactory"
   class="${solr.directoryFactory:solr.MockDirectoryFactory}" />
 
- <!-- for use with the DefaultWrapperModel class -->
- <lib dir="${solr.solr.home:.}/models" />
-
  <schemaFactory class="ClassicIndexSchemaFactory" />
 
  <requestDispatcher>
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc
index 57707e94c6e..b44c8a82825 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/config-sets.adoc
@@ -39,7 +39,7 @@ The Configsets API has some other operations as well, and likewise, so does the
 To upload a file to a configset already stored on ZooKeeper, you can use xref:deployment-guide:solr-control-script-reference.adoc#copy-between-local-files-and-zookeeper-znodes[`bin/solr zk cp`].
 
 CAUTION: By default, ZooKeeper's file size limit is 1MB.
-If your files are larger than this, you'll need to either xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase the ZooKeeper file size limit] or store them xref:libs.adoc#lib-directives-in-solrconfig[on the filesystem] of every node in a cluster.
+If your files are larger than this, you'll need to either xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase the ZooKeeper file size limit] or store them xref:libs.adoc[on the filesystem] of every node in a cluster.
 
 === Forbidden File Types
 
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
index 45987271de6..1a5dc43fe48 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configsets-api.adoc
@@ -97,8 +97,6 @@ Upon creation of a collection using an "untrusted" configset, the following func
 
 * The XSLT transformer (`tr` parameter) cannot be used at request processing time.
 * If specified in the configset, the ScriptUpdateProcessorFactory will not initialize.
-* Collections won't initialize if <lib> directives are used in the configset.
-(Note: Libraries added to Solr's classpath don't need the <lib> directive)
 
 If you use any of these parameters or features, you must have enabled security features in your Solr installation and you must upload the configset as an authenticated user.
 
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
index 549753d5f3a..13c90737ffe 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solr-xml.adoc
@@ -253,8 +253,7 @@ Note that specifying `sharedLib` will not remove `$SOLR_HOME/lib` from Solr's cl
 +
 Takes a list of bundled xref:solr-modules.adoc[] to enable
 on startup. This way of adding modules will add them to the shared class loader, making them
-available to every collection in Solr, unlike `<lib>` tag in `solrconfig.xml` which is only
-for that one collection. Example value: `extracting,ltr`. See the
+available to every collection in Solr.  Example value: `extracting,ltr`. See the
 xref:solr-modules.adoc[Solr Modules] chapter for more details.
 
 `allowPaths`::
@@ -506,7 +505,7 @@ Optional parameter to provide a compression implementation for state.json over t
 |===
 +
 The class to use for logging.
-The corresponding JAR file must be available to Solr, perhaps through a `<lib>` directive in `solrconfig.xml`.
+The corresponding JAR file must be available to Solr, perhaps through a `<sharedLib>` directive.
 
 `enabled`::
 +
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
index 3680b928896..324a1fdd09e 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/configuring-solrconfig-xml.adoc
@@ -49,7 +49,6 @@ The `solrconfig.xml` file is located in the `conf/` directory for each collectio
 Several well-commented example files can be found in the `server/solr/configsets/` directories demonstrating best practices for many different types of installations.
 
 Some `solrconfig.xml` aspects are documented in other sections.
-See xref:libs.adoc#lib-directives-in-solrconfig[lib directives in SolrConfig], which can be used for both Plugins and Resources.
 
 ****
 // This tags the below list so it can be used in the parent page section list
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
index 269e56f4169..2ad86d6355b 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/libs.adoc
@@ -47,32 +47,3 @@ Certain plugins or add-ons to plugins require placement here, and they will have
 Solr incorporates Jetty for providing HTTP server functionality.
 Jetty has some directories that contain `.jar` files for itself and its own plugins / modules or JVM level plugins (e.g., loggers).
 Solr plugins won't work in these locations.
-
-== Lib Directives in SolrConfig
-
-_Both_ plugin and xref:resource-loading.adoc[resource] file paths are configurable via `<lib/>` directives in `solrconfig.xml`.
-When a directive matches a directory, then resources can be resolved from it.
-When a directive matches a `.jar` file, Solr plugins and their dependencies are resolved from it.
-Resources can be placed in a `.jar` too but that's unusual.
-It's erroneous to refer to any other type of file.
-
-A `<lib/>` directive must have one (not both) of these two attributes:
-
-* `path`: used to refer to a single directory (for resources) or file (for a plugin `.jar`)
-
-* `dir`: used to refer to _all_ direct descendants of the specified directory.  Optionally supply a `regex` attribute to filter these to those matching the regular expression.
-
-All directories are resolved as relative to the Solr core's `instanceDir`.
-
-These examples show how to load modules into Solr:
-
-[source,xml]
-----
-  <lib dir="${solr.install.dir:../../../..}/modules/extraction/lib" regex=".*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/modules/clustering/lib/" regex=".*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/modules/langid/lib/" regex=".*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/modules/ltr/lib/" regex=".*\.jar" />
-----
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc
index bcdbe17c5ef..3825f362de0 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/resource-loading.adoc
@@ -37,10 +37,5 @@ Prefer to put resources here.
 
 == Resources in Other Places
 
-Resources can also be placed in an arbitrary directory and xref:libs.adoc#lib-directives-in-solrconfig[referenced] from a `<lib />` directive in `solrconfig.xml`, provided the directive refers to a directory and not the actual resource file.
-Example: `<lib path="/volume/models/" />`
-This choice may make sense if the resource is too large for a configset in ZooKeeper.
-However it's up to you to somehow ensure that all nodes in your cluster have access to these resources.
-
 Finally, and this is very unusual, resources can also be packaged inside `.jar` files from which they will be referenced.
 That might make sense for default resources wherein a plugin user can override it via placing the same-named file in a configSet.
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc
index 93a5f0b4ff6..99740b32db1 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/solr-modules.adoc
@@ -45,10 +45,5 @@ You can also specify the modules to include when using the Solr CLI to start Sol
 bin/solr start -e techproducts -Dsolr.modules=scripting
 ----
 
-NOTE: If you only wish to enable a module for certain collections, you may add `<lib>` tags to `solrconfig.xml` in applicable configset(s).
-as explained in xref:configuration-guide:libs.adoc[Lib Directories].
-Collection-level plugins will work if the module is enabled either per collection (`<lib>`) or for the whole Solr node.
-Node-level plugins such as those specified in `solr.xml` will not work when using the `<lib>` option in `solrconfig.xml` because configsets configure collections, not the node.  They must be enabled for the entire Solr node, as described above.
-
 Some modules may have been made available as packages for the xref:configuration-guide:package-manager.adoc[Package Manager],
 check by listing available packages.
diff --git a/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc b/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc
index 8ac9faf031f..3abebae9747 100644
--- a/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc
+++ b/solr/solr-ref-guide/modules/configuration-guide/pages/update-request-processors.adoc
@@ -424,7 +424,7 @@ The {solr-javadocs}/modules/langid/index.html[`langid`] module provides::
 The {solr-javadocs}/modules/analysis-extras/index.html[`analysis-extras`] module provides::
 
 {solr-javadocs}/modules/analysis-extras/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.html[OpenNLPExtractNamedEntitiesUpdateProcessorFactory]::: Update document(s) to be indexed with named entities extracted using an OpenNLP NER model.
-Note that in order to use model files larger than 1MB on SolrCloud, you must either xref:deployment-guide:zookeeper-ensemble#increasing-the-file-size-limit[configure both ZooKeeper server and clients] or xref:libs.adoc#lib-directives-in-solrconfig[store the model files on the filesystem] on each node hosting a collection replica.
+Note that in order to use model files larger than 1MB on SolrCloud, you must xref:deployment-guide:zookeeper-ensemble#increasing-the-file-size-limit[configure both ZooKeeper server and clients].
 
 === Update Processor Factories You Should _Not_ Modify or Remove
 
diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc
index 2d4c528c20e..7e99462bc96 100644
--- a/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc
+++ b/solr/solr-ref-guide/modules/deployment-guide/pages/rule-based-authorization-plugin.adoc
@@ -401,7 +401,6 @@ If edit permissions should only be applied to specific collections, a custom per
 Note that this allows schema read permissions for _all_ collections.
 If read permissions should only be applied to specific collections, a custom permission would need to be created.
 * *config-edit*: this permission is allowed to edit a collection's configuration using the xref:configuration-guide:config-api.adoc[], the xref:configuration-guide:request-parameters-api.adoc[], and other APIs which modify `configoverlay.json`.
-Because configs xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[can add libraries/custom code] from various locations, loading any new code via a trusted SolrConfig is explicitly allowed for users with this permission.
 Note that this allows configuration edit permissions for _all_ collections.
 If edit permissions should only be applied to specific collections, a custom permission would need to be created.
 * *config-read*: this permission is allowed to read a collection's configuration using the xref:configuration-guide:config-api.adoc[], the xref:configuration-guide:request-parameters-api.adoc[], xref:configuration-guide:configsets-api.adoc#configsets-list[Configsets API], the Admin UI's xref:configuration-guide:configuration-files.adoc#files-screen[Files Screen], and other APIs accessing configuration.
diff --git a/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc b/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc
index b1344ed5519..b0cdb7eba30 100644
--- a/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc
+++ b/solr/solr-ref-guide/modules/indexing-guide/pages/indexing-with-tika.adoc
@@ -421,13 +421,8 @@ Also see the section <<Defining XPath Expressions>> for an example.
 
 If you have started Solr with one of the supplied xref:configuration-guide:config-sets.adoc[example configsets], you may already have the `ExtractingRequestHandler` configured by default.
 
-First, you must enable the xref:#module[Module].
-If `solrconfig.xml` is not already configured, you will need to modify it to find the `ExtractingRequestHandler` and its dependencies:
-
-[source,xml]
-----
-  <lib dir="${solr.install.dir:../../..}/modules/extraction/lib" regex=".*\.jar" />
-----
+First, the `extraction` xref:#module[module] must be enabled.
+This can be done by specifying the environment variable `SOLR_MODULES=extraction` in your startup configuration.
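+
+For example, a minimal sketch of enabling it via the startup include script (assuming the standard `solr.in.sh`; use `solr.in.cmd` on Windows):
+
+[source,bash]
+----
+# solr.in.sh -- load the extraction module when Solr starts
+SOLR_MODULES=extraction
+----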
 
 You can then configure the `ExtractingRequestHandler` in `solrconfig.xml`.
 The following is the default configuration found in Solr's `sample_techproducts_configs` configset, which you can modify as needed:
diff --git a/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc b/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc
index 1d3f575ba03..fcdf78e6315 100644
--- a/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc
+++ b/solr/solr-ref-guide/modules/query-guide/pages/learning-to-rank.adoc
@@ -111,10 +111,6 @@ In the form of JSON files your trained model or models (e.g., different models f
 |(custom) |(custom class extending {solr-javadocs}/modules/ltr/org/apache/solr/ltr/model/LTRScoringModel.html[LTRScoringModel]) |(not applicable)
 |===
 
-== Module
-
-This is provided via the `ltr` xref:configuration-guide:solr-modules.adoc[Solr Module] that needs to be enabled before use.
-
 == Installation of LTR
 
 The ltr module requires the `modules/ltr/lib/solr-ltr-*.jar` JARs.
@@ -125,13 +121,8 @@ Learning-To-Rank is a module and therefore its plugins must be configured in `so
 
 === Minimum Requirements
 
-* Include the required module JARs.
-Note that by default paths are relative to the Solr core, so they may need adjustments to your configuration, or an explicit specification of the `$solr.install.dir`.
-+
-[source,xml]
-----
-<lib dir="${solr.install.dir:../../../..}/modules/ltr/lib/" regex=".*\.jar" />
-----
+* Enable the `ltr` module to make the LTR classes available on Solr's classpath.
+See xref:configuration-guide:solr-modules.adoc[Solr Modules] for more details.
 
 * Declaration of the `ltr` query parser.
 +
@@ -248,11 +239,11 @@ Assuming that you consider to use a large model placed at `/path/to/models/myMod
 }
 ----
 
-First, add the directory to Solr's resource paths with a xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[`<lib/>` directive]:
+First, add the directory to Solr's resource paths with a xref:configuration-guide:configuring-solr-xml.adoc#the-solr-element[solr.xml `<sharedLib/>` directive]:
 
 [source,xml]
 ----
-  <lib dir="/path/to" regex="models" />
+  <str name="sharedLib">/path/to/models</str>
 ----
 
 Then, configure `DefaultWrapperModel` to wrap `myModel.json`:
@@ -273,8 +264,6 @@ Then, configure `DefaultWrapperModel` to wrap `myModel.json`:
 
 NOTE: No `"features"` are configured in `myWrapperModel` because the features of the wrapped model (`myModel`) will be used; also note that the `"store"` configured for the wrapper model must match that of the wrapped model i.e., in this example the feature store called `largeModelsFeatureStore` is used.
 
-CAUTION: `<lib dir="/path/to/models" regex=".*\.json" />` doesn't work as expected in this case, because `SolrResourceLoader` considers given resources as JAR if `<lib />` indicates files.
-
 As an alternative to the above-described `DefaultWrapperModel`, it is possible to xref:deployment-guide:zookeeper-ensemble.adoc#increasing-the-file-size-limit[increase ZooKeeper's file size limit].
 
 === Applying Changes
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
index 97942e989c1..af919ebe9d3 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
@@ -60,6 +60,12 @@ Users who previously relied on collection-specific URLs to avoid including the c
 
 The service installer now installs a `systemd` startup script instead of an `init.d` startup script. It is up to the user to uninstall any existing `init.d` script when upgrading.
 
+=== SolrCloud request routing
+
+HTTP requests to SolrCloud that target a specific core must be sent to the node hosting that core; otherwise the request fails with an HTTP 404 Not Found response.
+Previously, SolrCloud would scan the cluster state to locate the core and internally route/proxy the request to the node that has it.
+If only one node is reachable by a client, tools such as the bin/solr export tool, which now contact the relevant nodes directly, will probably not work.
+
 === Deprecation removals
 
 * The `jaegertracer-configurator` module, which was deprecated in 9.2, is removed. Users should migrate to the `opentelemetry` module.
@@ -87,3 +93,7 @@ Please note this also removes the ability to share resource intensive objects ac
 The removed writer types (invoked as part of the `wt` parameter) include `python`, `ruby`, `php`, and `phps`.
 
 * The deprecated support for configuring replication using master/slave terminology is removed.  Use leader/follower.
+
+* Support for the `<lib/>` directive, which historically could be used in solrconfig.xml to add JARs on a core-by-core basis, was deprecated in 9.8 and has now been removed.
+Users that need to vary JAR accessibility on a per-core basis can use Solr's xref:configuration-guide:package-manager.adoc[Package Manager].
+Users who don't need to vary JAR access on a per-core basis have several options, including the xref:configuration-guide:configuring-solr-xml.adoc[`<sharedLib/>` tag supported by solr.xml] or manipulation of Solr's classpath prior to JVM startup.
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
index 399ceba5926..fc3a9d27577 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-9.adoc
@@ -380,7 +380,7 @@ Other relevant placement strategies should be used instead, such as autoscaling
 ** The `solrj-deps` (SolrJ Dependencies) are no longer separated out from the other Server jars.
 ** Please refer to the SolrJ Maven artifact to see the exact dependencies you need to include from `server/solr-webapp/webapp/WEB-INF/lib/` and `server/lib/ext/` if you are loading in SolrJ manually.
 If you plan on using SolrJ as a JDBC driver, please refer to the xref:query-guide:sql-query.adoc#generic-clients[JDBC documentation]
-** More information can be found in the xref:configuration-guide:libs.adoc#lib-directives-in-solrconfig[Libs documentation].
+** More information can be found in the xref:configuration-guide:libs.adoc[Libs documentation].
 
 * SolrJ class `CloudSolrClient` now supports HTTP2. It has a new Builder. See `CloudLegacySolrClient` for the 8.x version of this class.
 * In Backup request responses, the `response` key now uses a map to return information instead of a list. This is only applicable for users returning information in JSON format, which is the default behavior.
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
index 19bfc2565d6..22e1005ed8b 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
@@ -34,7 +34,6 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Consumer;
 import java.util.function.Function;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
@@ -53,6 +52,8 @@
  * Immutable state of the cloud. Normally you can get the state by using {@code
  * ZkStateReader#getClusterState()}.
  *
+ * <p>However, the {@link #setLiveNodes list of live nodes} is updated when nodes go up and down.
+ *
  * @lucene.experimental
  */
 public class ClusterState implements MapWriter {
@@ -63,8 +64,7 @@ public class ClusterState implements MapWriter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String, CollectionRef> collectionStates, immutableCollectionStates;
-  private Set<String> liveNodes;
-  private Set<String> hostAllowList;
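+  // Always an immutable snapshot, replaced atomically via setLiveNodes() as nodes come and go.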
+  private volatile Set<String> liveNodes;
 
   /** Use this constr when ClusterState is meant for consumption. */
   public ClusterState(Set<String> liveNodes, Map<String, DocCollection> collectionStates) {
@@ -85,8 +85,7 @@ private static Map<String, CollectionRef> getRefMap(Map<String, DocCollection> c
    * loaded (parameter order different from constructor above to have different erasures)
    */
   public ClusterState(Map<String, CollectionRef> collectionStates, Set<String> liveNodes) {
-    this.liveNodes = CollectionUtil.newHashSet(liveNodes.size());
-    this.liveNodes.addAll(liveNodes);
+    setLiveNodes(liveNodes);
     this.collectionStates = new LinkedHashMap<>(collectionStates);
     this.immutableCollectionStates = Collections.unmodifiableMap(this.collectionStates);
   }
@@ -189,7 +188,7 @@ public Map<String, DocCollection> getCollectionsMap() {
 
   /** Get names of the currently live nodes. */
   public Set<String> getLiveNodes() {
-    return Collections.unmodifiableSet(liveNodes);
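+    // liveNodes is stored as an immutable copy (see setLiveNodes), so it can be returned directly.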
+    return liveNodes;
   }
 
   @Deprecated
@@ -387,7 +386,7 @@ public boolean equals(Object obj) {
 
   /** Internal API used only by ZkStateReader */
   void setLiveNodes(Set<String> liveNodes) {
-    this.liveNodes = liveNodes;
+    this.liveNodes = Set.copyOf(liveNodes);
   }
 
   /**
@@ -401,20 +400,6 @@ public Map<String, CollectionRef> getCollectionStates() {
     return immutableCollectionStates;
   }
 
-  /**
-   * Gets the set of allowed hosts (host:port) built from the set of live nodes. The set is cached
-   * to be reused.
-   */
-  public Set<String> getHostAllowList() {
-    if (hostAllowList == null) {
-      hostAllowList =
-          getLiveNodes().stream()
-              .map((liveNode) -> liveNode.substring(0, liveNode.indexOf('_')))
-              .collect(Collectors.toSet());
-    }
-    return hostAllowList;
-  }
-
   /**
    * Streams the resolved {@link DocCollection}s, which will often fetch from ZooKeeper for each one
    * for a many-collection scenario. Use this sparingly; some users have thousands of collections!
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java
index 513596533ff..1ac793e852d 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractBasicDistributedZk2TestBase.java
@@ -204,25 +204,13 @@ private void addAndQueryDocs(final String baseUrl, int docs) throws Exception {
 
     SolrQuery query = new SolrQuery("*:*");
 
-    String collectionUrl = baseUrl + "/onenodecollection" + "core";
-    try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollectioncore")) {
-
-      // it might take a moment for the proxy node to see us in their cloud state
-      waitForNon403or404or503(client, collectionUrl);
-
+    try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollection")) {
       // add a doc
-      SolrInputDocument doc = new SolrInputDocument();
-      doc.addField("id", docs);
-      client.add(doc);
+      client.add(sdoc("id", docs));
       client.commit();
 
       QueryResponse results = client.query(query);
       assertEquals(docs - 1, results.getResults().getNumFound());
-    }
-
-    try (SolrClient client = getHttpSolrClient(baseUrl, "onenodecollection")) {
-      QueryResponse results = client.query(query);
-      assertEquals(docs - 1, results.getResults().getNumFound());
 
       SolrInputDocument doc = new SolrInputDocument();
       doc.addField("id", docs + 1);